Merge branch 'master' of github.com:teltek/gst-plugin-ndi into fixed_doc

This commit is contained in:
Samuel 2021-11-12 13:08:15 +01:00
commit 1818a5a7a2
21 changed files with 3315 additions and 2894 deletions

View file

@ -8,22 +8,24 @@ description = "NewTek NDI Plugin"
edition = "2018"
[dependencies]
glib = "0.10"
gst = { package = "gstreamer", version = "0.16", features = ["v1_12"] }
gst-base = { package = "gstreamer-base", version = "0.16" }
gst-audio = { package = "gstreamer-audio", version = "0.16" }
gst-video = { package = "gstreamer-video", version = "0.16", features = ["v1_12"] }
glib = "0.14"
gst = { package = "gstreamer", version = "0.17.4", features = ["v1_12"] }
gst-base = { package = "gstreamer-base", version = "0.17" }
gst-audio = { package = "gstreamer-audio", version = "0.17" }
gst-video = { package = "gstreamer-video", version = "0.17", features = ["v1_12"] }
byte-slice-cast = "1"
once_cell = "1.0"
byteorder = "1.0"
[build-dependencies]
gst-plugin-version-helper = "0.2"
gst-plugin-version-helper = "0.7"
[features]
default = ["interlaced-fields", "reference-timestamps", "sink"]
interlaced-fields = ["gst/v1_16", "gst-video/v1_16"]
reference-timestamps = ["gst/v1_14"]
sink = ["gst/v1_18", "gst-base/v1_18"]
advanced-sdk = []
[lib]
name = "gstndi"

View file

@ -1,27 +1,28 @@
GStreamer NDI Plugin for Linux
====================
*Compiled and tested with NDI SDK 3.5, 3.8, 4.0 and 4.1*
*Compiled and tested with NDI SDK 4.0, 4.1 and 5.0*
This is a plugin for the [GStreamer](https://gstreamer.freedesktop.org/) multimedia framework that allows GStreamer to receive a stream from a [NDI](https://www.newtek.com/ndi/) source. This plugin has been developed by [Teltek](http://teltek.es/) and was funded by the [University of the Arts London](https://www.arts.ac.uk/) and [The University of Manchester](https://www.manchester.ac.uk/).
Currently the plugin has two source elements, `ndivideosrc` to get video from the stream and `ndiaudiosrc` for audio. By just providing the name or the ip of the stream, all the information required from the stream is picked up automatically, such as resolution, framerate, audio channels, ...
Currently the plugin has a source element for receiving from NDI sources, a sink element to provide an NDI source and a device provider for discovering NDI sources on the network.
Some examples of how to use these elements from the command line:
```
#Information about the elements
gst-inspect-1.0 ndi
gst-inspect-1.0 ndivideosrc
gst-inspect-1.0 ndiaudiosrc
```console
# Information about the elements
$ gst-inspect-1.0 ndi
$ gst-inspect-1.0 ndisrc
$ gst-inspect-1.0 ndisink
#Video pipeline
gst-launch-1.0 ndivideosrc ndi-name="GC-DEV2 (OBS)" ! autovideosink
#Audio pipeline
gst-launch-1.0 ndiaudiosrc ndi-name="GC-DEV2 (OBS)" ! autoaudiosink
# Discover all NDI sources on the network
$ gst-device-monitor-1.0 -f Source/Network:application/x-ndi
#Video and audio pipeline
gst-launch-1.0 ndivideosrc ndi-name="GC-DEV2 (OBS)" ! autovideosink ndiaudiosrc ndi-name="GC-DEV2 (OBS)" ! autoaudiosink
# Audio/Video source pipeline
$ gst-launch-1.0 ndisrc ndi-name="GC-DEV2 (OBS)" ! ndisrcdemux name=demux demux.video ! queue ! videoconvert ! autovideosink demux.audio ! queue ! audioconvert ! autoaudiosink
# Audio/Video sink pipeline
$ gst-launch-1.0 videotestsrc is-live=true ! video/x-raw,format=UYVY ! ndisinkcombiner name=combiner ! ndisink ndi-name="My NDI source" audiotestsrc is-live=true ! combiner.audio
```
Feel free to contribute to this project. Some ways you can contribute are:
@ -32,11 +33,9 @@ Compilation of the NDI element
-------
To compile the NDI element it's necessary to install Rust, the NDI SDK and the following packages for gstreamer:
```
apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
gstreamer1.0-plugins-base gstreamer1.0-plugins-good \
gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly \
gstreamer1.0-libav libgstrtspserver-1.0-dev
```console
$ apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
gstreamer1.0-plugins-base
```
To install the required NDI library there are two options:
@ -57,11 +56,12 @@ By defult GStreamer 1.18 is required, to use an older version. You can build wit
If all went ok, you should see info related to the NDI element. To make the plugin available without using `GST_PLUGIN_PATH` it's necessary to copy the plugin to the gstreamer plugins folder.
```
cargo build --release
sudo install -o root -g root -m 644 target/release/libgstndi.so /usr/lib/x86_64-linux-gnu/gstreamer-1.0/
sudo ldconfig
gst-inspect-1.0 ndi
```console
$ cargo build --release
$ sudo install -o root -g root -m 644 target/release/libgstndi.so /usr/lib/x86_64-linux-gnu/gstreamer-1.0/
$ sudo ldconfig
$ gst-inspect-1.0 ndi
```
More info about GStreamer plugins written in Rust:

View file

@ -1,5 +1,3 @@
extern crate gst_plugin_version_helper;
fn main() {
gst_plugin_version_helper::get_info()
gst_plugin_version_helper::info()
}

View file

@ -1,4 +1,3 @@
use glib::subclass;
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_error, gst_log, gst_trace};
@ -9,65 +8,69 @@ use std::sync::atomic;
use std::sync::Mutex;
use std::thread;
use once_cell::sync::Lazy;
use crate::ndi;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"ndideviceprovider",
gst::DebugColorFlags::empty(),
Some("NewTek NDI Device Provider"),
)
});
#[derive(Debug)]
struct DeviceProvider {
cat: gst::DebugCategory,
pub struct DeviceProvider {
thread: Mutex<Option<thread::JoinHandle<()>>>,
current_devices: Mutex<Vec<gst::Device>>,
current_devices: Mutex<Vec<super::Device>>,
find: Mutex<Option<ndi::FindInstance>>,
is_running: atomic::AtomicBool,
}
#[glib::object_subclass]
impl ObjectSubclass for DeviceProvider {
const NAME: &'static str = "NdiDeviceProvider";
type Type = super::DeviceProvider;
type ParentType = gst::DeviceProvider;
type Instance = subclass::simple::InstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
glib::glib_object_subclass!();
fn new() -> Self {
Self {
cat: gst::DebugCategory::new(
"ndideviceprovider",
gst::DebugColorFlags::empty(),
Some("NewTek NDI Device Provider"),
),
thread: Mutex::new(None),
current_devices: Mutex::new(vec![]),
find: Mutex::new(None),
is_running: atomic::AtomicBool::new(false),
}
}
fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
klass.set_metadata(
"NewTek NDI Device Provider",
"Source/Audio/Video/Network",
"NewTek NDI Device Provider",
"Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>, Sebastian Dröge <sebastian@centricular.com>",
);
}
}
impl ObjectImpl for DeviceProvider {
glib::glib_object_impl!();
}
impl ObjectImpl for DeviceProvider {}
impl DeviceProviderImpl for DeviceProvider {
fn probe(&self, _device_provider: &gst::DeviceProvider) -> Vec<gst::Device> {
self.current_devices.lock().unwrap().clone()
fn metadata() -> Option<&'static gst::subclass::DeviceProviderMetadata> {
static METADATA: Lazy<gst::subclass::DeviceProviderMetadata> = Lazy::new(|| {
gst::subclass::DeviceProviderMetadata::new("NewTek NDI Device Provider",
"Source/Audio/Video/Network",
"NewTek NDI Device Provider",
"Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>, Sebastian Dröge <sebastian@centricular.com>")
});
Some(&*METADATA)
}
fn start(&self, device_provider: &gst::DeviceProvider) -> Result<(), gst::LoggableError> {
fn probe(&self, _device_provider: &Self::Type) -> Vec<gst::Device> {
self.current_devices
.lock()
.unwrap()
.iter()
.map(|d| d.clone().upcast())
.collect()
}
fn start(&self, device_provider: &Self::Type) -> Result<(), gst::LoggableError> {
let mut thread_guard = self.thread.lock().unwrap();
if thread_guard.is_some() {
gst_log!(
self.cat,
obj: device_provider,
"Device provider already started"
);
gst_log!(CAT, obj: device_provider, "Device provider already started");
return Ok(());
}
@ -85,17 +88,13 @@ impl DeviceProviderImpl for DeviceProvider {
{
let mut find_guard = imp.find.lock().unwrap();
if find_guard.is_some() {
gst_log!(imp.cat, obj: &device_provider, "Already started");
gst_log!(CAT, obj: &device_provider, "Already started");
return;
}
let find = match ndi::FindInstance::builder().build() {
None => {
gst_error!(
imp.cat,
obj: &device_provider,
"Failed to create Find instance"
);
gst_error!(CAT, obj: &device_provider, "Failed to create Find instance");
return;
}
Some(find) => find,
@ -121,7 +120,8 @@ impl DeviceProviderImpl for DeviceProvider {
Ok(())
}
fn stop(&self, _device_provider: &gst::DeviceProvider) {
fn stop(&self, _device_provider: &Self::Type) {
if let Some(_thread) = self.thread.lock().unwrap().take() {
self.is_running.store(false, atomic::Ordering::SeqCst);
// Don't actually join because that might take a while
@ -130,7 +130,7 @@ impl DeviceProviderImpl for DeviceProvider {
}
impl DeviceProvider {
fn poll(&self, device_provider: &gst::DeviceProvider, first: bool) {
fn poll(&self, device_provider: &super::DeviceProvider, first: bool) {
let mut find_guard = self.find.lock().unwrap();
let find = match *find_guard {
None => return,
@ -138,7 +138,7 @@ impl DeviceProvider {
};
if !find.wait_for_sources(if first { 1000 } else { 5000 }) {
gst_trace!(self.cat, obj: device_provider, "No new sources found");
gst_trace!(CAT, obj: device_provider, "No new sources found");
return;
}
@ -154,9 +154,9 @@ impl DeviceProvider {
let old_device_imp = Device::from_instance(old_device);
let old_source = old_device_imp.source.get().unwrap();
if !sources.contains(&old_source.0) {
if !sources.contains(&*old_source) {
gst_log!(
self.cat,
CAT,
obj: device_provider,
"Source {:?} disappeared",
old_source
@ -165,7 +165,7 @@ impl DeviceProvider {
} else {
// Otherwise remember that we had it before already and don't have to announce it
// again. After the loop we're going to remove these all from the sources vec.
remaining_sources.push(old_source.0.to_owned());
remaining_sources.push(old_source.to_owned());
}
}
@ -182,18 +182,8 @@ impl DeviceProvider {
// Now go through all new devices and announce them
for source in sources {
gst_log!(
self.cat,
obj: device_provider,
"Source {:?} appeared",
source
);
// Add once for audio, another time for video
let device = Device::new(&source, true);
device_provider.device_add(&device);
current_devices_guard.push(device);
let device = Device::new(&source, false);
gst_log!(CAT, obj: device_provider, "Source {:?} appeared", source);
let device = super::Device::new(&source);
device_provider.device_add(&device);
current_devices_guard.push(device);
}
@ -201,48 +191,38 @@ impl DeviceProvider {
}
#[derive(Debug)]
struct Device {
cat: gst::DebugCategory,
source: OnceCell<(ndi::Source<'static>, glib::Type)>,
pub struct Device {
source: OnceCell<ndi::Source<'static>>,
}
#[glib::object_subclass]
impl ObjectSubclass for Device {
const NAME: &'static str = "NdiDevice";
type Type = super::Device;
type ParentType = gst::Device;
type Instance = subclass::simple::InstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
glib::glib_object_subclass!();
fn new() -> Self {
Self {
cat: gst::DebugCategory::new(
"ndidevice",
gst::DebugColorFlags::empty(),
Some("NewTek NDI Device"),
),
source: OnceCell::new(),
}
}
}
impl ObjectImpl for Device {
glib::glib_object_impl!();
}
impl ObjectImpl for Device {}
impl DeviceImpl for Device {
fn create_element(
&self,
_device: &gst::Device,
_device: &Self::Type,
name: Option<&str>,
) -> Result<gst::Element, gst::LoggableError> {
let source_info = self.source.get().unwrap();
let element = glib::Object::new(
source_info.1,
let element = glib::Object::with_type(
crate::ndisrc::NdiSrc::static_type(),
&[
("name", &name),
("ndi-name", &source_info.0.ndi_name()),
("url-address", &source_info.0.url_address()),
("ndi-name", &source_info.ndi_name()),
("url-address", &source_info.url_address()),
],
)
.unwrap()
@ -253,27 +233,15 @@ impl DeviceImpl for Device {
}
}
impl Device {
fn new(source: &ndi::Source<'_>, is_audio: bool) -> gst::Device {
let display_name = format!(
"{} ({})",
source.ndi_name(),
if is_audio { "Audio" } else { "Video" }
);
let device_class = format!(
"Source/{}/Network",
if is_audio { "Audio" } else { "Video" }
);
impl super::Device {
fn new(source: &ndi::Source<'_>) -> super::Device {
let display_name = source.ndi_name();
let device_class = "Source/Audio/Video/Network";
// Get the caps from the template caps of the corresponding source element
let element_type = if is_audio {
crate::ndiaudiosrc::NdiAudioSrc::get_type()
} else {
crate::ndivideosrc::NdiVideoSrc::get_type()
};
let element_class = gst::ElementClass::from_type(element_type).unwrap();
let templ = element_class.get_pad_template("src").unwrap();
let caps = templ.get_caps().unwrap();
let element_class =
glib::Class::<gst::Element>::from_type(crate::ndisrc::NdiSrc::static_type()).unwrap();
let templ = element_class.pad_template("src").unwrap();
let caps = templ.caps();
// Put the url-address into the extra properties
let extra_properties = gst::Structure::builder("properties")
@ -281,34 +249,17 @@ impl Device {
.field("url-address", &source.url_address())
.build();
let device = glib::Object::new(
Device::get_type(),
&[
("caps", &caps),
("display-name", &display_name),
("device-class", &device_class),
("properties", &extra_properties),
],
)
.unwrap()
.dynamic_cast::<gst::Device>()
let device = glib::Object::new::<super::Device>(&[
("caps", &caps),
("display-name", &display_name),
("device-class", &device_class),
("properties", &extra_properties),
])
.unwrap();
let device_impl = Device::from_instance(&device);
device_impl
.source
.set((source.to_owned(), element_type))
.unwrap();
device_impl.source.set(source.to_owned()).unwrap();
device
}
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::DeviceProvider::register(
Some(plugin),
"ndideviceprovider",
gst::Rank::Primary,
DeviceProvider::get_type(),
)
}

View file

@ -0,0 +1,26 @@
use glib::prelude::*;
mod imp;
glib::wrapper! {
pub struct DeviceProvider(ObjectSubclass<imp::DeviceProvider>) @extends gst::DeviceProvider, gst::Object;
}
unsafe impl Send for DeviceProvider {}
unsafe impl Sync for DeviceProvider {}
glib::wrapper! {
pub struct Device(ObjectSubclass<imp::Device>) @extends gst::Device, gst::Object;
}
unsafe impl Send for Device {}
unsafe impl Sync for Device {}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::DeviceProvider::register(
Some(plugin),
"ndideviceprovider",
gst::Rank::Primary,
DeviceProvider::static_type(),
)
}

View file

@ -1,23 +1,21 @@
use glib::prelude::*;
mod device_provider;
pub mod ndi;
mod ndiaudiosrc;
#[cfg(feature = "sink")]
mod ndisink;
#[cfg(feature = "sink")]
mod ndisinkcombiner;
#[cfg(feature = "sink")]
pub mod ndisinkmeta;
mod ndisrc;
mod ndisrcdemux;
pub mod ndisrcmeta;
pub mod ndisys;
mod ndivideosrc;
pub mod receiver;
use crate::ndi::*;
use crate::ndisys::*;
use crate::receiver::*;
use std::collections::HashMap;
use std::time;
use once_cell::sync::Lazy;
@ -34,17 +32,97 @@ pub enum TimestampMode {
Timecode = 2,
#[genum(name = "NDI Timestamp", nick = "timestamp")]
Timestamp = 3,
#[genum(name = "Receive Time", nick = "receive-time")]
ReceiveTime = 4,
}
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::GEnum)]
#[repr(u32)]
#[genum(type_name = "GstNdiRecvColorFormat")]
pub enum RecvColorFormat {
#[genum(name = "BGRX or BGRA", nick = "bgrx-bgra")]
BgrxBgra = 0,
#[genum(name = "UYVY or BGRA", nick = "uyvy-bgra")]
UyvyBgra = 1,
#[genum(name = "RGBX or RGBA", nick = "rgbx-rgba")]
RgbxRgba = 2,
#[genum(name = "UYVY or RGBA", nick = "uyvy-rgba")]
UyvyRgba = 3,
#[genum(name = "Fastest", nick = "fastest")]
Fastest = 4,
#[genum(name = "Best", nick = "best")]
Best = 5,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v1", nick = "compressed-v1")]
CompressedV1 = 6,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v2", nick = "compressed-v2")]
CompressedV2 = 7,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v3", nick = "compressed-v3")]
CompressedV3 = 8,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v3 with audio", nick = "compressed-v3-with-audio")]
CompressedV3WithAudio = 9,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v4", nick = "compressed-v4")]
CompressedV4 = 10,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v4 with audio", nick = "compressed-v4-with-audio")]
CompressedV4WithAudio = 11,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v5", nick = "compressed-v5")]
CompressedV5 = 12,
#[cfg(feature = "advanced-sdk")]
#[genum(name = "Compressed v5 with audio", nick = "compressed-v5-with-audio")]
CompressedV5WithAudio = 13,
}
impl From<RecvColorFormat> for NDIlib_recv_color_format_e {
fn from(v: RecvColorFormat) -> Self {
match v {
RecvColorFormat::BgrxBgra => NDIlib_recv_color_format_BGRX_BGRA,
RecvColorFormat::UyvyBgra => NDIlib_recv_color_format_UYVY_BGRA,
RecvColorFormat::RgbxRgba => NDIlib_recv_color_format_RGBX_RGBA,
RecvColorFormat::UyvyRgba => NDIlib_recv_color_format_UYVY_RGBA,
RecvColorFormat::Fastest => NDIlib_recv_color_format_fastest,
RecvColorFormat::Best => NDIlib_recv_color_format_best,
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV1 => NDIlib_recv_color_format_ex_compressed,
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV2 => NDIlib_recv_color_format_ex_compressed_v2,
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV3 => NDIlib_recv_color_format_ex_compressed_v3,
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV3WithAudio => {
NDIlib_recv_color_format_ex_compressed_v3_with_audio
}
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV4 => NDIlib_recv_color_format_ex_compressed_v4,
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV4WithAudio => {
NDIlib_recv_color_format_ex_compressed_v4_with_audio
}
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV5 => NDIlib_recv_color_format_ex_compressed_v5,
#[cfg(feature = "advanced-sdk")]
RecvColorFormat::CompressedV5WithAudio => {
NDIlib_recv_color_format_ex_compressed_v5_with_audio
}
}
}
}
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
if !ndi::initialize() {
return Err(glib::glib_bool_error!("Cannot initialize NDI"));
return Err(glib::bool_error!("Cannot initialize NDI"));
}
device_provider::register(plugin)?;
ndivideosrc::register(plugin)?;
ndiaudiosrc::register(plugin)?;
ndisrc::register(plugin)?;
ndisrcdemux::register(plugin)?;
#[cfg(feature = "sink")]
{
ndisinkcombiner::register(plugin)?;
@ -68,7 +146,7 @@ static TIMECODE_CAPS: Lazy<gst::Caps> =
static TIMESTAMP_CAPS: Lazy<gst::Caps> =
Lazy::new(|| gst::Caps::new_simple("timestamp/x-ndi-timestamp", &[]));
gst::gst_plugin_define!(
gst::plugin_define!(
ndi,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,

View file

@ -3,7 +3,6 @@ use crate::ndisys::*;
use std::ffi;
use std::mem;
use std::ptr;
use std::sync::{Arc, Mutex};
use byte_slice_cast::*;
@ -242,27 +241,16 @@ impl<'a> RecvBuilder<'a> {
if ptr.is_null() {
None
} else {
Some(RecvInstance(Arc::new((
RecvInstanceInner(ptr::NonNull::new_unchecked(ptr)),
Mutex::new(()),
))))
Some(RecvInstance(ptr::NonNull::new_unchecked(ptr)))
}
}
}
}
// Any access to the RecvInstanceInner apart from calling the capture function must be protected by
// the mutex
#[derive(Debug, Clone)]
pub struct RecvInstance(Arc<(RecvInstanceInner, Mutex<()>)>);
pub struct RecvInstance(ptr::NonNull<::std::os::raw::c_void>);
#[derive(Debug)]
struct RecvInstanceInner(ptr::NonNull<::std::os::raw::c_void>);
unsafe impl Send for RecvInstanceInner {}
// Not 100% true but we ensure safety with the mutex. The documentation says that only the
// capturing itself can be performed from multiple threads at once safely.
unsafe impl Sync for RecvInstanceInner {}
unsafe impl Send for RecvInstance {}
impl RecvInstance {
pub fn builder<'a>(
@ -275,91 +263,53 @@ impl RecvInstance {
url_address,
allow_video_fields: true,
bandwidth: NDIlib_recv_bandwidth_highest,
color_format: NDIlib_recv_color_format_e::NDIlib_recv_color_format_UYVY_BGRA,
color_format: NDIlib_recv_color_format_UYVY_BGRA,
ndi_recv_name,
}
}
pub fn set_tally(&self, tally: &Tally) -> bool {
unsafe {
let _lock = (self.0).1.lock().unwrap();
NDIlib_recv_set_tally(((self.0).0).0.as_ptr(), &tally.0)
}
unsafe { NDIlib_recv_set_tally(self.0.as_ptr(), &tally.0) }
}
pub fn send_metadata(&self, metadata: &MetadataFrame) -> bool {
unsafe {
let _lock = (self.0).1.lock().unwrap();
NDIlib_recv_send_metadata(((self.0).0).0.as_ptr(), metadata.as_ptr())
}
unsafe { NDIlib_recv_send_metadata(self.0.as_ptr(), metadata.as_ptr()) }
}
pub fn get_queue(&self) -> Queue {
unsafe {
let _lock = (self.0).1.lock().unwrap();
let mut queue = mem::MaybeUninit::uninit();
NDIlib_recv_get_queue(((self.0).0).0.as_ptr(), queue.as_mut_ptr());
NDIlib_recv_get_queue(self.0.as_ptr(), queue.as_mut_ptr());
Queue(queue.assume_init())
}
}
pub fn capture(
&self,
video: bool,
audio: bool,
metadata: bool,
timeout_in_ms: u32,
) -> Result<Option<Frame>, ()> {
pub fn capture(&self, timeout_in_ms: u32) -> Result<Option<Frame>, ()> {
unsafe {
// Capturing from multiple threads at once is safe according to the documentation
let ptr = ((self.0).0).0.as_ptr();
let ptr = self.0.as_ptr();
let mut video_frame = mem::zeroed();
let mut audio_frame = mem::zeroed();
let mut metadata_frame = mem::zeroed();
let res = NDIlib_recv_capture_v2(
let res = NDIlib_recv_capture_v3(
ptr,
if video {
&mut video_frame
} else {
ptr::null_mut()
},
if audio {
&mut audio_frame
} else {
ptr::null_mut()
},
if metadata {
&mut metadata_frame
} else {
ptr::null_mut()
},
&mut video_frame,
&mut audio_frame,
&mut metadata_frame,
timeout_in_ms,
);
match res {
NDIlib_frame_type_e::NDIlib_frame_type_audio => {
assert!(audio);
Ok(Some(Frame::Audio(AudioFrame::BorrowedRecv(
audio_frame,
self,
))))
}
NDIlib_frame_type_e::NDIlib_frame_type_video => {
assert!(video);
Ok(Some(Frame::Video(VideoFrame::BorrowedRecv(
video_frame,
self,
))))
}
NDIlib_frame_type_e::NDIlib_frame_type_metadata => {
assert!(metadata);
Ok(Some(Frame::Metadata(MetadataFrame::Borrowed(
metadata_frame,
self,
))))
}
NDIlib_frame_type_e::NDIlib_frame_type_audio => Ok(Some(Frame::Audio(
AudioFrame::BorrowedRecv(audio_frame, self),
))),
NDIlib_frame_type_e::NDIlib_frame_type_video => Ok(Some(Frame::Video(
VideoFrame::BorrowedRecv(video_frame, self),
))),
NDIlib_frame_type_e::NDIlib_frame_type_metadata => Ok(Some(Frame::Metadata(
MetadataFrame::Borrowed(metadata_frame, self),
))),
NDIlib_frame_type_e::NDIlib_frame_type_error => Err(()),
_ => Ok(None),
}
@ -367,7 +317,7 @@ impl RecvInstance {
}
}
impl Drop for RecvInstanceInner {
impl Drop for RecvInstance {
fn drop(&mut self) {
unsafe { NDIlib_recv_destroy(self.0.as_ptr() as *mut _) }
}
@ -420,7 +370,7 @@ pub struct SendInstance(ptr::NonNull<::std::os::raw::c_void>);
unsafe impl Send for SendInstance {}
impl SendInstance {
pub fn builder<'a>(ndi_name: &'a str) -> SendBuilder<'a> {
pub fn builder(ndi_name: &str) -> SendBuilder {
SendBuilder {
ndi_name,
clock_video: false,
@ -436,7 +386,7 @@ impl SendInstance {
pub fn send_audio(&mut self, frame: &AudioFrame) {
unsafe {
NDIlib_send_send_audio_v2(self.0.as_ptr(), frame.as_ptr());
NDIlib_send_send_audio_v3(self.0.as_ptr(), frame.as_ptr());
}
}
}
@ -551,26 +501,144 @@ impl<'a> VideoFrame<'a> {
}
}
pub fn data(&self) -> &[u8] {
// FIXME: Unclear if this is correct. Needs to be validated against an actual
// interlaced stream
let frame_size = if self.frame_format_type()
== NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0
|| self.frame_format_type()
== NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_1
pub fn data(&self) -> Option<&[u8]> {
let fourcc = self.fourcc();
if [
NDIlib_FourCC_video_type_UYVY,
NDIlib_FourCC_video_type_UYVA,
NDIlib_FourCC_video_type_P216,
NDIlib_FourCC_video_type_PA16,
NDIlib_FourCC_video_type_YV12,
NDIlib_FourCC_video_type_I420,
NDIlib_FourCC_video_type_NV12,
NDIlib_FourCC_video_type_BGRA,
NDIlib_FourCC_video_type_BGRX,
NDIlib_FourCC_video_type_RGBA,
NDIlib_FourCC_video_type_RGBX,
]
.contains(&fourcc)
{
self.yres() * self.line_stride_or_data_size_in_bytes() / 2
} else {
self.yres() * self.line_stride_or_data_size_in_bytes()
};
// FIXME: Unclear if this is correct. Needs to be validated against an actual
// interlaced stream
let frame_size = if self.frame_format_type()
== NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0
|| self.frame_format_type()
== NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_1
{
self.yres() * self.line_stride_or_data_size_in_bytes() / 2
} else {
self.yres() * self.line_stride_or_data_size_in_bytes()
};
return unsafe {
use std::slice;
match self {
VideoFrame::BorrowedRecv(ref frame, _)
| VideoFrame::BorrowedGst(ref frame, _) => Some(slice::from_raw_parts(
frame.p_data as *const u8,
frame_size as usize,
)),
}
};
}
#[cfg(feature = "advanced-sdk")]
if [
NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth,
NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth,
NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth,
NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth,
NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth,
NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth,
]
.contains(&fourcc)
{
return unsafe {
use std::slice;
match self {
VideoFrame::BorrowedRecv(ref frame, _)
| VideoFrame::BorrowedGst(ref frame, _) => Some(slice::from_raw_parts(
frame.p_data as *const u8,
frame.line_stride_or_data_size_in_bytes as usize,
)),
}
};
}
None
}
#[cfg(feature = "advanced-sdk")]
pub fn compressed_packet(&self) -> Option<CompressedPacket> {
use byteorder::{LittleEndian, ReadBytesExt};
use std::io::Cursor;
use std::slice;
unsafe {
use std::slice;
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
slice::from_raw_parts(frame.p_data as *const u8, frame_size as usize)
}
let fourcc = self.fourcc();
if ![
NDIlib_FourCC_video_type_ex_H264_highest_bandwidth,
NDIlib_FourCC_video_type_ex_H264_lowest_bandwidth,
NDIlib_FourCC_video_type_ex_HEVC_highest_bandwidth,
NDIlib_FourCC_video_type_ex_HEVC_lowest_bandwidth,
NDIlib_FourCC_video_type_ex_H264_alpha_highest_bandwidth,
NDIlib_FourCC_video_type_ex_H264_alpha_lowest_bandwidth,
NDIlib_FourCC_video_type_ex_HEVC_alpha_highest_bandwidth,
NDIlib_FourCC_video_type_ex_HEVC_alpha_lowest_bandwidth,
]
.contains(&fourcc)
{
return None;
}
let data = match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
slice::from_raw_parts(
frame.p_data as *const u8,
frame.line_stride_or_data_size_in_bytes as usize,
)
}
};
let mut cursor = Cursor::new(data);
let version = cursor.read_u32::<LittleEndian>().ok()?;
if version != ndisys::NDIlib_compressed_packet_version_0 {
return None;
}
let fourcc = cursor.read_u32::<LittleEndian>().ok()?;
let pts = cursor.read_i64::<LittleEndian>().ok()?;
let dts = cursor.read_i64::<LittleEndian>().ok()?;
let _reserved = cursor.read_u64::<LittleEndian>().ok()?;
let flags = cursor.read_u32::<LittleEndian>().ok()?;
let data_size = cursor.read_u32::<LittleEndian>().ok()?;
let extra_data_size = cursor.read_u32::<LittleEndian>().ok()?;
let expected_size = (ndisys::NDIlib_compressed_packet_version_0 as usize)
.checked_add(data_size as usize)?
.checked_add(extra_data_size as usize)?;
if data.len() < expected_size {
return None;
}
Some(CompressedPacket {
fourcc,
pts,
dts,
key_frame: flags & ndisys::NDIlib_compressed_packet_flags_keyframe != 0,
data: &data[ndisys::NDIlib_compressed_packet_version_0 as usize..]
[..data_size as usize],
extra_data: if extra_data_size > 0 {
Some(
&data[ndisys::NDIlib_compressed_packet_version_0 as usize
+ data_size as usize..][..extra_data_size as usize],
)
} else {
None
},
})
}
}
@ -736,7 +804,7 @@ impl<'a> VideoFrame<'a> {
picture_aspect_ratio,
frame_format_type,
timecode,
p_data: frame.plane_data(0).unwrap().as_ptr() as *const i8,
p_data: frame.plane_data(0).unwrap().as_ptr() as *const ::std::os::raw::c_char,
line_stride_or_data_size_in_bytes: frame.plane_stride()[0],
p_metadata: ptr::null(),
timestamp: 0,
@ -749,9 +817,9 @@ impl<'a> VideoFrame<'a> {
impl<'a> Drop for VideoFrame<'a> {
#[allow(irrefutable_let_patterns)]
fn drop(&mut self) {
if let VideoFrame::BorrowedRecv(ref mut frame, ref recv) = *self {
if let VideoFrame::BorrowedRecv(ref mut frame, recv) = *self {
unsafe {
NDIlib_recv_free_video_v2(((recv.0).0).0.as_ptr() as *mut _, frame);
NDIlib_recv_free_video_v2(recv.0.as_ptr() as *mut _, frame);
}
}
}
@ -760,11 +828,11 @@ impl<'a> Drop for VideoFrame<'a> {
#[derive(Debug)]
pub enum AudioFrame<'a> {
Owned(
NDIlib_audio_frame_v2_t,
NDIlib_audio_frame_v3_t,
Option<ffi::CString>,
Option<Vec<f32>>,
),
BorrowedRecv(NDIlib_audio_frame_v2_t, &'a RecvInstance),
BorrowedRecv(NDIlib_audio_frame_v3_t, &'a RecvInstance),
}
impl<'a> AudioFrame<'a> {
@ -800,24 +868,114 @@ impl<'a> AudioFrame<'a> {
}
}
pub fn data(&self) -> &[u8] {
unsafe {
use std::slice;
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
slice::from_raw_parts(
frame.p_data as *const u8,
(frame.no_samples * frame.channel_stride_in_bytes) as usize,
)
}
pub fn fourcc(&self) -> NDIlib_FourCC_audio_type_e {
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
frame.FourCC
}
}
}
pub fn channel_stride_in_bytes(&self) -> i32 {
pub fn data(&self) -> Option<&[u8]> {
unsafe {
use std::slice;
let fourcc = self.fourcc();
if [NDIlib_FourCC_audio_type_FLTp].contains(&fourcc) {
return match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
Some(slice::from_raw_parts(
frame.p_data as *const u8,
(frame.no_channels * frame.channel_stride_or_data_size_in_bytes)
as usize,
))
}
};
}
#[cfg(feature = "advanced-sdk")]
if [NDIlib_FourCC_audio_type_Opus].contains(&fourcc) {
return match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
Some(slice::from_raw_parts(
frame.p_data as *const u8,
frame.channel_stride_or_data_size_in_bytes as usize,
))
}
};
}
None
}
}
#[cfg(feature = "advanced-sdk")]
pub fn compressed_packet(&self) -> Option<CompressedPacket> {
use byteorder::{LittleEndian, ReadBytesExt};
use std::io::Cursor;
use std::slice;
unsafe {
let fourcc = self.fourcc();
if ![NDIlib_FourCC_audio_type_AAC].contains(&fourcc) {
return None;
}
let data = match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
slice::from_raw_parts(
frame.p_data as *const u8,
frame.channel_stride_or_data_size_in_bytes as usize,
)
}
};
let mut cursor = Cursor::new(data);
let version = cursor.read_u32::<LittleEndian>().ok()?;
if version != ndisys::NDIlib_compressed_packet_version_0 {
return None;
}
let fourcc = cursor.read_u32::<LittleEndian>().ok()?;
let pts = cursor.read_i64::<LittleEndian>().ok()?;
let dts = cursor.read_i64::<LittleEndian>().ok()?;
let _reserved = cursor.read_u64::<LittleEndian>().ok()?;
let flags = cursor.read_u32::<LittleEndian>().ok()?;
let data_size = cursor.read_u32::<LittleEndian>().ok()?;
let extra_data_size = cursor.read_u32::<LittleEndian>().ok()?;
let expected_size = (ndisys::NDIlib_compressed_packet_version_0 as usize)
.checked_add(data_size as usize)?
.checked_add(extra_data_size as usize)?;
if data.len() < expected_size {
return None;
}
Some(CompressedPacket {
fourcc,
pts,
dts,
key_frame: flags & ndisys::NDIlib_compressed_packet_flags_keyframe != 0,
data: &data[ndisys::NDIlib_compressed_packet_version_0 as usize..]
[..data_size as usize],
extra_data: if extra_data_size > 0 {
Some(
&data[ndisys::NDIlib_compressed_packet_version_0 as usize
+ data_size as usize..][..extra_data_size as usize],
)
} else {
None
},
})
}
}
pub fn channel_stride_or_data_size_in_bytes(&self) -> i32 {
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
frame.channel_stride_in_bytes
frame.channel_stride_or_data_size_in_bytes
}
}
}
@ -844,73 +1002,55 @@ impl<'a> AudioFrame<'a> {
}
}
pub fn as_ptr(&self) -> *const NDIlib_audio_frame_v2_t {
pub fn as_ptr(&self) -> *const NDIlib_audio_frame_v3_t {
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => frame,
}
}
/// Converts this audio frame into interleaved signed 16-bit samples via the
/// NDI SDK utility, writing the result into `data`.
///
/// # Panics
///
/// Panics if `data` does not hold exactly `no_samples * no_channels`
/// samples.
pub fn copy_to_interleaved_16s(&self, data: &mut [i16]) {
    assert_eq!(
        data.len(),
        (self.no_samples() * self.no_channels()) as usize
    );

    // Destination descriptor for the SDK conversion; reference_level 0 and
    // the frame's own rate/channels/timecode are passed through.
    let mut dst = NDIlib_audio_frame_interleaved_16s_t {
        sample_rate: self.sample_rate(),
        no_channels: self.no_channels(),
        no_samples: self.no_samples(),
        timecode: self.timecode(),
        reference_level: 0,
        p_data: data.as_mut_ptr(),
    };

    unsafe {
        // SAFETY: `dst.p_data` points at a buffer whose length was asserted
        // above to match the sample count the SDK conversion will write.
        NDIlib_util_audio_to_interleaved_16s_v2(self.as_ptr(), &mut dst);
    }
}
pub fn try_from_interleaved_16s(
pub fn try_from_buffer(
info: &gst_audio::AudioInfo,
buffer: &gst::BufferRef,
timecode: i64,
) -> Result<Self, ()> {
if info.format() != gst_audio::AUDIO_FORMAT_S16 {
if info.format() != gst_audio::AUDIO_FORMAT_F32 {
return Err(());
}
let map = buffer.map_readable().map_err(|_| ())?;
let src_data = map.as_slice_of::<i16>().map_err(|_| ())?;
let src_data = map.as_slice_of::<f32>().map_err(|_| ())?;
let src = NDIlib_audio_frame_interleaved_16s_t {
let no_samples = src_data.len() as i32 / info.channels() as i32;
let channel_stride_or_data_size_in_bytes = no_samples * mem::size_of::<f32>() as i32;
let mut dest_data =
Vec::<f32>::with_capacity(no_samples as usize * info.channels() as usize);
assert_eq!(dest_data.capacity(), src_data.len());
unsafe {
let dest_ptr = dest_data.as_mut_ptr();
for (i, samples) in src_data.chunks_exact(info.channels() as usize).enumerate() {
for (c, sample) in samples.iter().enumerate() {
ptr::write(dest_ptr.add(c * no_samples as usize + i), *sample);
}
}
dest_data.set_len(no_samples as usize * info.channels() as usize);
}
let dest = NDIlib_audio_frame_v3_t {
sample_rate: info.rate() as i32,
no_channels: info.channels() as i32,
no_samples: src_data.len() as i32 / info.channels() as i32,
no_samples,
timecode,
reference_level: 0,
p_data: src_data.as_ptr() as *mut i16,
};
let channel_stride_in_bytes = src.no_samples * mem::size_of::<f32>() as i32;
let mut dest_data =
Vec::with_capacity(channel_stride_in_bytes as usize * info.channels() as usize);
let mut dest = NDIlib_audio_frame_v2_t {
sample_rate: src.sample_rate,
no_channels: src.no_channels,
no_samples: src.no_samples,
timecode: src.timecode,
FourCC: NDIlib_FourCC_audio_type_FLTp,
p_data: dest_data.as_mut_ptr(),
channel_stride_in_bytes,
channel_stride_or_data_size_in_bytes,
p_metadata: ptr::null(),
timestamp: 0,
};
unsafe {
NDIlib_util_audio_from_interleaved_16s_v2(&src, &mut dest);
dest_data.set_len(dest_data.capacity());
}
Ok(AudioFrame::Owned(dest, None, Some(dest_data)))
}
}
@ -918,14 +1058,24 @@ impl<'a> AudioFrame<'a> {
impl<'a> Drop for AudioFrame<'a> {
#[allow(irrefutable_let_patterns)]
fn drop(&mut self) {
if let AudioFrame::BorrowedRecv(ref mut frame, ref recv) = *self {
if let AudioFrame::BorrowedRecv(ref mut frame, recv) = *self {
unsafe {
NDIlib_recv_free_audio_v2(((recv.0).0).0.as_ptr() as *mut _, frame);
NDIlib_recv_free_audio_v3(recv.0.as_ptr() as *mut _, frame);
}
}
}
}
#[cfg(feature = "advanced-sdk")]
/// A parsed NDI compressed packet (v0) header, with payload slices borrowed
/// from the frame buffer it was extracted from.
pub struct CompressedPacket<'a> {
    /// Codec FourCC of the payload, as read from the packet header.
    pub fourcc: ndisys::NDIlib_compressed_FourCC_type_e,
    /// Presentation timestamp from the packet header (NDI SDK units).
    pub pts: i64,
    /// Decode timestamp from the packet header (NDI SDK units).
    pub dts: i64,
    /// Whether the keyframe flag is set in the packet header.
    pub key_frame: bool,
    /// Compressed payload bytes.
    pub data: &'a [u8],
    /// Codec extra data, present only when the packet carried any.
    pub extra_data: Option<&'a [u8]>,
}
#[derive(Debug)]
pub enum MetadataFrame<'a> {
Owned(NDIlib_metadata_frame_t, Option<ffi::CString>),
@ -1010,9 +1160,9 @@ impl<'a> Default for MetadataFrame<'a> {
impl<'a> Drop for MetadataFrame<'a> {
fn drop(&mut self) {
if let MetadataFrame::Borrowed(ref mut frame, ref recv) = *self {
if let MetadataFrame::Borrowed(ref mut frame, recv) = *self {
unsafe {
NDIlib_recv_free_metadata(((recv.0).0).0.as_ptr() as *mut _, frame);
NDIlib_recv_free_metadata(recv.0.as_ptr() as *mut _, frame);
}
}
}

View file

@ -1,561 +0,0 @@
use glib::subclass;
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_element_error, gst_error, gst_error_msg};
use gst_base::prelude::*;
use gst_base::subclass::base_src::CreateSuccess;
use gst_base::subclass::prelude::*;
use std::sync::Mutex;
use std::{i32, u32};
use crate::connect_ndi;
use crate::ndisys;
use crate::AudioReceiver;
use crate::Receiver;
use crate::ReceiverControlHandle;
use crate::ReceiverItem;
use crate::TimestampMode;
use crate::DEFAULT_RECEIVER_NDI_NAME;
/// Element properties; stored behind a mutex on the element and cloned on
/// `start`.
#[derive(Debug, Clone)]
struct Settings {
    /// NDI stream name of the sender to connect to. At least one of
    /// `ndi_name`/`url_address` must be set before starting.
    ndi_name: Option<String>,
    /// URL/address and port of the sender, e.g. 127.0.0.1:5961.
    url_address: Option<String>,
    /// Connection timeout in milliseconds.
    connect_timeout: u32,
    /// Receive timeout in milliseconds.
    timeout: u32,
    /// NDI name this receiver announces itself with.
    receiver_ndi_name: String,
    /// Requested bandwidth: -10 metadata-only, 10 audio-only, 100 highest.
    bandwidth: ndisys::NDIlib_recv_bandwidth_e,
    /// Which timestamp information is used for outgoing PTS.
    timestamp_mode: TimestampMode,
}
impl Default for Settings {
    /// Defaults mirror the property declarations: no sender selected yet,
    /// 10 s connect timeout, 5 s receive timeout, highest bandwidth,
    /// receive-time/timecode timestamping.
    fn default() -> Self {
        // Field order follows the struct declaration.
        Settings {
            ndi_name: None,
            url_address: None,
            connect_timeout: 10000,
            timeout: 5000,
            receiver_ndi_name: DEFAULT_RECEIVER_NDI_NAME.clone(),
            bandwidth: ndisys::NDIlib_recv_bandwidth_highest,
            timestamp_mode: TimestampMode::ReceiveTimeTimecode,
        }
    }
}
// GObject property definitions for the element, installed in `class_init`.
// The array index is the property id passed to `set_property`/`get_property`,
// so the order here must stay in sync with those match arms.
static PROPERTIES: [subclass::Property; 7] = [
    subclass::Property("ndi-name", |name| {
        glib::ParamSpec::string(
            name,
            "NDI Name",
            "NDI stream name of the sender",
            None,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("url-address", |name| {
        glib::ParamSpec::string(
            name,
            "URL/Address",
            "URL/address and port of the sender, e.g. 127.0.0.1:5961",
            None,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("receiver-ndi-name", |name| {
        glib::ParamSpec::string(
            name,
            "Receiver NDI Name",
            "NDI stream name of this receiver",
            Some(&*DEFAULT_RECEIVER_NDI_NAME),
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("connect-timeout", |name| {
        glib::ParamSpec::uint(
            name,
            "Connect Timeout",
            "Connection timeout in ms",
            0,
            u32::MAX,
            10000,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("timeout", |name| {
        glib::ParamSpec::uint(
            name,
            "Timeout",
            "Receive timeout in ms",
            0,
            u32::MAX,
            5000,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("bandwidth", |name| {
        glib::ParamSpec::int(
            name,
            "Bandwidth",
            "Bandwidth, -10 metadata-only, 10 audio-only, 100 highest",
            -10,
            100,
            100,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("timestamp-mode", |name| {
        glib::ParamSpec::enum_(
            name,
            "Timestamp Mode",
            "Timestamp information to use for outgoing PTS",
            TimestampMode::static_type(),
            TimestampMode::ReceiveTimeTimecode as i32,
            glib::ParamFlags::READWRITE,
        )
    }),
];
/// Mutable runtime state; reset to `Default` on `start`/`stop`.
struct State {
    /// Audio info of the most recently configured caps, if any.
    info: Option<gst_audio::AudioInfo>,
    /// Active NDI receiver; temporarily taken out while capturing in
    /// `create` so the state lock is not held across the capture call.
    receiver: Option<Receiver<AudioReceiver>>,
    /// Duration of the last received buffer; reported as latency in the
    /// latency query.
    current_latency: gst::ClockTime,
}
impl Default for State {
    /// Fresh state: nothing negotiated, no receiver connected, latency
    /// unknown until the first buffer arrives.
    fn default() -> State {
        State {
            receiver: None,
            info: None,
            current_latency: gst::CLOCK_TIME_NONE,
        }
    }
}
/// Implementation struct of the `ndiaudiosrc` element.
pub(crate) struct NdiAudioSrc {
    /// Debug category used for all logging from this element.
    cat: gst::DebugCategory,
    /// Property storage (see `PROPERTIES`).
    settings: Mutex<Settings>,
    /// Runtime state, valid between `start` and `stop`.
    state: Mutex<State>,
    /// Handle used to pause/flush/shut down the receiver from state-change
    /// and unlock handlers.
    receiver_controller: Mutex<Option<ReceiverControlHandle<AudioReceiver>>>,
}
impl ObjectSubclass for NdiAudioSrc {
    const NAME: &'static str = "NdiAudioSrc";
    type ParentType = gst_base::BaseSrc;
    type Instance = gst::subclass::ElementInstanceStruct<Self>;
    type Class = subclass::simple::ClassStruct<Self>;

    glib::glib_object_subclass!();

    /// Per-instance construction: debug category plus empty settings/state.
    fn new() -> Self {
        Self {
            cat: gst::DebugCategory::new(
                "ndiaudiosrc",
                gst::DebugColorFlags::empty(),
                Some("NewTek NDI Audio Source"),
            ),
            settings: Mutex::new(Default::default()),
            state: Mutex::new(Default::default()),
            receiver_controller: Mutex::new(None),
        }
    }

    /// One-time class setup: element metadata, the always-present src pad
    /// template, and the property definitions.
    fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
        klass.set_metadata(
            "NewTek NDI Audio Source",
            "Source",
            "NewTek NDI audio source",
            "Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>, Sebastian Dröge <sebastian@centricular.com>",
        );

        // Advertise interleaved S16 at any rate/channel count; the concrete
        // caps are only set once the first buffer has been received.
        let caps = gst::Caps::new_simple(
            "audio/x-raw",
            &[
                (
                    "format",
                    &gst::List::new(&[&gst_audio::AUDIO_FORMAT_S16.to_string()]),
                ),
                ("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
                ("channels", &gst::IntRange::<i32>::new(1, i32::MAX)),
                ("layout", &"interleaved"),
            ],
        );

        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &caps,
        )
        .unwrap();
        klass.add_pad_template(src_pad_template);

        klass.install_properties(&PROPERTIES);
    }
}
impl ObjectImpl for NdiAudioSrc {
    glib::glib_object_impl!();

    /// Post-construction setup: configure the base class as a live source
    /// operating in time format.
    fn constructed(&self, obj: &glib::Object) {
        self.parent_constructed(obj);

        let basesrc = obj.downcast_ref::<gst_base::BaseSrc>().unwrap();
        // Initialize live-ness and notify the base class that
        // we'd like to operate in Time format
        basesrc.set_live(true);
        basesrc.set_format(gst::Format::Time);
    }

    /// Property writes. `id` indexes into `PROPERTIES`; each arm logs the
    /// old/new value and stores the new one under the settings lock.
    fn set_property(&self, obj: &glib::Object, id: usize, value: &glib::Value) {
        let prop = &PROPERTIES[id];
        let basesrc = obj.downcast_ref::<gst_base::BaseSrc>().unwrap();

        match *prop {
            subclass::Property("ndi-name", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let ndi_name = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing ndi-name from {:?} to {:?}",
                    settings.ndi_name,
                    ndi_name,
                );
                settings.ndi_name = ndi_name;
            }
            subclass::Property("url-address", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let url_address = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing url-address from {:?} to {:?}",
                    settings.url_address,
                    url_address,
                );
                settings.url_address = url_address;
            }
            subclass::Property("receiver-ndi-name", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let receiver_ndi_name = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing receiver-ndi-name from {:?} to {:?}",
                    settings.receiver_ndi_name,
                    receiver_ndi_name,
                );
                // Unset means: fall back to the process-wide default name.
                settings.receiver_ndi_name =
                    receiver_ndi_name.unwrap_or_else(|| DEFAULT_RECEIVER_NDI_NAME.clone());
            }
            subclass::Property("connect-timeout", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let connect_timeout = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing connect-timeout from {} to {}",
                    settings.connect_timeout,
                    connect_timeout,
                );
                settings.connect_timeout = connect_timeout;
            }
            subclass::Property("timeout", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let timeout = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing timeout from {} to {}",
                    settings.timeout,
                    timeout,
                );
                settings.timeout = timeout;
            }
            subclass::Property("bandwidth", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let bandwidth = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing bandwidth from {} to {}",
                    settings.bandwidth,
                    bandwidth,
                );
                settings.bandwidth = bandwidth;
            }
            subclass::Property("timestamp-mode", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let timestamp_mode = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing timestamp mode from {:?} to {:?}",
                    settings.timestamp_mode,
                    timestamp_mode
                );
                // The latency reported in the latency query depends on the
                // timestamp mode, so ask the pipeline to requery it when the
                // mode actually changes.
                if settings.timestamp_mode != timestamp_mode {
                    let _ =
                        basesrc.post_message(gst::message::Latency::builder().src(basesrc).build());
                }
                settings.timestamp_mode = timestamp_mode;
            }
            _ => unimplemented!(),
        }
    }

    /// Property reads, mirroring the ids used in `set_property`.
    fn get_property(&self, _obj: &glib::Object, id: usize) -> Result<glib::Value, ()> {
        let prop = &PROPERTIES[id];

        match *prop {
            subclass::Property("ndi-name", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.ndi_name.to_value())
            }
            subclass::Property("url-address", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.url_address.to_value())
            }
            subclass::Property("receiver-ndi-name", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.receiver_ndi_name.to_value())
            }
            subclass::Property("connect-timeout", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.connect_timeout.to_value())
            }
            subclass::Property("timeout", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.timeout.to_value())
            }
            subclass::Property("bandwidth", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.bandwidth.to_value())
            }
            subclass::Property("timestamp-mode", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.timestamp_mode.to_value())
            }
            _ => unimplemented!(),
        }
    }
}
impl ElementImpl for NdiAudioSrc {
fn change_state(
&self,
element: &gst::Element,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
match transition {
gst::StateChange::PausedToPlaying => {
if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
controller.set_playing(true);
}
}
gst::StateChange::PlayingToPaused => {
if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
controller.set_playing(false);
}
}
gst::StateChange::PausedToReady => {
if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
controller.shutdown();
}
}
_ => (),
}
self.parent_change_state(element, transition)
}
}
impl BaseSrcImpl for NdiAudioSrc {
    fn negotiate(&self, _element: &gst_base::BaseSrc) -> Result<(), gst::LoggableError> {
        // Always succeed here without doing anything: we will set the caps once we received a
        // buffer, there's nothing we can negotiate
        Ok(())
    }

    /// Puts the receiver into flushing mode so a blocked `create` returns.
    fn unlock(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
        gst_debug!(self.cat, obj: element, "Unlocking",);
        if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
            controller.set_flushing(true);
        }
        Ok(())
    }

    /// Leaves flushing mode again after `unlock`.
    fn unlock_stop(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
        gst_debug!(self.cat, obj: element, "Stop unlocking",);
        if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
            controller.set_flushing(false);
        }
        Ok(())
    }

    /// Validates the configuration and connects to the NDI source.
    ///
    /// Fails with a settings error if neither `ndi-name` nor `url-address`
    /// is set, and with a resource error if the connection cannot be made.
    fn start(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
        *self.state.lock().unwrap() = Default::default();
        let settings = self.settings.lock().unwrap().clone();

        if settings.ndi_name.is_none() && settings.url_address.is_none() {
            return Err(gst_error_msg!(
                gst::LibraryError::Settings,
                ["No NDI name or URL/address given"]
            ));
        }

        let receiver = connect_ndi(
            self.cat,
            element,
            settings.ndi_name.as_deref(),
            settings.url_address.as_deref(),
            &settings.receiver_ndi_name,
            settings.connect_timeout,
            settings.bandwidth,
            settings.timestamp_mode,
            settings.timeout,
        );

        // settings.id_receiver exists
        match receiver {
            None => Err(gst_error_msg!(
                gst::ResourceError::NotFound,
                ["Could not connect to this source"]
            )),
            Some(receiver) => {
                // Keep a control handle around so unlock/state changes can
                // reach the receiver, then store the receiver in the state.
                *self.receiver_controller.lock().unwrap() =
                    Some(receiver.receiver_control_handle());
                let mut state = self.state.lock().unwrap();
                state.receiver = Some(receiver);

                Ok(())
            }
        }
    }

    /// Shuts the receiver down and resets all runtime state.
    fn stop(&self, _element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
        if let Some(ref controller) = self.receiver_controller.lock().unwrap().take() {
            controller.shutdown();
        }
        *self.state.lock().unwrap() = State::default();
        Ok(())
    }

    /// Answers scheduling (push-only, sequential live source) and latency
    /// queries; everything else is delegated to the parent class.
    fn query(&self, element: &gst_base::BaseSrc, query: &mut gst::QueryRef) -> bool {
        use gst::QueryView;

        match query.view_mut() {
            QueryView::Scheduling(ref mut q) => {
                q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0);
                q.add_scheduling_modes(&[gst::PadMode::Push]);
                true
            }
            QueryView::Latency(ref mut q) => {
                let state = self.state.lock().unwrap();
                let settings = self.settings.lock().unwrap();

                // current_latency is the last buffer's duration; unknown
                // until the first buffer arrived, in which case the query
                // cannot be answered yet.
                if state.current_latency.is_some() {
                    // In Timecode mode timestamps come straight from the
                    // sender, so no minimum latency is introduced here.
                    let min = if settings.timestamp_mode != TimestampMode::Timecode {
                        state.current_latency
                    } else {
                        0.into()
                    };

                    // NOTE(review): max latency is a heuristic multiple of
                    // the observed buffer duration.
                    let max = 5 * state.current_latency;

                    gst_debug!(
                        self.cat,
                        obj: element,
                        "Returning latency min {} max {}",
                        min,
                        max
                    );
                    q.set(true, min, max);
                    true
                } else {
                    false
                }
            }
            _ => BaseSrcImplExt::parent_query(self, element, query),
        }
    }

    /// Fixates towards 48 kHz stereo, the values closest to typical NDI
    /// audio, before letting the parent finish fixation.
    fn fixate(&self, element: &gst_base::BaseSrc, mut caps: gst::Caps) -> gst::Caps {
        caps.truncate();
        {
            let caps = caps.make_mut();
            let s = caps.get_mut_structure(0).unwrap();
            s.fixate_field_nearest_int("rate", 48_000);
            s.fixate_field_nearest_int("channels", 2);
        }

        self.parent_fixate(element, caps)
    }

    /// Captures the next buffer from the receiver.
    ///
    /// The receiver is taken out of the state so the state mutex is not held
    /// while `capture()` blocks; it is put back once a buffer arrives. On a
    /// caps change the new caps are configured downstream and a latency
    /// message is posted. A receive timeout is surfaced as EOS.
    fn create(
        &self,
        element: &gst_base::BaseSrc,
        _offset: u64,
        _buffer: Option<&mut gst::BufferRef>,
        _length: u32,
    ) -> Result<CreateSuccess, gst::FlowError> {
        let recv = {
            let mut state = self.state.lock().unwrap();
            match state.receiver.take() {
                Some(recv) => recv,
                None => {
                    gst_error!(self.cat, obj: element, "Have no receiver");
                    return Err(gst::FlowError::Error);
                }
            }
        };

        match recv.capture() {
            ReceiverItem::Buffer(buffer, info) => {
                let mut state = self.state.lock().unwrap();
                state.receiver = Some(recv);

                // First buffer, or the stream format changed: negotiate the
                // new caps and update the reported latency.
                if state.info.as_ref() != Some(&info) {
                    let caps = info.to_caps().map_err(|_| {
                        gst_element_error!(
                            element,
                            gst::ResourceError::Settings,
                            ["Invalid audio info received: {:?}", info]
                        );
                        gst::FlowError::NotNegotiated
                    })?;
                    state.info = Some(info);
                    state.current_latency = buffer.get_duration();
                    drop(state);
                    gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);
                    element.set_caps(&caps).map_err(|_| {
                        gst_element_error!(
                            element,
                            gst::CoreError::Negotiation,
                            ["Failed to negotiate caps: {:?}", caps]
                        );
                        gst::FlowError::NotNegotiated
                    })?;

                    let _ =
                        element.post_message(gst::message::Latency::builder().src(element).build());
                }

                Ok(CreateSuccess::NewBuffer(buffer))
            }
            ReceiverItem::Flushing => Err(gst::FlowError::Flushing),
            ReceiverItem::Timeout => Err(gst::FlowError::Eos),
            ReceiverItem::Error(err) => Err(err),
        }
    }
}
/// Registers the `ndiaudiosrc` element with the given plugin.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    let element_type = NdiAudioSrc::get_type();
    gst::Element::register(Some(plugin), "ndiaudiosrc", gst::Rank::None, element_type)
}

View file

@ -1,15 +1,15 @@
use glib::subclass;
use glib::subclass::prelude::*;
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_error, gst_error_msg, gst_info, gst_loggable_error, gst_trace};
use gst_base::{subclass::prelude::*, BaseSinkExtManual};
use gst::{gst_debug, gst_error, gst_info, gst_trace};
use gst_base::prelude::*;
use gst_base::subclass::prelude::*;
use std::sync::Mutex;
use once_cell::sync::Lazy;
use super::ndi::SendInstance;
use crate::ndi::SendInstance;
static DEFAULT_SENDER_NDI_NAME: Lazy<String> = Lazy::new(|| {
format!(
@ -32,16 +32,6 @@ impl Default for Settings {
}
}
// Property definitions for the sink; the array index is the property id
// used by `set_property`/`get_property`.
static PROPERTIES: [subclass::Property; 1] = [subclass::Property("ndi-name", |name| {
    glib::ParamSpec::string(
        name,
        "NDI Name",
        "NDI Name to use",
        Some(DEFAULT_SENDER_NDI_NAME.as_ref()),
        glib::ParamFlags::READWRITE,
    )
})];
struct State {
send: SendInstance,
video_info: Option<gst_video::VideoInfo>,
@ -57,13 +47,11 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new("ndisink", gst::DebugColorFlags::empty(), Some("NDI Sink"))
});
#[glib::object_subclass]
impl ObjectSubclass for NdiSink {
const NAME: &'static str = "NdiSink";
type Type = super::NdiSink;
type ParentType = gst_base::BaseSink;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
glib::glib_object_subclass!();
fn new() -> Self {
Self {
@ -71,106 +59,129 @@ impl ObjectSubclass for NdiSink {
state: Mutex::new(Default::default()),
}
}
fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
klass.set_metadata(
"NDI Sink",
"Sink/Audio/Video",
"Render as an NDI stream",
"Sebastian Dröge <sebastian@centricular.com>",
);
let caps = gst::Caps::builder_full()
.structure(
gst::Structure::builder("video/x-raw")
.field(
"format",
&gst::List::new(&[
&gst_video::VideoFormat::Uyvy.to_str(),
&gst_video::VideoFormat::I420.to_str(),
&gst_video::VideoFormat::Nv12.to_str(),
&gst_video::VideoFormat::Nv21.to_str(),
&gst_video::VideoFormat::Yv12.to_str(),
&gst_video::VideoFormat::Bgra.to_str(),
&gst_video::VideoFormat::Bgrx.to_str(),
&gst_video::VideoFormat::Rgba.to_str(),
&gst_video::VideoFormat::Rgbx.to_str(),
]),
)
.field("width", &gst::IntRange::<i32>::new(1, std::i32::MAX))
.field("height", &gst::IntRange::<i32>::new(1, std::i32::MAX))
.field(
"framerate",
&gst::FractionRange::new(
gst::Fraction::new(0, 1),
gst::Fraction::new(std::i32::MAX, 1),
),
)
.build(),
)
.structure(
gst::Structure::builder("audio/x-raw")
.field("format", &gst_audio::AUDIO_FORMAT_S16.to_str())
.field("rate", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("channels", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("layout", &"interleaved")
.build(),
)
.build();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
klass.add_pad_template(sink_pad_template);
klass.install_properties(&PROPERTIES);
}
}
impl ObjectImpl for NdiSink {
glib::glib_object_impl!();
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![glib::ParamSpec::new_string(
"ndi-name",
"NDI Name",
"NDI Name to use",
Some(DEFAULT_SENDER_NDI_NAME.as_ref()),
glib::ParamFlags::READWRITE,
)]
});
fn set_property(&self, _obj: &glib::Object, id: usize, value: &glib::Value) {
let prop = &PROPERTIES[id];
match *prop {
subclass::Property("ndi-name", ..) => {
PROPERTIES.as_ref()
}
fn set_property(
&self,
_obj: &Self::Type,
_id: usize,
value: &glib::Value,
pspec: &glib::ParamSpec,
) {
match pspec.name() {
"ndi-name" => {
let mut settings = self.settings.lock().unwrap();
settings.ndi_name = value
.get::<String>()
.unwrap()
.unwrap_or_else(|| DEFAULT_SENDER_NDI_NAME.clone());
.unwrap_or_else(|_| DEFAULT_SENDER_NDI_NAME.clone());
}
_ => unimplemented!(),
};
}
fn get_property(&self, _obj: &glib::Object, id: usize) -> Result<glib::Value, ()> {
let prop = &PROPERTIES[id];
match *prop {
subclass::Property("ndi-name", ..) => {
fn property(&self, _obj: &Self::Type, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
match pspec.name() {
"ndi-name" => {
let settings = self.settings.lock().unwrap();
Ok(settings.ndi_name.to_value())
settings.ndi_name.to_value()
}
_ => unimplemented!(),
}
}
}
impl ElementImpl for NdiSink {}
impl ElementImpl for NdiSink {
    /// Static element metadata shown by `gst-inspect` and friends.
    fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
        static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
            gst::subclass::ElementMetadata::new(
                "NDI Sink",
                "Sink/Audio/Video",
                "Render as an NDI stream",
                "Sebastian Dröge <sebastian@centricular.com>",
            )
        });

        Some(&*ELEMENT_METADATA)
    }

    /// A single always-present sink pad that accepts either raw video in
    /// the listed formats or interleaved F32 raw audio.
    fn pad_templates() -> &'static [gst::PadTemplate] {
        static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
            let caps = gst::Caps::builder_full()
                .structure(
                    gst::Structure::builder("video/x-raw")
                        .field(
                            "format",
                            &gst::List::new(&[
                                &gst_video::VideoFormat::Uyvy.to_str(),
                                &gst_video::VideoFormat::I420.to_str(),
                                &gst_video::VideoFormat::Nv12.to_str(),
                                &gst_video::VideoFormat::Nv21.to_str(),
                                &gst_video::VideoFormat::Yv12.to_str(),
                                &gst_video::VideoFormat::Bgra.to_str(),
                                &gst_video::VideoFormat::Bgrx.to_str(),
                                &gst_video::VideoFormat::Rgba.to_str(),
                                &gst_video::VideoFormat::Rgbx.to_str(),
                            ]),
                        )
                        .field("width", &gst::IntRange::<i32>::new(1, std::i32::MAX))
                        .field("height", &gst::IntRange::<i32>::new(1, std::i32::MAX))
                        .field(
                            "framerate",
                            &gst::FractionRange::new(
                                gst::Fraction::new(0, 1),
                                gst::Fraction::new(std::i32::MAX, 1),
                            ),
                        )
                        .build(),
                )
                .structure(
                    gst::Structure::builder("audio/x-raw")
                        .field("format", &gst_audio::AUDIO_FORMAT_F32.to_str())
                        .field("rate", &gst::IntRange::<i32>::new(1, i32::MAX))
                        .field("channels", &gst::IntRange::<i32>::new(1, i32::MAX))
                        .field("layout", &"interleaved")
                        .build(),
                )
                .build();
            let sink_pad_template = gst::PadTemplate::new(
                "sink",
                gst::PadDirection::Sink,
                gst::PadPresence::Always,
                &caps,
            )
            .unwrap();
            vec![sink_pad_template]
        });

        PAD_TEMPLATES.as_ref()
    }
}
impl BaseSinkImpl for NdiSink {
fn start(&self, element: &gst_base::BaseSink) -> Result<(), gst::ErrorMessage> {
fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
let mut state_storage = self.state.lock().unwrap();
let settings = self.settings.lock().unwrap();
let send = SendInstance::builder(&settings.ndi_name)
.build()
.ok_or_else(|| {
gst_error_msg!(
gst::error_msg!(
gst::ResourceError::OpenWrite,
["Could not create send instance"]
)
@ -187,7 +198,7 @@ impl BaseSinkImpl for NdiSink {
Ok(())
}
fn stop(&self, element: &gst_base::BaseSink) -> Result<(), gst::ErrorMessage> {
fn stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
let mut state_storage = self.state.lock().unwrap();
*state_storage = None;
@ -196,37 +207,33 @@ impl BaseSinkImpl for NdiSink {
Ok(())
}
fn unlock(&self, _element: &gst_base::BaseSink) -> Result<(), gst::ErrorMessage> {
fn unlock(&self, _element: &Self::Type) -> Result<(), gst::ErrorMessage> {
Ok(())
}
fn unlock_stop(&self, _element: &gst_base::BaseSink) -> Result<(), gst::ErrorMessage> {
fn unlock_stop(&self, _element: &Self::Type) -> Result<(), gst::ErrorMessage> {
Ok(())
}
fn set_caps(
&self,
element: &gst_base::BaseSink,
caps: &gst::Caps,
) -> Result<(), gst::LoggableError> {
fn set_caps(&self, element: &Self::Type, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst_debug!(CAT, obj: element, "Setting caps {}", caps);
let mut state_storage = self.state.lock().unwrap();
let state = match &mut *state_storage {
None => return Err(gst_loggable_error!(CAT, "Sink not started yet")),
None => return Err(gst::loggable_error!(CAT, "Sink not started yet")),
Some(ref mut state) => state,
};
let s = caps.get_structure(0).unwrap();
if s.get_name() == "video/x-raw" {
let s = caps.structure(0).unwrap();
if s.name() == "video/x-raw" {
let info = gst_video::VideoInfo::from_caps(caps)
.map_err(|_| gst_loggable_error!(CAT, "Couldn't parse caps {}", caps))?;
.map_err(|_| gst::loggable_error!(CAT, "Couldn't parse caps {}", caps))?;
state.video_info = Some(info);
state.audio_info = None;
} else {
let info = gst_audio::AudioInfo::from_caps(caps)
.map_err(|_| gst_loggable_error!(CAT, "Couldn't parse caps {}", caps))?;
.map_err(|_| gst::loggable_error!(CAT, "Couldn't parse caps {}", caps))?;
state.audio_info = Some(info);
state.video_info = None;
@ -237,7 +244,7 @@ impl BaseSinkImpl for NdiSink {
fn render(
&self,
element: &gst_base::BaseSink,
element: &Self::Type,
buffer: &gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut state_storage = self.state.lock().unwrap();
@ -247,11 +254,10 @@ impl BaseSinkImpl for NdiSink {
};
if let Some(ref info) = state.video_info {
if let Some(audio_meta) = buffer.get_meta::<crate::ndisinkmeta::NdiSinkAudioMeta>() {
if let Some(audio_meta) = buffer.meta::<crate::ndisinkmeta::NdiSinkAudioMeta>() {
for (buffer, info, timecode) in audio_meta.buffers() {
let frame =
crate::ndi::AudioFrame::try_from_interleaved_16s(info, buffer, *timecode)
.map_err(|_| {
let frame = crate::ndi::AudioFrame::try_from_buffer(info, buffer, *timecode)
.map_err(|_| {
gst_error!(CAT, obj: element, "Unsupported audio frame");
gst::FlowError::NotNegotiated
})?;
@ -262,9 +268,9 @@ impl BaseSinkImpl for NdiSink {
"Sending audio buffer {:?} with timecode {} and format {:?}",
buffer,
if *timecode < 0 {
gst::CLOCK_TIME_NONE
gst::ClockTime::NONE.display()
} else {
gst::ClockTime::from(*timecode as u64 * 100)
Some(gst::ClockTime::from_nseconds(*timecode as u64 * 100)).display()
},
info,
);
@ -273,15 +279,18 @@ impl BaseSinkImpl for NdiSink {
}
// Skip empty/gap buffers from ndisinkcombiner
if buffer.get_size() != 0 {
if buffer.size() != 0 {
let timecode = element
.get_segment()
.segment()
.downcast::<gst::ClockTime>()
.ok()
.and_then(|segment| {
*(segment.to_running_time(buffer.get_pts()) + element.get_base_time())
segment
.to_running_time(buffer.pts())
.zip(element.base_time())
})
.map(|time| (time / 100) as i64)
.and_then(|(running_time, base_time)| running_time.checked_add(base_time))
.map(|time| (time.nseconds() / 100) as i64)
.unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize);
let frame = gst_video::VideoFrameRef::from_buffer_ref_readable(buffer, info)
@ -302,9 +311,9 @@ impl BaseSinkImpl for NdiSink {
"Sending video buffer {:?} with timecode {} and format {:?}",
buffer,
if timecode < 0 {
gst::CLOCK_TIME_NONE
gst::ClockTime::NONE.display()
} else {
gst::ClockTime::from(timecode as u64 * 100)
Some(gst::ClockTime::from_nseconds(timecode as u64 * 100)).display()
},
info
);
@ -312,17 +321,20 @@ impl BaseSinkImpl for NdiSink {
}
} else if let Some(ref info) = state.audio_info {
let timecode = element
.get_segment()
.segment()
.downcast::<gst::ClockTime>()
.ok()
.and_then(|segment| {
*(segment.to_running_time(buffer.get_pts()) + element.get_base_time())
segment
.to_running_time(buffer.pts())
.zip(element.base_time())
})
.map(|time| (time / 100) as i64)
.and_then(|(running_time, base_time)| running_time.checked_add(base_time))
.map(|time| (time.nseconds() / 100) as i64)
.unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize);
let frame = crate::ndi::AudioFrame::try_from_interleaved_16s(info, buffer, timecode)
.map_err(|_| {
let frame =
crate::ndi::AudioFrame::try_from_buffer(info, buffer, timecode).map_err(|_| {
gst_error!(CAT, obj: element, "Unsupported audio frame");
gst::FlowError::NotNegotiated
})?;
@ -333,9 +345,9 @@ impl BaseSinkImpl for NdiSink {
"Sending audio buffer {:?} with timecode {} and format {:?}",
buffer,
if timecode < 0 {
gst::CLOCK_TIME_NONE
gst::ClockTime::NONE.display()
} else {
gst::ClockTime::from(timecode as u64 * 100)
Some(gst::ClockTime::from_nseconds(timecode as u64 * 100)).display()
},
info,
);
@ -347,12 +359,3 @@ impl BaseSinkImpl for NdiSink {
Ok(gst::FlowSuccess::Ok)
}
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"ndisink",
gst::Rank::None,
NdiSink::get_type(),
)
}

19
src/ndisink/mod.rs Normal file
View file

@ -0,0 +1,19 @@
use glib::prelude::*;
mod imp;
// Public wrapper type for the NDI sink element, exposing it as a
// `gst_base::BaseSink` subclass backed by `imp::NdiSink`.
glib::wrapper! {
    pub struct NdiSink(ObjectSubclass<imp::NdiSink>) @extends gst_base::BaseSink, gst::Element, gst::Object;
}

// SAFETY: NOTE(review) — GStreamer elements are expected to be usable from
// any thread; this assumes `imp::NdiSink`'s state is fully behind locks
// (its fields are `Mutex`-wrapped in the imp module) — confirm if new
// non-synchronized state is ever added there.
unsafe impl Send for NdiSink {}
unsafe impl Sync for NdiSink {}
/// Registers the `ndisink` element with the given plugin.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    let element_type = NdiSink::static_type();
    gst::Element::register(Some(plugin), "ndisink", gst::Rank::None, element_type)
}

View file

@ -1,5 +1,4 @@
use glib::prelude::*;
use glib::subclass;
use glib::subclass::prelude::*;
use gst::prelude::*;
use gst::subclass::prelude::*;
@ -7,6 +6,8 @@ use gst::{gst_debug, gst_error, gst_trace, gst_warning};
use gst_base::prelude::*;
use gst_base::subclass::prelude::*;
use once_cell::sync::Lazy;
use std::mem;
use std::sync::Mutex;
@ -27,92 +28,20 @@ struct State {
current_audio_buffers: Vec<(gst::Buffer, gst_audio::AudioInfo, i64)>,
}
struct NdiSinkCombiner {
pub struct NdiSinkCombiner {
video_pad: gst_base::AggregatorPad,
audio_pad: Mutex<Option<gst_base::AggregatorPad>>,
state: Mutex<Option<State>>,
}
#[glib::object_subclass]
impl ObjectSubclass for NdiSinkCombiner {
const NAME: &'static str = "NdiSinkCombiner";
type Type = super::NdiSinkCombiner;
type ParentType = gst_base::Aggregator;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
glib::glib_object_subclass!();
fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
klass.set_metadata(
"NDI Sink Combiner",
"Combiner/Audio/Video",
"NDI sink audio/video combiner",
"Sebastian Dröge <sebastian@centricular.com>",
);
let caps = gst::Caps::builder("video/x-raw")
.field(
"format",
&gst::List::new(&[
&gst_video::VideoFormat::Uyvy.to_str(),
&gst_video::VideoFormat::I420.to_str(),
&gst_video::VideoFormat::Nv12.to_str(),
&gst_video::VideoFormat::Nv21.to_str(),
&gst_video::VideoFormat::Yv12.to_str(),
&gst_video::VideoFormat::Bgra.to_str(),
&gst_video::VideoFormat::Bgrx.to_str(),
&gst_video::VideoFormat::Rgba.to_str(),
&gst_video::VideoFormat::Rgbx.to_str(),
]),
)
.field("width", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("height", &gst::IntRange::<i32>::new(1, i32::MAX))
.field(
"framerate",
&gst::FractionRange::new(
gst::Fraction::new(1, i32::MAX),
gst::Fraction::new(i32::MAX, 1),
),
)
.build();
let src_pad_template = gst::PadTemplate::with_gtype(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
gst_base::AggregatorPad::static_type(),
)
.unwrap();
klass.add_pad_template(src_pad_template);
let sink_pad_template = gst::PadTemplate::with_gtype(
"video",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
gst_base::AggregatorPad::static_type(),
)
.unwrap();
klass.add_pad_template(sink_pad_template);
let caps = gst::Caps::builder("audio/x-raw")
.field("format", &gst_audio::AUDIO_FORMAT_S16.to_str())
.field("rate", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("channels", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("layout", &"interleaved")
.build();
let sink_pad_template = gst::PadTemplate::with_gtype(
"audio",
gst::PadDirection::Sink,
gst::PadPresence::Request,
&caps,
gst_base::AggregatorPad::static_type(),
)
.unwrap();
klass.add_pad_template(sink_pad_template);
}
fn with_class(klass: &Self::Class) -> Self {
let templ = klass.get_pad_template("video").unwrap();
let templ = klass.pad_template("video").unwrap();
let video_pad =
gst::PadBuilder::<gst_base::AggregatorPad>::from_template(&templ, Some("video"))
.build();
@ -126,18 +55,97 @@ impl ObjectSubclass for NdiSinkCombiner {
}
impl ObjectImpl for NdiSinkCombiner {
glib::glib_object_impl!();
fn constructed(&self, obj: &glib::Object) {
let element = obj.downcast_ref::<gst::Element>().unwrap();
element.add_pad(&self.video_pad).unwrap();
fn constructed(&self, obj: &Self::Type) {
obj.add_pad(&self.video_pad).unwrap();
self.parent_constructed(obj);
}
}
impl ElementImpl for NdiSinkCombiner {
fn release_pad(&self, element: &gst::Element, pad: &gst::Pad) {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"NDI Sink Combiner",
"Combiner/Audio/Video",
"NDI sink audio/video combiner",
"Sebastian Dröge <sebastian@centricular.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::builder("video/x-raw")
.field(
"format",
&gst::List::new(&[
&gst_video::VideoFormat::Uyvy.to_str(),
&gst_video::VideoFormat::I420.to_str(),
&gst_video::VideoFormat::Nv12.to_str(),
&gst_video::VideoFormat::Nv21.to_str(),
&gst_video::VideoFormat::Yv12.to_str(),
&gst_video::VideoFormat::Bgra.to_str(),
&gst_video::VideoFormat::Bgrx.to_str(),
&gst_video::VideoFormat::Rgba.to_str(),
&gst_video::VideoFormat::Rgbx.to_str(),
]),
)
.field("width", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("height", &gst::IntRange::<i32>::new(1, i32::MAX))
.field(
"framerate",
&gst::FractionRange::new(
gst::Fraction::new(1, i32::MAX),
gst::Fraction::new(i32::MAX, 1),
),
)
.build();
let src_pad_template = gst::PadTemplate::with_gtype(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
gst_base::AggregatorPad::static_type(),
)
.unwrap();
let video_sink_pad_template = gst::PadTemplate::with_gtype(
"video",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
gst_base::AggregatorPad::static_type(),
)
.unwrap();
let caps = gst::Caps::builder("audio/x-raw")
.field("format", &gst_audio::AUDIO_FORMAT_F32.to_str())
.field("rate", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("channels", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("layout", &"interleaved")
.build();
let audio_sink_pad_template = gst::PadTemplate::with_gtype(
"audio",
gst::PadDirection::Sink,
gst::PadPresence::Request,
&caps,
gst_base::AggregatorPad::static_type(),
)
.unwrap();
vec![
src_pad_template,
video_sink_pad_template,
audio_sink_pad_template,
]
});
PAD_TEMPLATES.as_ref()
}
fn release_pad(&self, element: &Self::Type, pad: &gst::Pad) {
let mut audio_pad_storage = self.audio_pad.lock().unwrap();
if audio_pad_storage.as_ref().map(|p| p.upcast_ref()) == Some(pad) {
@ -151,7 +159,7 @@ impl ElementImpl for NdiSinkCombiner {
impl AggregatorImpl for NdiSinkCombiner {
fn create_new_pad(
&self,
agg: &gst_base::Aggregator,
agg: &Self::Type,
templ: &gst::PadTemplate,
_req_name: Option<&str>,
_caps: Option<&gst::Caps>,
@ -163,7 +171,7 @@ impl AggregatorImpl for NdiSinkCombiner {
return None;
}
let sink_templ = agg.get_pad_template("audio").unwrap();
let sink_templ = agg.pad_template("audio").unwrap();
if templ != &sink_templ {
gst_error!(CAT, obj: agg, "Wrong pad template");
return None;
@ -178,7 +186,7 @@ impl AggregatorImpl for NdiSinkCombiner {
Some(pad)
}
fn start(&self, agg: &gst_base::Aggregator) -> Result<(), gst::ErrorMessage> {
fn start(&self, agg: &Self::Type) -> Result<(), gst::ErrorMessage> {
let mut state_storage = self.state.lock().unwrap();
*state_storage = Some(State {
audio_info: None,
@ -192,7 +200,7 @@ impl AggregatorImpl for NdiSinkCombiner {
Ok(())
}
fn stop(&self, agg: &gst_base::Aggregator) -> Result<(), gst::ErrorMessage> {
fn stop(&self, agg: &Self::Type) -> Result<(), gst::ErrorMessage> {
// Drop our state now
let _ = self.state.lock().unwrap().take();
@ -201,18 +209,18 @@ impl AggregatorImpl for NdiSinkCombiner {
Ok(())
}
fn get_next_time(&self, _agg: &gst_base::Aggregator) -> gst::ClockTime {
fn next_time(&self, _agg: &Self::Type) -> Option<gst::ClockTime> {
// FIXME: What to do here? We don't really know when the next buffer is expected
gst::CLOCK_TIME_NONE
gst::ClockTime::NONE
}
fn clip(
&self,
agg: &gst_base::Aggregator,
agg: &Self::Type,
agg_pad: &gst_base::AggregatorPad,
mut buffer: gst::Buffer,
) -> Option<gst::Buffer> {
let segment = match agg_pad.get_segment().downcast::<gst::ClockTime>() {
let segment = match agg_pad.segment().downcast::<gst::ClockTime>() {
Ok(segment) => segment,
Err(_) => {
gst_error!(CAT, obj: agg, "Only TIME segments supported");
@ -220,25 +228,21 @@ impl AggregatorImpl for NdiSinkCombiner {
}
};
let pts = buffer.get_pts();
let pts = buffer.pts();
if pts.is_none() {
gst_error!(CAT, obj: agg, "Only buffers with PTS supported");
return Some(buffer);
}
let duration = if buffer.get_duration().is_some() {
buffer.get_duration()
} else {
gst::CLOCK_TIME_NONE
};
let duration = buffer.duration();
gst_trace!(
CAT,
obj: agg_pad,
"Clipping buffer {:?} with PTS {} and duration {}",
buffer,
pts,
duration
pts.display(),
duration.display(),
);
let state_storage = self.state.lock().unwrap();
@ -247,25 +251,21 @@ impl AggregatorImpl for NdiSinkCombiner {
None => return None,
};
let duration = if buffer.get_duration().is_some() {
buffer.get_duration()
let duration = if duration.is_some() {
duration
} else if let Some(ref audio_info) = state.audio_info {
gst::SECOND
.mul_div_floor(
buffer.get_size() as u64,
audio_info.rate() as u64 * audio_info.bpf() as u64,
)
.unwrap()
gst::ClockTime::SECOND.mul_div_floor(
buffer.size() as u64,
audio_info.rate() as u64 * audio_info.bpf() as u64,
)
} else if let Some(ref video_info) = state.video_info {
if *video_info.fps().numer() > 0 {
gst::SECOND
.mul_div_floor(
*video_info.fps().denom() as u64,
*video_info.fps().numer() as u64,
)
.unwrap()
gst::ClockTime::SECOND.mul_div_floor(
*video_info.fps().denom() as u64,
*video_info.fps().numer() as u64,
)
} else {
gst::CLOCK_TIME_NONE
gst::ClockTime::NONE
}
} else {
unreachable!()
@ -276,18 +276,23 @@ impl AggregatorImpl for NdiSinkCombiner {
obj: agg_pad,
"Clipping buffer {:?} with PTS {} and duration {}",
buffer,
pts,
duration
pts.display(),
duration.display(),
);
if agg_pad == &self.video_pad {
segment.clip(pts, pts + duration).map(|(start, stop)| {
let end_pts = pts
.zip(duration)
.and_then(|(pts, duration)| pts.checked_add(duration));
segment.clip(pts, end_pts).map(|(start, stop)| {
{
let buffer = buffer.make_mut();
buffer.set_pts(start);
if duration.is_some() {
buffer.set_duration(stop - start);
}
buffer.set_duration(
stop.zip(start)
.and_then(|(stop, start)| stop.checked_sub(start)),
);
}
buffer
@ -307,7 +312,7 @@ impl AggregatorImpl for NdiSinkCombiner {
fn aggregate(
&self,
agg: &gst_base::Aggregator,
agg: &Self::Type,
timeout: bool,
) -> Result<gst::FlowSuccess, gst::FlowError> {
// FIXME: Can't really happen because we always return NONE from get_next_time() but that
@ -318,7 +323,7 @@ impl AggregatorImpl for NdiSinkCombiner {
// first try getting buffers from both pads here
let video_buffer_and_segment = match self.video_pad.peek_buffer() {
Some(video_buffer) => {
let video_segment = self.video_pad.get_segment();
let video_segment = self.video_pad.segment();
let video_segment = match video_segment.downcast::<gst::ClockTime>() {
Ok(video_segment) => video_segment,
Err(video_segment) => {
@ -326,7 +331,7 @@ impl AggregatorImpl for NdiSinkCombiner {
CAT,
obj: agg,
"Video segment of wrong format {:?}",
video_segment.get_format()
video_segment.format()
);
return Err(gst::FlowError::Error);
}
@ -344,14 +349,14 @@ impl AggregatorImpl for NdiSinkCombiner {
let audio_buffer_segment_and_pad;
if let Some(audio_pad) = self.audio_pad.lock().unwrap().clone() {
audio_buffer_segment_and_pad = match audio_pad.peek_buffer() {
Some(audio_buffer) if audio_buffer.get_size() == 0 => {
Some(audio_buffer) if audio_buffer.size() == 0 => {
// Skip empty/gap audio buffer
audio_pad.drop_buffer();
gst_trace!(CAT, obj: agg, "Empty audio buffer, waiting for next");
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(audio_buffer) => {
let audio_segment = audio_pad.get_segment();
let audio_segment = audio_pad.segment();
let audio_segment = match audio_segment.downcast::<gst::ClockTime>() {
Ok(audio_segment) => audio_segment,
Err(audio_segment) => {
@ -359,7 +364,7 @@ impl AggregatorImpl for NdiSinkCombiner {
CAT,
obj: agg,
"Audio segment of wrong format {:?}",
audio_segment.get_format()
audio_segment.format()
);
return Err(gst::FlowError::Error);
}
@ -385,8 +390,7 @@ impl AggregatorImpl for NdiSinkCombiner {
let (mut current_video_buffer, current_video_running_time_end, next_video_buffer) =
if let Some((video_buffer, video_segment)) = video_buffer_and_segment {
let video_running_time = video_segment.to_running_time(video_buffer.get_pts());
assert!(video_running_time.is_some());
let video_running_time = video_segment.to_running_time(video_buffer.pts()).unwrap();
match state.current_video_buffer {
None => {
@ -398,7 +402,7 @@ impl AggregatorImpl for NdiSinkCombiner {
}
Some((ref buffer, _)) => (
buffer.clone(),
video_running_time,
Some(video_running_time),
Some((video_buffer, video_running_time)),
),
}
@ -416,10 +420,9 @@ impl AggregatorImpl for NdiSinkCombiner {
// Create an empty dummy buffer for attaching the audio. This is going to
// be dropped by the sink later.
let audio_running_time =
audio_segment.to_running_time(audio_buffer.get_pts());
assert!(audio_running_time.is_some());
audio_segment.to_running_time(audio_buffer.pts()).unwrap();
let video_segment = self.video_pad.get_segment();
let video_segment = self.video_pad.segment();
let video_segment = match video_segment.downcast::<gst::ClockTime>() {
Ok(video_segment) => video_segment,
Err(video_segment) => {
@ -427,7 +430,7 @@ impl AggregatorImpl for NdiSinkCombiner {
CAT,
obj: agg,
"Video segment of wrong format {:?}",
video_segment.get_format()
video_segment.format()
);
return Err(gst::FlowError::Error);
}
@ -445,9 +448,9 @@ impl AggregatorImpl for NdiSinkCombiner {
buffer.set_pts(video_pts);
}
(buffer, gst::CLOCK_TIME_NONE, None)
(buffer, gst::ClockTime::NONE, None)
}
(Some((ref buffer, _)), _) => (buffer.clone(), gst::CLOCK_TIME_NONE, None),
(Some((ref buffer, _)), _) => (buffer.clone(), gst::ClockTime::NONE, None),
}
};
@ -460,22 +463,26 @@ impl AggregatorImpl for NdiSinkCombiner {
}
};
let audio_running_time = audio_segment.to_running_time(audio_buffer.get_pts());
assert!(audio_running_time.is_some());
let duration = gst::SECOND
.mul_div_floor(
audio_buffer.get_size() as u64 / audio_info.bpf() as u64,
audio_info.rate() as u64,
)
.unwrap_or(gst::CLOCK_TIME_NONE);
let audio_running_time_end = audio_running_time + duration;
assert!(audio_running_time_end.is_some());
let audio_running_time = audio_segment.to_running_time(audio_buffer.pts());
let duration = gst::ClockTime::SECOND.mul_div_floor(
audio_buffer.size() as u64 / audio_info.bpf() as u64,
audio_info.rate() as u64,
);
let audio_running_time_end = audio_running_time
.zip(duration)
.and_then(|(running_time, duration)| running_time.checked_add(duration));
if audio_running_time_end <= current_video_running_time_end
|| current_video_running_time_end.is_none()
if audio_running_time_end
.zip(current_video_running_time_end)
.map(|(audio, video)| audio <= video)
.unwrap_or(true)
{
let timecode = (audio_running_time + agg.get_base_time())
.map(|t| (t / 100) as i64)
let timecode = agg
.base_time()
.zip(audio_running_time)
.map(|(base_time, audio_running_time)| {
((base_time.nseconds() + audio_running_time.nseconds()) / 100) as i64
})
.unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize);
gst_trace!(
@ -484,8 +491,8 @@ impl AggregatorImpl for NdiSinkCombiner {
"Including audio buffer {:?} with timecode {}: {} <= {}",
audio_buffer,
timecode,
audio_running_time_end,
current_video_running_time_end,
audio_running_time_end.display(),
current_video_running_time_end.display(),
);
state
.current_audio_buffers
@ -503,7 +510,7 @@ impl AggregatorImpl for NdiSinkCombiner {
// far
}
let audio_buffers = mem::replace(&mut state.current_audio_buffers, Vec::new());
let audio_buffers = mem::take(&mut state.current_audio_buffers);
if !audio_buffers.is_empty() {
let current_video_buffer = current_video_buffer.make_mut();
@ -530,7 +537,7 @@ impl AggregatorImpl for NdiSinkCombiner {
fn sink_event(
&self,
agg: &gst_base::Aggregator,
agg: &Self::Type,
pad: &gst_base::AggregatorPad,
event: gst::Event,
) -> bool {
@ -538,7 +545,7 @@ impl AggregatorImpl for NdiSinkCombiner {
match event.view() {
EventView::Caps(caps) => {
let caps = caps.get_caps_owned();
let caps = caps.caps_owned();
let mut state_storage = self.state.lock().unwrap();
let state = match &mut *state_storage {
@ -558,22 +565,22 @@ impl AggregatorImpl for NdiSinkCombiner {
// 2 frames latency because we queue 1 frame and wait until audio
// up to the end of that frame has arrived.
let latency = if *info.fps().numer() > 0 {
gst::SECOND
gst::ClockTime::SECOND
.mul_div_floor(
2 * *info.fps().denom() as u64,
*info.fps().numer() as u64,
)
.unwrap_or(80 * gst::MSECOND)
.unwrap_or(80 * gst::ClockTime::MSECOND)
} else {
// let's assume 25fps and 2 frames latency
80 * gst::MSECOND
80 * gst::ClockTime::MSECOND
};
state.video_info = Some(info);
drop(state_storage);
agg.set_latency(latency, gst::CLOCK_TIME_NONE);
agg.set_latency(latency, gst::ClockTime::NONE);
// The video caps are passed through as the audio is included only in a meta
agg.set_src_caps(&caps);
@ -591,7 +598,7 @@ impl AggregatorImpl for NdiSinkCombiner {
}
// The video segment is passed through as-is and the video timestamps are preserved
EventView::Segment(segment) if pad == &self.video_pad => {
let segment = segment.get_segment();
let segment = segment.segment();
gst_debug!(CAT, obj: agg, "Updating segment {:?}", segment);
agg.update_segment(segment);
}
@ -603,7 +610,7 @@ impl AggregatorImpl for NdiSinkCombiner {
fn sink_query(
&self,
agg: &gst_base::Aggregator,
agg: &Self::Type,
pad: &gst_base::AggregatorPad,
query: &mut gst::QueryRef,
) -> bool {
@ -612,7 +619,7 @@ impl AggregatorImpl for NdiSinkCombiner {
match query.view_mut() {
QueryView::Caps(_) if pad == &self.video_pad => {
// Directly forward caps queries
let srcpad = agg.get_static_pad("src").unwrap();
let srcpad = agg.static_pad("src").unwrap();
return srcpad.peer_query(query);
}
_ => (),
@ -621,17 +628,8 @@ impl AggregatorImpl for NdiSinkCombiner {
self.parent_sink_query(agg, pad, query)
}
fn negotiate(&self, _agg: &gst_base::Aggregator) -> bool {
fn negotiate(&self, _agg: &Self::Type) -> bool {
// No negotiation needed as the video caps are just passed through
true
}
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"ndisinkcombiner",
gst::Rank::None,
NdiSinkCombiner::get_type(),
)
}

View file

@ -0,0 +1,19 @@
use glib::prelude::*;
mod imp;
// Public wrapper type for the `NdiSinkCombiner` element implementation in
// `imp`. The `glib::wrapper!` macro generates the GObject boilerplate and
// wires up the inheritance chain: Aggregator -> Element -> Object.
glib::wrapper! {
    pub struct NdiSinkCombiner(ObjectSubclass<imp::NdiSinkCombiner>) @extends gst_base::Aggregator, gst::Element, gst::Object;
}
// SAFETY: the wrapper only holds a reference-counted GObject pointer and
// GStreamer elements are designed to be used from multiple threads.
// NOTE(review): this relies on the underlying GObject being thread-safe —
// confirm against the glib `wrapper!`/ObjectSubclass documentation.
unsafe impl Send for NdiSinkCombiner {}
unsafe impl Sync for NdiSinkCombiner {}
/// Registers the `ndisinkcombiner` element with the given plugin so that it
/// can be instantiated by name (rank: none, i.e. never auto-plugged).
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    let element_type = NdiSinkCombiner::static_type();
    gst::Element::register(Some(plugin), "ndisinkcombiner", gst::Rank::None, element_type)
}

View file

@ -1,4 +1,3 @@
use gst::gst_sys;
use gst::prelude::*;
use std::fmt;
use std::mem;
@ -19,10 +18,10 @@ impl NdiSinkAudioMeta {
// content of the struct
let mut params = mem::ManuallyDrop::new(imp::NdiSinkAudioMetaParams { buffers });
let meta = gst_sys::gst_buffer_add_meta(
let meta = gst::ffi::gst_buffer_add_meta(
buffer.as_mut_ptr(),
imp::ndi_sink_audio_meta_get_info(),
&mut *params as *mut imp::NdiSinkAudioMetaParams as glib::glib_sys::gpointer,
&mut *params as *mut imp::NdiSinkAudioMetaParams as glib::ffi::gpointer,
) as *mut imp::NdiSinkAudioMeta;
Self::from_mut_ptr(buffer, meta)
@ -37,7 +36,7 @@ impl NdiSinkAudioMeta {
unsafe impl MetaAPI for NdiSinkAudioMeta {
type GstType = imp::NdiSinkAudioMeta;
fn get_meta_api() -> glib::Type {
fn meta_api() -> glib::Type {
imp::ndi_sink_audio_meta_api_get_type()
}
}
@ -51,9 +50,7 @@ impl fmt::Debug for NdiSinkAudioMeta {
}
mod imp {
use glib::glib_sys;
use glib::translate::*;
use gst::gst_sys;
use once_cell::sync::Lazy;
use std::mem;
use std::ptr;
@ -64,18 +61,18 @@ mod imp {
#[repr(C)]
pub struct NdiSinkAudioMeta {
parent: gst_sys::GstMeta,
parent: gst::ffi::GstMeta,
pub(super) buffers: Vec<(gst::Buffer, gst_audio::AudioInfo, i64)>,
}
pub(super) fn ndi_sink_audio_meta_api_get_type() -> glib::Type {
static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
let t = from_glib(gst_sys::gst_meta_api_type_register(
let t = from_glib(gst::ffi::gst_meta_api_type_register(
b"GstNdiSinkAudioMetaAPI\0".as_ptr() as *const _,
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
));
assert_ne!(t, glib::Type::Invalid);
assert_ne!(t, glib::Type::INVALID);
t
});
@ -84,10 +81,10 @@ mod imp {
}
unsafe extern "C" fn ndi_sink_audio_meta_init(
meta: *mut gst_sys::GstMeta,
params: glib_sys::gpointer,
_buffer: *mut gst_sys::GstBuffer,
) -> glib_sys::gboolean {
meta: *mut gst::ffi::GstMeta,
params: glib::ffi::gpointer,
_buffer: *mut gst::ffi::GstBuffer,
) -> glib::ffi::gboolean {
assert!(!params.is_null());
let meta = &mut *(meta as *mut NdiSinkAudioMeta);
@ -95,12 +92,12 @@ mod imp {
ptr::write(&mut meta.buffers, params.buffers);
true.to_glib()
true.into_glib()
}
unsafe extern "C" fn ndi_sink_audio_meta_free(
meta: *mut gst_sys::GstMeta,
_buffer: *mut gst_sys::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
) {
let meta = &mut *(meta as *mut NdiSinkAudioMeta);
@ -108,34 +105,34 @@ mod imp {
}
unsafe extern "C" fn ndi_sink_audio_meta_transform(
dest: *mut gst_sys::GstBuffer,
meta: *mut gst_sys::GstMeta,
_buffer: *mut gst_sys::GstBuffer,
_type_: glib_sys::GQuark,
_data: glib_sys::gpointer,
) -> glib_sys::gboolean {
dest: *mut gst::ffi::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
_type_: glib::ffi::GQuark,
_data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let meta = &*(meta as *mut NdiSinkAudioMeta);
super::NdiSinkAudioMeta::add(gst::BufferRef::from_mut_ptr(dest), meta.buffers.clone());
true.to_glib()
true.into_glib()
}
pub(super) fn ndi_sink_audio_meta_get_info() -> *const gst_sys::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst_sys::GstMetaInfo>);
pub(super) fn ndi_sink_audio_meta_get_info() -> *const gst::ffi::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
unsafe impl Send for MetaInfo {}
unsafe impl Sync for MetaInfo {}
static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst_sys::gst_meta_register(
ndi_sink_audio_meta_api_get_type().to_glib(),
ptr::NonNull::new(gst::ffi::gst_meta_register(
ndi_sink_audio_meta_api_get_type().into_glib(),
b"GstNdiSinkAudioMeta\0".as_ptr() as *const _,
mem::size_of::<NdiSinkAudioMeta>(),
Some(ndi_sink_audio_meta_init),
Some(ndi_sink_audio_meta_free),
Some(ndi_sink_audio_meta_transform),
) as *mut gst_sys::GstMetaInfo)
) as *mut gst::ffi::GstMetaInfo)
.expect("Failed to register meta API"),
)
});

630
src/ndisrc/imp.rs Normal file
View file

@ -0,0 +1,630 @@
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_error};
use gst_base::prelude::*;
use gst_base::subclass::base_src::CreateSuccess;
use gst_base::subclass::prelude::*;
use std::sync::Mutex;
use std::{i32, u32};
use once_cell::sync::Lazy;
use crate::ndisys;
use crate::ndisrcmeta;
use crate::Buffer;
use crate::Receiver;
use crate::ReceiverControlHandle;
use crate::ReceiverItem;
use crate::RecvColorFormat;
use crate::TimestampMode;
use crate::DEFAULT_RECEIVER_NDI_NAME;
// Debug category for the `ndisrc` element; created lazily on first use so
// registration happens after GStreamer initialization.
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new(
        "ndisrc",
        gst::DebugColorFlags::empty(),
        Some("NewTek NDI Source"),
    )
});
/// User-configurable element properties, guarded by a `Mutex` in `NdiSrc`.
/// Each field maps 1:1 to a GObject property declared in `properties()`.
#[derive(Debug, Clone)]
struct Settings {
    // NDI stream name of the sender to connect to (property "ndi-name").
    ndi_name: Option<String>,
    // URL/address and port of the sender (property "url-address").
    url_address: Option<String>,
    // Connection timeout in ms (property "connect-timeout").
    connect_timeout: u32,
    // Receive timeout in ms (property "timeout").
    timeout: u32,
    // Maximum receive queue length (property "max-queue-length").
    max_queue_length: u32,
    // NDI name under which this receiver announces itself.
    receiver_ndi_name: String,
    // Raw NDI bandwidth setting: -10 metadata-only .. 100 highest.
    bandwidth: ndisys::NDIlib_recv_bandwidth_e,
    // Color format requested from the NDI receiver.
    color_format: RecvColorFormat,
    // Which timestamp information is used for outgoing PTS.
    timestamp_mode: TimestampMode,
}
impl Default for Settings {
    /// Default property values, matching the defaults declared in the
    /// corresponding `glib::ParamSpec`s (10 s connect timeout, 5 s receive
    /// timeout, queue length 10, highest bandwidth, UYVY/BGRA color format,
    /// receive-time + timecode timestamping).
    fn default() -> Self {
        Self {
            ndi_name: None,
            url_address: None,
            connect_timeout: 10000,
            timeout: 5000,
            max_queue_length: 10,
            receiver_ndi_name: DEFAULT_RECEIVER_NDI_NAME.clone(),
            bandwidth: ndisys::NDIlib_recv_bandwidth_highest,
            color_format: RecvColorFormat::UyvyBgra,
            timestamp_mode: TimestampMode::ReceiveTimeTimecode,
        }
    }
}
/// Mutable streaming state, reset on `start()`/`stop()`.
struct State {
    // Last negotiated video/audio info and the caps built from them; used to
    // detect format changes and to tag buffers with `NdiSrcMeta`.
    video_info: Option<crate::VideoInfo>,
    video_caps: Option<gst::Caps>,
    audio_info: Option<crate::AudioInfo>,
    audio_caps: Option<gst::Caps>,
    // Duration of the last video buffer; reported in the LATENCY query.
    current_latency: Option<gst::ClockTime>,
    // Active receiver; taken out of the state while capturing in `create()`.
    receiver: Option<Receiver>,
}
impl Default for State {
fn default() -> State {
State {
video_info: None,
video_caps: None,
audio_info: None,
audio_caps: None,
current_latency: gst::ClockTime::NONE,
receiver: None,
}
}
}
/// Private implementation struct of the `ndisrc` element.
pub struct NdiSrc {
    // Property values; cloned on `start()` so streaming does not hold the lock.
    settings: Mutex<Settings>,
    // Streaming state; reset on start/stop.
    state: Mutex<State>,
    // Handle for flushing/pausing/shutting down the receiver thread from
    // state-change and unlock handlers.
    receiver_controller: Mutex<Option<ReceiverControlHandle>>,
}
#[glib::object_subclass]
impl ObjectSubclass for NdiSrc {
const NAME: &'static str = "NdiSrc";
type Type = super::NdiSrc;
type ParentType = gst_base::BaseSrc;
fn new() -> Self {
Self {
settings: Mutex::new(Default::default()),
state: Mutex::new(Default::default()),
receiver_controller: Mutex::new(None),
}
}
}
impl ObjectImpl for NdiSrc {
    /// Declares the GObject properties of the element. The specs are built
    /// once and cached in a `Lazy` static; each one corresponds to a field
    /// of `Settings`.
    fn properties() -> &'static [glib::ParamSpec] {
        static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
            vec![
                glib::ParamSpec::new_string(
                    "ndi-name",
                    "NDI Name",
                    "NDI stream name of the sender",
                    None,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_string(
                    "url-address",
                    "URL/Address",
                    "URL/address and port of the sender, e.g. 127.0.0.1:5961",
                    None,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_string(
                    "receiver-ndi-name",
                    "Receiver NDI Name",
                    "NDI stream name of this receiver",
                    Some(&*DEFAULT_RECEIVER_NDI_NAME),
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_uint(
                    "connect-timeout",
                    "Connect Timeout",
                    "Connection timeout in ms",
                    0,
                    u32::MAX,
                    10000,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_uint(
                    "timeout",
                    "Timeout",
                    "Receive timeout in ms",
                    0,
                    u32::MAX,
                    5000,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_uint(
                    "max-queue-length",
                    "Max Queue Length",
                    "Maximum receive queue length",
                    0,
                    u32::MAX,
                    10,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_int(
                    "bandwidth",
                    "Bandwidth",
                    "Bandwidth, -10 metadata-only, 10 audio-only, 100 highest",
                    -10,
                    100,
                    100,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_enum(
                    "color-format",
                    "Color Format",
                    "Receive color format",
                    RecvColorFormat::static_type(),
                    RecvColorFormat::UyvyBgra as u32 as i32,
                    glib::ParamFlags::READWRITE,
                ),
                glib::ParamSpec::new_enum(
                    "timestamp-mode",
                    "Timestamp Mode",
                    "Timestamp information to use for outgoing PTS",
                    TimestampMode::static_type(),
                    TimestampMode::ReceiveTimeTimecode as i32,
                    glib::ParamFlags::READWRITE,
                ),
            ]
        });

        PROPERTIES.as_ref()
    }

    /// Called once the object is constructed; configures the BaseSrc.
    fn constructed(&self, obj: &Self::Type) {
        self.parent_constructed(obj);

        // Initialize live-ness and notify the base class that
        // we'd like to operate in Time format
        obj.set_live(true);
        obj.set_format(gst::Format::Time);
    }

    /// Stores a property value into `Settings`, logging old and new values.
    fn set_property(
        &self,
        obj: &Self::Type,
        _id: usize,
        value: &glib::Value,
        pspec: &glib::ParamSpec,
    ) {
        match pspec.name() {
            "ndi-name" => {
                let mut settings = self.settings.lock().unwrap();
                let ndi_name = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing ndi-name from {:?} to {:?}",
                    settings.ndi_name,
                    ndi_name,
                );
                settings.ndi_name = ndi_name;
            }
            "url-address" => {
                let mut settings = self.settings.lock().unwrap();
                let url_address = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing url-address from {:?} to {:?}",
                    settings.url_address,
                    url_address,
                );
                settings.url_address = url_address;
            }
            "receiver-ndi-name" => {
                let mut settings = self.settings.lock().unwrap();
                let receiver_ndi_name = value.get::<Option<String>>().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing receiver-ndi-name from {:?} to {:?}",
                    settings.receiver_ndi_name,
                    receiver_ndi_name,
                );
                // An unset name falls back to the process-wide default.
                settings.receiver_ndi_name =
                    receiver_ndi_name.unwrap_or_else(|| DEFAULT_RECEIVER_NDI_NAME.clone());
            }
            "connect-timeout" => {
                let mut settings = self.settings.lock().unwrap();
                let connect_timeout = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing connect-timeout from {} to {}",
                    settings.connect_timeout,
                    connect_timeout,
                );
                settings.connect_timeout = connect_timeout;
            }
            "timeout" => {
                let mut settings = self.settings.lock().unwrap();
                let timeout = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing timeout from {} to {}",
                    settings.timeout,
                    timeout,
                );
                settings.timeout = timeout;
            }
            "max-queue-length" => {
                let mut settings = self.settings.lock().unwrap();
                let max_queue_length = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing max-queue-length from {} to {}",
                    settings.max_queue_length,
                    max_queue_length,
                );
                settings.max_queue_length = max_queue_length;
            }
            "bandwidth" => {
                let mut settings = self.settings.lock().unwrap();
                let bandwidth = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing bandwidth from {} to {}",
                    settings.bandwidth,
                    bandwidth,
                );
                settings.bandwidth = bandwidth;
            }
            "color-format" => {
                let mut settings = self.settings.lock().unwrap();
                let color_format = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing color format from {:?} to {:?}",
                    settings.color_format,
                    color_format,
                );
                settings.color_format = color_format;
            }
            "timestamp-mode" => {
                let mut settings = self.settings.lock().unwrap();
                let timestamp_mode = value.get().unwrap();
                gst_debug!(
                    CAT,
                    obj: obj,
                    "Changing timestamp mode from {:?} to {:?}",
                    settings.timestamp_mode,
                    timestamp_mode
                );
                // The timestamp mode affects reported latency (see the
                // LATENCY query handler), so announce a latency change.
                if settings.timestamp_mode != timestamp_mode {
                    let _ = obj.post_message(gst::message::Latency::builder().src(obj).build());
                }
                settings.timestamp_mode = timestamp_mode;
            }
            _ => unimplemented!(),
        }
    }

    /// Reads the current value of a property out of `Settings`.
    fn property(&self, _obj: &Self::Type, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
        match pspec.name() {
            "ndi-name" => {
                let settings = self.settings.lock().unwrap();
                settings.ndi_name.to_value()
            }
            "url-address" => {
                let settings = self.settings.lock().unwrap();
                settings.url_address.to_value()
            }
            "receiver-ndi-name" => {
                let settings = self.settings.lock().unwrap();
                settings.receiver_ndi_name.to_value()
            }
            "connect-timeout" => {
                let settings = self.settings.lock().unwrap();
                settings.connect_timeout.to_value()
            }
            "timeout" => {
                let settings = self.settings.lock().unwrap();
                settings.timeout.to_value()
            }
            "max-queue-length" => {
                let settings = self.settings.lock().unwrap();
                settings.max_queue_length.to_value()
            }
            "bandwidth" => {
                let settings = self.settings.lock().unwrap();
                settings.bandwidth.to_value()
            }
            "color-format" => {
                let settings = self.settings.lock().unwrap();
                settings.color_format.to_value()
            }
            "timestamp-mode" => {
                let settings = self.settings.lock().unwrap();
                settings.timestamp_mode.to_value()
            }
            _ => unimplemented!(),
        }
    }
}
impl ElementImpl for NdiSrc {
    /// Static element metadata shown by e.g. `gst-inspect-1.0`.
    fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
        static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
            gst::subclass::ElementMetadata::new(
                "NewTek NDI Source",
                "Source/Audio/Video/Network",
                "NewTek NDI source",
                "Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>, Sebastian Dröge <sebastian@centricular.com>",
            )
        });

        Some(&*ELEMENT_METADATA)
    }

    /// Single always-present src pad producing `application/x-ndi`; the
    /// actual audio/video caps travel in `NdiSrcMeta` on each buffer.
    fn pad_templates() -> &'static [gst::PadTemplate] {
        static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
            let src_pad_template = gst::PadTemplate::new(
                "src",
                gst::PadDirection::Src,
                gst::PadPresence::Always,
                &gst::Caps::builder("application/x-ndi").build(),
            )
            .unwrap();

            vec![src_pad_template]
        });

        PAD_TEMPLATES.as_ref()
    }

    /// Forwards play/pause/shutdown transitions to the receiver controller
    /// (if one exists) before chaining up to the parent handler.
    fn change_state(
        &self,
        element: &Self::Type,
        transition: gst::StateChange,
    ) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
        match transition {
            gst::StateChange::PausedToPlaying => {
                if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
                    controller.set_playing(true);
                }
            }
            gst::StateChange::PlayingToPaused => {
                if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
                    controller.set_playing(false);
                }
            }
            gst::StateChange::PausedToReady => {
                if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
                    controller.shutdown();
                }
            }
            _ => (),
        }

        self.parent_change_state(element, transition)
    }
}
impl BaseSrcImpl for NdiSrc {
    /// Caps negotiation: the src pad always outputs `application/x-ndi`;
    /// the real stream caps are attached per-buffer via `NdiSrcMeta`.
    fn negotiate(&self, element: &Self::Type) -> Result<(), gst::LoggableError> {
        element
            .set_caps(&gst::Caps::builder("application/x-ndi").build())
            .map_err(|_| gst::loggable_error!(CAT, "Failed to negotiate caps",))
    }

    /// Puts the receiver into flushing mode so a blocked `create()` returns.
    fn unlock(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
        gst_debug!(CAT, obj: element, "Unlocking",);
        if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
            controller.set_flushing(true);
        }
        Ok(())
    }

    /// Leaves flushing mode again after `unlock()`.
    fn unlock_stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
        gst_debug!(CAT, obj: element, "Stop unlocking",);
        if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
            controller.set_flushing(false);
        }
        Ok(())
    }

    /// Resets the state and connects the NDI receiver using the current
    /// settings. Errors if neither "ndi-name" nor "url-address" is set or
    /// the connection cannot be established.
    fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
        *self.state.lock().unwrap() = Default::default();
        let settings = self.settings.lock().unwrap().clone();

        if settings.ndi_name.is_none() && settings.url_address.is_none() {
            return Err(gst::error_msg!(
                gst::LibraryError::Settings,
                ["No NDI name or URL/address given"]
            ));
        }

        let receiver = Receiver::connect(
            element.upcast_ref(),
            settings.ndi_name.as_deref(),
            settings.url_address.as_deref(),
            &settings.receiver_ndi_name,
            settings.connect_timeout,
            settings.bandwidth,
            settings.color_format.into(),
            settings.timestamp_mode,
            settings.timeout,
            settings.max_queue_length as usize,
        );

        match receiver {
            None => Err(gst::error_msg!(
                gst::ResourceError::NotFound,
                ["Could not connect to this source"]
            )),
            Some(receiver) => {
                // Keep a control handle around so unlock()/change_state()
                // can flush or shut the receiver down.
                *self.receiver_controller.lock().unwrap() =
                    Some(receiver.receiver_control_handle());
                let mut state = self.state.lock().unwrap();
                state.receiver = Some(receiver);

                Ok(())
            }
        }
    }

    /// Shuts down the receiver and clears all streaming state.
    fn stop(&self, _element: &Self::Type) -> Result<(), gst::ErrorMessage> {
        if let Some(ref controller) = self.receiver_controller.lock().unwrap().take() {
            controller.shutdown();
        }
        *self.state.lock().unwrap() = State::default();
        Ok(())
    }

    /// Handles SCHEDULING (push mode, sequential) and LATENCY queries; the
    /// latency is derived from the last video buffer duration and the
    /// configured queue length.
    fn query(&self, element: &Self::Type, query: &mut gst::QueryRef) -> bool {
        use gst::QueryView;

        match query.view_mut() {
            QueryView::Scheduling(ref mut q) => {
                q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0);
                q.add_scheduling_modes(&[gst::PadMode::Push]);
                true
            }
            QueryView::Latency(ref mut q) => {
                let state = self.state.lock().unwrap();
                let settings = self.settings.lock().unwrap();

                if let Some(latency) = state.current_latency {
                    // Receive-time modes buffer up to one frame; timecode/
                    // timestamp modes report zero minimum latency.
                    let min = if matches!(
                        settings.timestamp_mode,
                        TimestampMode::ReceiveTimeTimecode | TimestampMode::ReceiveTimeTimestamp
                    ) {
                        latency
                    } else {
                        gst::ClockTime::ZERO
                    };

                    let max = settings.max_queue_length as u64 * latency;

                    gst_debug!(
                        CAT,
                        obj: element,
                        "Returning latency min {} max {}",
                        min,
                        max
                    );
                    q.set(true, min, max);
                    true
                } else {
                    false
                }
            }
            _ => BaseSrcImplExt::parent_query(self, element, query),
        }
    }

    /// Captures the next audio or video buffer from the receiver, tags it
    /// with `NdiSrcMeta` carrying the current caps, and posts a latency
    /// message when the video buffer duration changes.
    fn create(
        &self,
        element: &Self::Type,
        _offset: u64,
        _buffer: Option<&mut gst::BufferRef>,
        _length: u32,
    ) -> Result<CreateSuccess, gst::FlowError> {
        // Take the receiver out of the state so the lock is not held while
        // blocking in capture().
        let recv = {
            let mut state = self.state.lock().unwrap();
            match state.receiver.take() {
                Some(recv) => recv,
                None => {
                    gst_error!(CAT, obj: element, "Have no receiver");
                    return Err(gst::FlowError::Error);
                }
            }
        };

        let res = recv.capture();

        let mut state = self.state.lock().unwrap();
        state.receiver = Some(recv);

        match res {
            ReceiverItem::Buffer(buffer) => {
                let buffer = match buffer {
                    Buffer::Audio(mut buffer, info) => {
                        // Rebuild cached caps only when the audio format changed.
                        if state.audio_info.as_ref() != Some(&info) {
                            let caps = info.to_caps().map_err(|_| {
                                gst::element_error!(
                                    element,
                                    gst::ResourceError::Settings,
                                    ["Invalid audio info received: {:?}", info]
                                );
                                gst::FlowError::NotNegotiated
                            })?;
                            state.audio_info = Some(info);
                            state.audio_caps = Some(caps);
                        }

                        {
                            let buffer = buffer.get_mut().unwrap();
                            ndisrcmeta::NdiSrcMeta::add(
                                buffer,
                                ndisrcmeta::StreamType::Audio,
                                state.audio_caps.as_ref().unwrap(),
                            );
                        }

                        buffer
                    }
                    Buffer::Video(mut buffer, info) => {
                        let mut latency_changed = false;

                        // Rebuild cached caps only when the video format changed.
                        if state.video_info.as_ref() != Some(&info) {
                            let caps = info.to_caps().map_err(|_| {
                                gst::element_error!(
                                    element,
                                    gst::ResourceError::Settings,
                                    // Fixed copy-paste error: this is the
                                    // *video* branch, not audio.
                                    ["Invalid video info received: {:?}", info]
                                );
                                gst::FlowError::NotNegotiated
                            })?;
                            state.video_info = Some(info);
                            state.video_caps = Some(caps);
                            latency_changed = state.current_latency != buffer.duration();
                            state.current_latency = buffer.duration();
                        }

                        {
                            let buffer = buffer.get_mut().unwrap();
                            ndisrcmeta::NdiSrcMeta::add(
                                buffer,
                                ndisrcmeta::StreamType::Video,
                                state.video_caps.as_ref().unwrap(),
                            );
                        }

                        // Release the state lock before posting: message
                        // handlers may call back into the element.
                        drop(state);
                        if latency_changed {
                            let _ = element.post_message(
                                gst::message::Latency::builder().src(element).build(),
                            );
                        }

                        buffer
                    }
                };

                Ok(CreateSuccess::NewBuffer(buffer))
            }
            ReceiverItem::Timeout => Err(gst::FlowError::Eos),
            ReceiverItem::Flushing => Err(gst::FlowError::Flushing),
            ReceiverItem::Error(err) => Err(err),
        }
    }
}

19
src/ndisrc/mod.rs Normal file
View file

@ -0,0 +1,19 @@
use glib::prelude::*;

mod imp;

// Public wrapper type for the NdiSrc element implemented in `imp`.
glib::wrapper! {
    pub struct NdiSrc(ObjectSubclass<imp::NdiSrc>) @extends gst_base::BaseSrc, gst::Element, gst::Object;
}

// SAFETY: the wrapper is a refcounted pointer to a GStreamer element, which
// is usable from any thread. NOTE(review): confirm these manual impls are
// still required with the current glib bindings.
unsafe impl Send for NdiSrc {}
unsafe impl Sync for NdiSrc {}

/// Registers the "ndisrc" element with the given plugin.
/// Rank::None: never autoplugged, must be instantiated explicitly.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    gst::Element::register(
        Some(plugin),
        "ndisrc",
        gst::Rank::None,
        NdiSrc::static_type(),
    )
}

280
src/ndisrcdemux/imp.rs Normal file
View file

@ -0,0 +1,280 @@
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_error, gst_log};
use std::sync::Mutex;
use once_cell::sync::Lazy;
use crate::ndisrcmeta;
// Debug category used by all logging in this element ("ndisrcdemux").
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new(
        "ndisrcdemux",
        gst::DebugColorFlags::empty(),
        Some("NewTek NDI Source Demuxer"),
    )
});
// Mutable element state, guarded by NdiSrcDemux::state.
#[derive(Default)]
struct State {
    // Merges the flow returns of the audio/video source pads into a single
    // result for the sink chain function.
    combiner: gst_base::UniqueFlowCombiner,
    // Sometimes-pad for video and the caps last set on it; None until the
    // first video buffer arrives.
    video_pad: Option<gst::Pad>,
    video_caps: Option<gst::Caps>,
    // Same, for audio.
    audio_pad: Option<gst::Pad>,
    audio_caps: Option<gst::Caps>,
}
// Demuxer that splits the combined ndisrc output into audio and video pads
// based on the NdiSrcMeta attached to each buffer.
pub struct NdiSrcDemux {
    // Always-present sink pad, created in with_class().
    sinkpad: gst::Pad,
    state: Mutex<State>,
}
#[glib::object_subclass]
impl ObjectSubclass for NdiSrcDemux {
    const NAME: &'static str = "NdiSrcDemux";
    type Type = super::NdiSrcDemux;
    type ParentType = gst::Element;

    // Builds the always sink pad and routes its chain function to
    // sink_chain(), converting panics into flow errors.
    fn with_class(klass: &Self::Class) -> Self {
        let templ = klass.pad_template("sink").unwrap();
        let sinkpad = gst::Pad::builder_with_template(&templ, Some("sink"))
            .flags(gst::PadFlags::FIXED_CAPS)
            .chain_function(|pad, parent, buffer| {
                NdiSrcDemux::catch_panic_pad_function(
                    parent,
                    || Err(gst::FlowError::Error),
                    |self_, element| self_.sink_chain(pad, element, buffer),
                )
            })
            .build();

        Self {
            sinkpad,
            state: Mutex::new(State::default()),
        }
    }
}
impl ObjectImpl for NdiSrcDemux {
    // Adds the statically created sink pad as soon as the object exists.
    fn constructed(&self, obj: &Self::Type) {
        self.parent_constructed(obj);

        obj.add_pad(&self.sinkpad).unwrap();
    }
}
impl ElementImpl for NdiSrcDemux {
    /// Element metadata as shown by gst-inspect-1.0.
    fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
        static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
            gst::subclass::ElementMetadata::new(
                "NewTek NDI Source Demuxer",
                "Demuxer/Audio/Video",
                "NewTek NDI source demuxer",
                "Sebastian Dröge <sebastian@centricular.com>",
            )
        });

        Some(&*ELEMENT_METADATA)
    }

    /// One always sink pad taking application/x-ndi, plus sometimes source
    /// pads for raw audio and raw video.
    fn pad_templates() -> &'static [gst::PadTemplate] {
        static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
            let mut templates = Vec::with_capacity(3);

            templates.push(
                gst::PadTemplate::new(
                    "sink",
                    gst::PadDirection::Sink,
                    gst::PadPresence::Always,
                    &gst::Caps::builder("application/x-ndi").build(),
                )
                .unwrap(),
            );
            templates.push(
                gst::PadTemplate::new(
                    "audio",
                    gst::PadDirection::Src,
                    gst::PadPresence::Sometimes,
                    &gst::Caps::builder("audio/x-raw").build(),
                )
                .unwrap(),
            );
            templates.push(
                gst::PadTemplate::new(
                    "video",
                    gst::PadDirection::Src,
                    gst::PadPresence::Sometimes,
                    &gst::Caps::builder("video/x-raw").build(),
                )
                .unwrap(),
            );

            templates
        });

        PAD_TEMPLATES.as_ref()
    }

    /// On PAUSED→READY, removes the dynamically added source pads (audio
    /// first, then video) and resets all state.
    fn change_state(
        &self,
        element: &Self::Type,
        transition: gst::StateChange,
    ) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
        let res = self.parent_change_state(element, transition)?;

        if let gst::StateChange::PausedToReady = transition {
            let mut state = self.state.lock().unwrap();
            let removed = state
                .audio_pad
                .take()
                .into_iter()
                .chain(state.video_pad.take());
            for pad in removed {
                element.remove_pad(&pad).unwrap();
            }
            *state = State::default();
        }

        Ok(res)
    }
}
impl NdiSrcDemux {
    /// Chain function of the sink pad.
    ///
    /// Reads the NdiSrcMeta of the incoming buffer to decide whether it is
    /// audio or video, lazily creates the matching source pad on first use
    /// (replaying the sink pad's sticky events with this stream's caps event
    /// spliced in at the right position), pushes a fresh caps event whenever
    /// the caps change, forwards the buffer, and merges the flow return
    /// through the flow combiner.
    fn sink_chain(
        &self,
        pad: &gst::Pad,
        element: &super::NdiSrcDemux,
        buffer: gst::Buffer,
    ) -> Result<gst::FlowSuccess, gst::FlowError> {
        gst_log!(CAT, obj: pad, "Handling buffer {:?}", buffer);

        // A buffer without the meta cannot be routed; treat as fatal.
        let meta = buffer.meta::<ndisrcmeta::NdiSrcMeta>().ok_or_else(|| {
            gst_error!(CAT, obj: element, "Buffer without NDI source meta");
            gst::FlowError::Error
        })?;

        // Events to push on the source pad after the lock is released.
        let mut events = vec![];
        let srcpad;
        let mut add_pad = false;

        let mut state = self.state.lock().unwrap();
        let caps = meta.caps();
        match meta.stream_type() {
            ndisrcmeta::StreamType::Audio => {
                if let Some(ref pad) = state.audio_pad {
                    srcpad = pad.clone();
                } else {
                    gst_debug!(CAT, obj: element, "Adding audio pad with caps {}", caps);

                    let klass = element.element_class();
                    let templ = klass.pad_template("audio").unwrap();
                    let pad = gst::Pad::builder_with_template(&templ, Some("audio"))
                        .flags(gst::PadFlags::FIXED_CAPS)
                        .build();

                    // Replay the sink pad's sticky events on the new pad, but
                    // substitute our own caps event at the caps position so
                    // the required ordering (stream-start < caps < segment)
                    // is preserved.
                    let mut caps_event = Some(gst::event::Caps::new(&caps));
                    self.sinkpad.sticky_events_foreach(|ev| {
                        if ev.type_() < gst::EventType::Caps {
                            events.push(ev.clone());
                        } else {
                            // Insert our caps event once, before the first
                            // event at/after the caps position.
                            if let Some(ev) = caps_event.take() {
                                events.push(ev);
                            }

                            // Skip the upstream caps event; keep the rest.
                            if ev.type_() != gst::EventType::Caps {
                                events.push(ev.clone());
                            }
                        }

                        Ok(Some(ev))
                    });

                    state.audio_caps = Some(caps.clone());
                    state.audio_pad = Some(pad.clone());

                    // Activate and pre-store the sticky events before the pad
                    // is exposed on the element.
                    let _ = pad.set_active(true);
                    for ev in events.drain(..) {
                        let _ = pad.store_sticky_event(&ev);
                    }

                    state.combiner.add_pad(&pad);

                    add_pad = true;
                    srcpad = pad;
                }

                // For an existing pad: push new caps downstream on change.
                if state.audio_caps.as_ref() != Some(&caps) {
                    gst_debug!(CAT, obj: element, "Audio caps changed to {}", caps);
                    events.push(gst::event::Caps::new(&caps));
                    state.audio_caps = Some(caps);
                }
            }
            ndisrcmeta::StreamType::Video => {
                // Mirrors the audio branch above, for the video pad.
                if let Some(ref pad) = state.video_pad {
                    srcpad = pad.clone();
                } else {
                    gst_debug!(CAT, obj: element, "Adding video pad with caps {}", caps);

                    let klass = element.element_class();
                    let templ = klass.pad_template("video").unwrap();
                    let pad = gst::Pad::builder_with_template(&templ, Some("video"))
                        .flags(gst::PadFlags::FIXED_CAPS)
                        .build();

                    let mut caps_event = Some(gst::event::Caps::new(&caps));
                    self.sinkpad.sticky_events_foreach(|ev| {
                        if ev.type_() < gst::EventType::Caps {
                            events.push(ev.clone());
                        } else {
                            if let Some(ev) = caps_event.take() {
                                events.push(ev);
                            }

                            if ev.type_() != gst::EventType::Caps {
                                events.push(ev.clone());
                            }
                        }

                        Ok(Some(ev))
                    });

                    state.video_caps = Some(caps.clone());
                    state.video_pad = Some(pad.clone());

                    let _ = pad.set_active(true);
                    for ev in events.drain(..) {
                        let _ = pad.store_sticky_event(&ev);
                    }

                    state.combiner.add_pad(&pad);

                    add_pad = true;
                    srcpad = pad;
                }

                if state.video_caps.as_ref() != Some(&caps) {
                    gst_debug!(CAT, obj: element, "Video caps changed to {}", caps);
                    events.push(gst::event::Caps::new(&caps));
                    state.video_caps = Some(caps);
                }
            }
        }
        // Pad addition and event/buffer pushing must happen unlocked.
        drop(state);

        if add_pad {
            element.add_pad(&srcpad).unwrap();
        }

        for ev in events {
            srcpad.push_event(ev);
        }

        let res = srcpad.push(buffer);

        let mut state = self.state.lock().unwrap();
        state.combiner.update_pad_flow(&srcpad, res)
    }
}

19
src/ndisrcdemux/mod.rs Normal file
View file

@ -0,0 +1,19 @@
use glib::prelude::*;

mod imp;

// Public wrapper type for the NdiSrcDemux element implemented in `imp`.
glib::wrapper! {
    pub struct NdiSrcDemux(ObjectSubclass<imp::NdiSrcDemux>) @extends gst::Element, gst::Object;
}

// SAFETY: the wrapper is a refcounted pointer to a GStreamer element, which
// is usable from any thread. NOTE(review): confirm these manual impls are
// still required with the current glib bindings.
unsafe impl Send for NdiSrcDemux {}
unsafe impl Sync for NdiSrcDemux {}

/// Registers the "ndisrcdemux" element with the given plugin.
/// Rank::Primary: eligible for autoplugging.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    gst::Element::register(
        Some(plugin),
        "ndisrcdemux",
        gst::Rank::Primary,
        NdiSrcDemux::static_type(),
    )
}

158
src/ndisrcmeta.rs Normal file
View file

@ -0,0 +1,158 @@
use gst::prelude::*;
use std::fmt;
use std::mem;
// Transparent newtype over the C-layout meta struct in `imp`; all public
// access goes through the accessor methods below.
#[repr(transparent)]
pub struct NdiSrcMeta(imp::NdiSrcMeta);

// Which stream a buffer carries, as tagged by the ndisrc element.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum StreamType {
    Audio,
    Video,
}

// SAFETY: the meta only contains a gst::Caps (thread-safe, refcounted) and a
// plain Copy enum, both safe to send and share across threads.
unsafe impl Send for NdiSrcMeta {}
unsafe impl Sync for NdiSrcMeta {}
impl NdiSrcMeta {
    /// Attaches an `NdiSrcMeta` carrying `stream_type` and `caps` to `buffer`.
    pub fn add<'a>(
        buffer: &'a mut gst::BufferRef,
        stream_type: StreamType,
        caps: &gst::Caps,
    ) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
        unsafe {
            // Manually dropping because gst_buffer_add_meta() takes ownership of the
            // content of the struct
            let mut params = mem::ManuallyDrop::new(imp::NdiSrcMetaParams {
                caps: caps.clone(),
                stream_type,
            });

            // SAFETY: `params` stays valid for the duration of this call and
            // is consumed exactly once (via ptr::read) in ndi_src_meta_init().
            let meta = gst::ffi::gst_buffer_add_meta(
                buffer.as_mut_ptr(),
                imp::ndi_src_meta_get_info(),
                &mut *params as *mut imp::NdiSrcMetaParams as glib::ffi::gpointer,
            ) as *mut imp::NdiSrcMeta;

            Self::from_mut_ptr(buffer, meta)
        }
    }

    // Stream type stored in this meta.
    pub fn stream_type(&self) -> StreamType {
        self.0.stream_type
    }

    // Caps stored in this meta (returns a new reference).
    pub fn caps(&self) -> gst::Caps {
        self.0.caps.clone()
    }
}
// SAFETY: GstType matches the struct layout registered in `imp`, and
// meta_api() returns the type registered for exactly that struct.
unsafe impl MetaAPI for NdiSrcMeta {
    type GstType = imp::NdiSrcMeta;

    fn meta_api() -> glib::Type {
        imp::ndi_src_meta_api_get_type()
    }
}
impl fmt::Debug for NdiSrcMeta {
    // Debug output is built from the public accessors rather than the raw
    // FFI fields.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut dbg = f.debug_struct("NdiSrcMeta");
        dbg.field("stream_type", &self.stream_type());
        dbg.field("caps", &self.caps());
        dbg.finish()
    }
}
mod imp {
    use super::StreamType;
    use glib::translate::*;
    use once_cell::sync::Lazy;
    use std::mem;
    use std::ptr;

    // Parameters passed to ndi_src_meta_init() through gst_buffer_add_meta().
    pub(super) struct NdiSrcMetaParams {
        pub caps: gst::Caps,
        pub stream_type: StreamType,
    }

    // C-layout meta struct as registered with GStreamer; must begin with the
    // GstMeta parent.
    #[repr(C)]
    pub struct NdiSrcMeta {
        parent: gst::ffi::GstMeta,
        pub(super) caps: gst::Caps,
        pub(super) stream_type: StreamType,
    }

    // Registers the meta API type once (no tags) and returns it.
    pub(super) fn ndi_src_meta_api_get_type() -> glib::Type {
        static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
            let t = from_glib(gst::ffi::gst_meta_api_type_register(
                b"GstNdiSrcMetaAPI\0".as_ptr() as *const _,
                // NULL-terminated, empty tag list.
                [ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
            ));

            assert_ne!(t, glib::Type::INVALID);

            t
        });

        *TYPE
    }

    // Init callback: moves the params into the not-yet-initialized meta
    // fields. SAFETY contract: `params` must point to a valid
    // NdiSrcMetaParams whose ownership transfers here (the caller wraps it in
    // ManuallyDrop), and the fields are written with ptr::write because the
    // meta memory is uninitialized — no Drop must run for the old bytes.
    unsafe extern "C" fn ndi_src_meta_init(
        meta: *mut gst::ffi::GstMeta,
        params: glib::ffi::gpointer,
        _buffer: *mut gst::ffi::GstBuffer,
    ) -> glib::ffi::gboolean {
        assert!(!params.is_null());

        let meta = &mut *(meta as *mut NdiSrcMeta);
        let params = ptr::read(params as *const NdiSrcMetaParams);

        ptr::write(&mut meta.stream_type, params.stream_type);
        ptr::write(&mut meta.caps, params.caps);

        true.into_glib()
    }

    // Free callback: drops the fields in place; the meta memory itself is
    // owned and released by GStreamer.
    unsafe extern "C" fn ndi_src_meta_free(
        meta: *mut gst::ffi::GstMeta,
        _buffer: *mut gst::ffi::GstBuffer,
    ) {
        let meta = &mut *(meta as *mut NdiSrcMeta);

        ptr::drop_in_place(&mut meta.stream_type);
        ptr::drop_in_place(&mut meta.caps);
    }

    // Transform callback: returning FALSE means this meta is never copied to
    // derived buffers.
    unsafe extern "C" fn ndi_src_meta_transform(
        _dest: *mut gst::ffi::GstBuffer,
        _meta: *mut gst::ffi::GstMeta,
        _buffer: *mut gst::ffi::GstBuffer,
        _type_: glib::ffi::GQuark,
        _data: glib::ffi::gpointer,
    ) -> glib::ffi::gboolean {
        false.into_glib()
    }

    // Registers the GstMetaInfo once and returns the process-global pointer.
    pub(super) fn ndi_src_meta_get_info() -> *const gst::ffi::GstMetaInfo {
        // Wrapper so the raw pointer can be kept in a static.
        struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
        // SAFETY: the registered GstMetaInfo is immutable and lives for the
        // whole process, so sharing the pointer between threads is sound.
        unsafe impl Send for MetaInfo {}
        unsafe impl Sync for MetaInfo {}

        static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
            MetaInfo(
                ptr::NonNull::new(gst::ffi::gst_meta_register(
                    ndi_src_meta_api_get_type().into_glib(),
                    b"GstNdiSrcMeta\0".as_ptr() as *const _,
                    mem::size_of::<NdiSrcMeta>(),
                    Some(ndi_src_meta_init),
                    Some(ndi_src_meta_free),
                    Some(ndi_src_meta_transform),
                ) as *mut gst::ffi::GstMetaInfo)
                .expect("Failed to register meta API"),
            )
        });

        META_INFO.0.as_ptr()
    }
}

View file

@ -39,10 +39,10 @@ extern "C" {
p_instance: NDIlib_recv_instance_t,
p_metadata: *const NDIlib_metadata_frame_t,
) -> bool;
pub fn NDIlib_recv_capture_v2(
pub fn NDIlib_recv_capture_v3(
p_instance: NDIlib_recv_instance_t,
p_video_data: *mut NDIlib_video_frame_v2_t,
p_audio_data: *mut NDIlib_audio_frame_v2_t,
p_audio_data: *mut NDIlib_audio_frame_v3_t,
p_metadata: *mut NDIlib_metadata_frame_t,
timeout_in_ms: u32,
) -> NDIlib_frame_type_e;
@ -50,9 +50,9 @@ extern "C" {
p_instance: NDIlib_recv_instance_t,
p_video_data: *mut NDIlib_video_frame_v2_t,
);
pub fn NDIlib_recv_free_audio_v2(
pub fn NDIlib_recv_free_audio_v3(
p_instance: NDIlib_recv_instance_t,
p_audio_data: *mut NDIlib_audio_frame_v2_t,
p_audio_data: *mut NDIlib_audio_frame_v3_t,
);
pub fn NDIlib_recv_free_metadata(
p_instance: NDIlib_recv_instance_t,
@ -70,9 +70,9 @@ extern "C" {
p_instance: NDIlib_send_instance_t,
p_video_data: *const NDIlib_video_frame_v2_t,
);
pub fn NDIlib_send_send_audio_v2(
pub fn NDIlib_send_send_audio_v3(
p_instance: NDIlib_send_instance_t,
p_audio_data: *const NDIlib_audio_frame_v2_t,
p_audio_data: *const NDIlib_audio_frame_v3_t,
);
}
@ -111,29 +111,110 @@ pub const NDIlib_recv_bandwidth_audio_only: NDIlib_recv_bandwidth_e = 10;
pub const NDIlib_recv_bandwidth_lowest: NDIlib_recv_bandwidth_e = 0;
pub const NDIlib_recv_bandwidth_highest: NDIlib_recv_bandwidth_e = 100;
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum NDIlib_recv_color_format_e {
NDIlib_recv_color_format_BGRX_BGRA = 0,
NDIlib_recv_color_format_UYVY_BGRA = 1,
NDIlib_recv_color_format_RGBX_RGBA = 2,
NDIlib_recv_color_format_UYVY_RGBA = 3,
NDIlib_recv_color_format_fastest = 100,
NDIlib_recv_color_format_best = 101,
pub type NDIlib_recv_color_format_e = u32;
pub const NDIlib_recv_color_format_BGRX_BGRA: NDIlib_recv_color_format_e = 0;
pub const NDIlib_recv_color_format_UYVY_BGRA: NDIlib_recv_color_format_e = 1;
pub const NDIlib_recv_color_format_RGBX_RGBA: NDIlib_recv_color_format_e = 2;
pub const NDIlib_recv_color_format_UYVY_RGBA: NDIlib_recv_color_format_e = 3;
pub const NDIlib_recv_color_format_fastest: NDIlib_recv_color_format_e = 100;
pub const NDIlib_recv_color_format_best: NDIlib_recv_color_format_e = 101;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed: NDIlib_recv_color_format_e = 300;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v2: NDIlib_recv_color_format_e = 301;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v3: NDIlib_recv_color_format_e = 302;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v3_with_audio: NDIlib_recv_color_format_e = 304;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v4: NDIlib_recv_color_format_e = 303;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v4_with_audio: NDIlib_recv_color_format_e = 305;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v5: NDIlib_recv_color_format_e = 307;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_recv_color_format_ex_compressed_v5_with_audio: NDIlib_recv_color_format_e = 308;
/// Builds an NDI FourCC code from its four ASCII bytes, first byte in the
/// least significant position (e.g. `b"UYVY"` becomes `0x59_56_59_55`).
const fn make_fourcc(fourcc: &[u8; 4]) -> u32 {
    // Equivalent to the previous manual shift chain (which included a no-op
    // `<< 0` flagged by clippy::identity_op): a little-endian u32 read.
    u32::from_le_bytes(*fourcc)
}
pub type NDIlib_FourCC_video_type_e = u32;
pub const NDIlib_FourCC_video_type_UYVY: NDIlib_FourCC_video_type_e = 0x59_56_59_55;
pub const NDIlib_FourCC_video_type_UYVA: NDIlib_FourCC_video_type_e = 0x41_56_56_55;
pub const NDIlib_FourCC_video_type_P216: NDIlib_FourCC_video_type_e = 0x36_31_32_50;
pub const NDIlib_FourCC_video_type_PA16: NDIlib_FourCC_video_type_e = 0x36_31_41_50;
pub const NDIlib_FourCC_video_type_YV12: NDIlib_FourCC_video_type_e = 0x32_31_56_59;
pub const NDIlib_FourCC_video_type_I420: NDIlib_FourCC_video_type_e = 0x30_32_34_49;
pub const NDIlib_FourCC_video_type_NV12: NDIlib_FourCC_video_type_e = 0x32_31_56_4e;
pub const NDIlib_FourCC_video_type_BGRA: NDIlib_FourCC_video_type_e = 0x41_52_47_42;
pub const NDIlib_FourCC_video_type_BGRX: NDIlib_FourCC_video_type_e = 0x58_52_47_42;
pub const NDIlib_FourCC_video_type_RGBA: NDIlib_FourCC_video_type_e = 0x41_42_47_52;
pub const NDIlib_FourCC_video_type_RGBX: NDIlib_FourCC_video_type_e = 0x58_42_47_52;
pub const NDIlib_FourCC_video_type_UYVY: NDIlib_FourCC_video_type_e = make_fourcc(b"UYVY");
pub const NDIlib_FourCC_video_type_UYVA: NDIlib_FourCC_video_type_e = make_fourcc(b"UYVA");
pub const NDIlib_FourCC_video_type_P216: NDIlib_FourCC_video_type_e = make_fourcc(b"P216");
pub const NDIlib_FourCC_video_type_PA16: NDIlib_FourCC_video_type_e = make_fourcc(b"PA16");
pub const NDIlib_FourCC_video_type_YV12: NDIlib_FourCC_video_type_e = make_fourcc(b"YV12");
pub const NDIlib_FourCC_video_type_I420: NDIlib_FourCC_video_type_e = make_fourcc(b"I420");
pub const NDIlib_FourCC_video_type_NV12: NDIlib_FourCC_video_type_e = make_fourcc(b"NV12");
pub const NDIlib_FourCC_video_type_BGRA: NDIlib_FourCC_video_type_e = make_fourcc(b"BGRA");
pub const NDIlib_FourCC_video_type_BGRX: NDIlib_FourCC_video_type_e = make_fourcc(b"BGRX");
pub const NDIlib_FourCC_video_type_RGBA: NDIlib_FourCC_video_type_e = make_fourcc(b"RGBA");
pub const NDIlib_FourCC_video_type_RGBX: NDIlib_FourCC_video_type_e = make_fourcc(b"RGBX");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"SHQ0");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"SHQ2");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"SHQ7");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"shq0");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"shq2");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"shq7");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_H264_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"H264");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_H264_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"h264");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_HEVC_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"HEVC");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_HEVC_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"hevc");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_H264_alpha_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"A264");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_H264_alpha_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"a264");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_HEVC_alpha_highest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"AEVC");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_video_type_ex_HEVC_alpha_lowest_bandwidth: NDIlib_FourCC_video_type_e =
make_fourcc(b"aevc");
pub type NDIlib_FourCC_audio_type_e = u32;
pub const NDIlib_FourCC_audio_type_FLTp: NDIlib_FourCC_video_type_e = make_fourcc(b"FLTp");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_audio_type_AAC: NDIlib_FourCC_audio_type_e = 0x000000ff;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_FourCC_audio_type_Opus: NDIlib_FourCC_audio_type_e = make_fourcc(b"Opus");
#[cfg(feature = "advanced-sdk")]
pub type NDIlib_compressed_FourCC_type_e = u32;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_compressed_FourCC_type_H264: NDIlib_compressed_FourCC_type_e =
make_fourcc(b"H264");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_compressed_FourCC_type_HEVC: NDIlib_compressed_FourCC_type_e =
make_fourcc(b"HEVC");
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_compressed_FourCC_type_AAC: NDIlib_compressed_FourCC_type_e = 0x000000ff;
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@ -212,36 +293,34 @@ pub struct NDIlib_video_frame_v2_t {
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct NDIlib_audio_frame_v2_t {
pub struct NDIlib_audio_frame_v3_t {
pub sample_rate: ::std::os::raw::c_int,
pub no_channels: ::std::os::raw::c_int,
pub no_samples: ::std::os::raw::c_int,
pub timecode: i64,
pub FourCC: NDIlib_FourCC_audio_type_e,
pub p_data: *const ::std::os::raw::c_float,
pub channel_stride_in_bytes: ::std::os::raw::c_int,
pub channel_stride_or_data_size_in_bytes: ::std::os::raw::c_int,
pub p_metadata: *const ::std::os::raw::c_char,
pub timestamp: i64,
}
extern "C" {
pub fn NDIlib_util_audio_to_interleaved_16s_v2(
p_src: *const NDIlib_audio_frame_v2_t,
p_dst: *mut NDIlib_audio_frame_interleaved_16s_t,
);
pub fn NDIlib_util_audio_from_interleaved_16s_v2(
p_src: *const NDIlib_audio_frame_interleaved_16s_t,
p_dst: *mut NDIlib_audio_frame_v2_t,
);
}
#[repr(C)]
#[cfg(feature = "advanced-sdk")]
#[repr(packed)]
#[derive(Debug, Copy, Clone)]
pub struct NDIlib_audio_frame_interleaved_16s_t {
pub sample_rate: ::std::os::raw::c_int,
pub no_channels: ::std::os::raw::c_int,
pub no_samples: ::std::os::raw::c_int,
pub timecode: i64,
pub reference_level: ::std::os::raw::c_int,
pub p_data: *mut i16,
pub struct NDIlib_compressed_packet_t {
pub version: u32,
pub fourcc: NDIlib_compressed_FourCC_type_e,
pub pts: i64,
pub dts: i64,
pub reserved: u64,
pub flags: u32,
pub data_size: u32,
pub extra_data_size: u32,
}
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_compressed_packet_flags_keyframe: u32 = 1;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_compressed_packet_version_0: u32 = 44;

View file

@ -1,602 +0,0 @@
use glib::subclass;
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_element_error, gst_error, gst_error_msg};
use gst_base::prelude::*;
use gst_base::subclass::base_src::CreateSuccess;
use gst_base::subclass::prelude::*;
use std::sync::Mutex;
use std::{i32, u32};
use crate::ndisys;
use crate::connect_ndi;
use crate::Receiver;
use crate::ReceiverControlHandle;
use crate::ReceiverItem;
use crate::TimestampMode;
use crate::VideoReceiver;
use crate::DEFAULT_RECEIVER_NDI_NAME;
// User-settable properties of the legacy ndivideosrc element.
#[derive(Debug, Clone)]
struct Settings {
    // NDI stream name of the sender; at least one of ndi_name/url_address
    // must be set before start() succeeds.
    ndi_name: Option<String>,
    url_address: Option<String>,
    // Connection and receive timeouts, in milliseconds.
    connect_timeout: u32,
    timeout: u32,
    // NDI name under which this receiver announces itself.
    receiver_ndi_name: String,
    bandwidth: ndisys::NDIlib_recv_bandwidth_e,
    // Which NDI timestamp information drives the outgoing PTS.
    timestamp_mode: TimestampMode,
}
impl Default for Settings {
    // Defaults mirror the property defaults declared in PROPERTIES.
    fn default() -> Self {
        Self {
            ndi_name: None,
            url_address: None,
            connect_timeout: 10000,
            timeout: 5000,
            receiver_ndi_name: DEFAULT_RECEIVER_NDI_NAME.clone(),
            bandwidth: ndisys::NDIlib_recv_bandwidth_highest,
            timestamp_mode: TimestampMode::ReceiveTimeTimecode,
        }
    }
}
// Property table for the legacy glib subclassing API; array indices must
// match the `id` handed to set_property()/get_property().
static PROPERTIES: [subclass::Property; 7] = [
    subclass::Property("ndi-name", |name| {
        glib::ParamSpec::string(
            name,
            "NDI Name",
            "NDI stream name of the sender",
            None,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("url-address", |name| {
        glib::ParamSpec::string(
            name,
            "URL/Address",
            "URL/address and port of the sender, e.g. 127.0.0.1:5961",
            None,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("receiver-ndi-name", |name| {
        glib::ParamSpec::string(
            name,
            "Receiver NDI Name",
            "NDI stream name of this receiver",
            Some(&*DEFAULT_RECEIVER_NDI_NAME),
            glib::ParamFlags::READWRITE,
        )
    }),
    // uint range 0..=u32::MAX, default 10000 ms.
    subclass::Property("connect-timeout", |name| {
        glib::ParamSpec::uint(
            name,
            "Connect Timeout",
            "Connection timeout in ms",
            0,
            u32::MAX,
            10000,
            glib::ParamFlags::READWRITE,
        )
    }),
    // uint range 0..=u32::MAX, default 5000 ms.
    subclass::Property("timeout", |name| {
        glib::ParamSpec::uint(
            name,
            "Timeout",
            "Receive timeout in ms",
            0,
            u32::MAX,
            5000,
            glib::ParamFlags::READWRITE,
        )
    }),
    // Matches the NDIlib_recv_bandwidth_e constants (-10/10/100).
    subclass::Property("bandwidth", |name| {
        glib::ParamSpec::int(
            name,
            "Bandwidth",
            "Bandwidth, -10 metadata-only, 10 audio-only, 100 highest",
            -10,
            100,
            100,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("timestamp-mode", |name| {
        glib::ParamSpec::enum_(
            name,
            "Timestamp Mode",
            "Timestamp information to use for outgoing PTS",
            TimestampMode::static_type(),
            TimestampMode::ReceiveTimeTimecode as i32,
            glib::ParamFlags::READWRITE,
        )
    }),
];
// Runtime state of the legacy video source.
struct State {
    // Video format of the last received frame; None until the first buffer.
    info: Option<gst_video::VideoInfo>,
    // Frame-duration derived latency; CLOCK_TIME_NONE until known.
    current_latency: gst::ClockTime,
    receiver: Option<Receiver<VideoReceiver>>,
}
impl Default for State {
    // Fresh state: nothing received yet, latency unknown, no receiver.
    fn default() -> State {
        Self {
            receiver: None,
            info: None,
            current_latency: gst::CLOCK_TIME_NONE,
        }
    }
}
// Legacy NDI video source element (glib 0.10-era subclassing API).
pub(crate) struct NdiVideoSrc {
    // Per-element debug category, created in new().
    cat: gst::DebugCategory,
    settings: Mutex<Settings>,
    state: Mutex<State>,
    // Handle used to pause/flush/shut down the receiver from other threads.
    receiver_controller: Mutex<Option<ReceiverControlHandle<VideoReceiver>>>,
}
impl ObjectSubclass for NdiVideoSrc {
    const NAME: &'static str = "NdiVideoSrc";
    type ParentType = gst_base::BaseSrc;
    type Instance = gst::subclass::ElementInstanceStruct<Self>;
    type Class = subclass::simple::ClassStruct<Self>;

    glib::glib_object_subclass!();

    // Sets up the debug category and default settings/state.
    fn new() -> Self {
        Self {
            cat: gst::DebugCategory::new(
                "ndivideosrc",
                gst::DebugColorFlags::empty(),
                Some("NewTek NDI Video Source"),
            ),
            settings: Mutex::new(Default::default()),
            state: Mutex::new(Default::default()),
            receiver_controller: Mutex::new(None),
        }
    }

    // Registers element metadata, the src pad template and the properties.
    fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
        klass.set_metadata(
            "NewTek NDI Video Source",
            "Source",
            "NewTek NDI video source",
            "Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>, Sebastian Dröge <sebastian@centricular.com>",
        );

        // Raw video caps for every format the receiver can produce, with
        // unconstrained width/height/framerate. (The previous comment here
        // mentioning F32/F64 audio was copy-pasted from the audio source.)
        let caps = gst::Caps::new_simple(
            "video/x-raw",
            &[
                (
                    "format",
                    &gst::List::new(&[
                        &gst_video::VideoFormat::Uyvy.to_string(),
                        &gst_video::VideoFormat::Yv12.to_string(),
                        &gst_video::VideoFormat::Nv12.to_string(),
                        &gst_video::VideoFormat::I420.to_string(),
                        &gst_video::VideoFormat::Bgra.to_string(),
                        &gst_video::VideoFormat::Bgrx.to_string(),
                        &gst_video::VideoFormat::Rgba.to_string(),
                        &gst_video::VideoFormat::Rgbx.to_string(),
                    ]),
                ),
                ("width", &gst::IntRange::<i32>::new(0, i32::MAX)),
                ("height", &gst::IntRange::<i32>::new(0, i32::MAX)),
                (
                    "framerate",
                    &gst::FractionRange::new(
                        gst::Fraction::new(0, 1),
                        gst::Fraction::new(i32::MAX, 1),
                    ),
                ),
            ],
        );

        // With interlaced-fields enabled, additionally offer the same caps
        // with the format:Interlaced caps feature.
        #[cfg(feature = "interlaced-fields")]
        let caps = {
            let mut tmp = caps.copy();
            {
                let tmp = tmp.get_mut().unwrap();
                tmp.set_features_simple(Some(gst::CapsFeatures::new(&["format:Interlaced"])));
            }

            let mut caps = caps;
            {
                let caps = caps.get_mut().unwrap();
                caps.append(tmp);
            }
            caps
        };

        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &caps,
        )
        .unwrap();
        klass.add_pad_template(src_pad_template);

        klass.install_properties(&PROPERTIES);
    }
}
impl ObjectImpl for NdiVideoSrc {
    glib::glib_object_impl!();

    // Marks the source as live and time-based right after construction.
    fn constructed(&self, obj: &glib::Object) {
        self.parent_constructed(obj);

        let basesrc = obj.downcast_ref::<gst_base::BaseSrc>().unwrap();
        // Initialize live-ness and notify the base class that
        // we'd like to operate in Time format
        basesrc.set_live(true);
        basesrc.set_format(gst::Format::Time);
    }

    // Writes a property into Settings; `id` indexes the PROPERTIES table.
    fn set_property(&self, obj: &glib::Object, id: usize, value: &glib::Value) {
        let prop = &PROPERTIES[id];
        let basesrc = obj.downcast_ref::<gst_base::BaseSrc>().unwrap();

        match *prop {
            subclass::Property("ndi-name", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let ndi_name = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing ndi-name from {:?} to {:?}",
                    settings.ndi_name,
                    ndi_name,
                );
                settings.ndi_name = ndi_name;
            }
            subclass::Property("url-address", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let url_address = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing url-address from {:?} to {:?}",
                    settings.url_address,
                    url_address,
                );
                settings.url_address = url_address;
            }
            subclass::Property("receiver-ndi-name", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let receiver_ndi_name = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing receiver-ndi-name from {:?} to {:?}",
                    settings.receiver_ndi_name,
                    receiver_ndi_name,
                );
                // An unset (None) value falls back to the library default.
                settings.receiver_ndi_name =
                    receiver_ndi_name.unwrap_or_else(|| DEFAULT_RECEIVER_NDI_NAME.clone());
            }
            subclass::Property("connect-timeout", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let connect_timeout = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing connect-timeout from {} to {}",
                    settings.connect_timeout,
                    connect_timeout,
                );
                settings.connect_timeout = connect_timeout;
            }
            subclass::Property("timeout", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let timeout = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing timeout from {} to {}",
                    settings.timeout,
                    timeout,
                );
                settings.timeout = timeout;
            }
            subclass::Property("bandwidth", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let bandwidth = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing bandwidth from {} to {}",
                    settings.bandwidth,
                    bandwidth,
                );
                settings.bandwidth = bandwidth;
            }
            subclass::Property("timestamp-mode", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let timestamp_mode = value.get_some().unwrap();
                gst_debug!(
                    self.cat,
                    obj: basesrc,
                    "Changing timestamp mode from {:?} to {:?}",
                    settings.timestamp_mode,
                    timestamp_mode
                );
                // A different timestamp mode changes the reported latency, so
                // ask the pipeline to recalculate it.
                if settings.timestamp_mode != timestamp_mode {
                    let _ =
                        basesrc.post_message(gst::message::Latency::builder().src(basesrc).build());
                }
                settings.timestamp_mode = timestamp_mode;
            }
            _ => unimplemented!(),
        }
    }

    // Reads the current value of a property from Settings.
    fn get_property(&self, _obj: &glib::Object, id: usize) -> Result<glib::Value, ()> {
        let prop = &PROPERTIES[id];

        match *prop {
            subclass::Property("ndi-name", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.ndi_name.to_value())
            }
            subclass::Property("url-address", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.url_address.to_value())
            }
            subclass::Property("receiver-ndi-name", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.receiver_ndi_name.to_value())
            }
            subclass::Property("connect-timeout", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.connect_timeout.to_value())
            }
            subclass::Property("timeout", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.timeout.to_value())
            }
            subclass::Property("bandwidth", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.bandwidth.to_value())
            }
            subclass::Property("timestamp-mode", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.timestamp_mode.to_value())
            }
            _ => unimplemented!(),
        }
    }
}
impl ElementImpl for NdiVideoSrc {
    // Adjusts the receiver before the base class handles the transition:
    // resume/pause capture around PLAYING, shut down when leaving PAUSED.
    fn change_state(
        &self,
        element: &gst::Element,
        transition: gst::StateChange,
    ) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
        let controller = self.receiver_controller.lock().unwrap();
        if let Some(ref controller) = *controller {
            match transition {
                gst::StateChange::PausedToPlaying => controller.set_playing(true),
                gst::StateChange::PlayingToPaused => controller.set_playing(false),
                gst::StateChange::PausedToReady => controller.shutdown(),
                _ => (),
            }
        }
        drop(controller);

        self.parent_change_state(element, transition)
    }
}
impl BaseSrcImpl for NdiVideoSrc {
// Negotiation is a no-op for this source.
fn negotiate(&self, _element: &gst_base::BaseSrc) -> Result<(), gst::LoggableError> {
    // Always succeed here without doing anything: we will set the caps once we received a
    // buffer, there's nothing we can negotiate
    Ok(())
}
fn unlock(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
    // Put the receiver into flushing mode so a blocking capture returns.
    gst_debug!(self.cat, obj: element, "Unlocking",);
    let controller = self.receiver_controller.lock().unwrap();
    if let Some(ref controller) = *controller {
        controller.set_flushing(true);
    }
    Ok(())
}
fn unlock_stop(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
gst_debug!(self.cat, obj: element, "Stop unlocking",);
if let Some(ref controller) = *self.receiver_controller.lock().unwrap() {
controller.set_flushing(false);
}
Ok(())
}
fn start(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
*self.state.lock().unwrap() = Default::default();
let settings = self.settings.lock().unwrap().clone();
if settings.ndi_name.is_none() && settings.url_address.is_none() {
return Err(gst_error_msg!(
gst::LibraryError::Settings,
["No NDI name or URL/address given"]
));
}
let receiver = connect_ndi(
self.cat,
element,
settings.ndi_name.as_deref(),
settings.url_address.as_deref(),
&settings.receiver_ndi_name,
settings.connect_timeout,
settings.bandwidth,
settings.timestamp_mode,
settings.timeout,
);
// settings.id_receiver exists
match receiver {
None => Err(gst_error_msg!(
gst::ResourceError::NotFound,
["Could not connect to this source"]
)),
Some(receiver) => {
*self.receiver_controller.lock().unwrap() =
Some(receiver.receiver_control_handle());
let mut state = self.state.lock().unwrap();
state.receiver = Some(receiver);
Ok(())
}
}
}
fn stop(&self, _element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
if let Some(ref controller) = self.receiver_controller.lock().unwrap().take() {
controller.shutdown();
}
*self.state.lock().unwrap() = State::default();
Ok(())
}
fn query(&self, element: &gst_base::BaseSrc, query: &mut gst::QueryRef) -> bool {
use gst::QueryView;
match query.view_mut() {
QueryView::Scheduling(ref mut q) => {
q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0);
q.add_scheduling_modes(&[gst::PadMode::Push]);
true
}
QueryView::Latency(ref mut q) => {
let state = self.state.lock().unwrap();
let settings = self.settings.lock().unwrap();
if state.current_latency.is_some() {
let min = if settings.timestamp_mode != TimestampMode::Timecode {
state.current_latency
} else {
0.into()
};
let max = 5 * state.current_latency;
println!("Returning latency min {} max {}", min, max,);
gst_debug!(
self.cat,
obj: element,
"Returning latency min {} max {}",
min,
max
);
q.set(true, min, max);
true
} else {
false
}
}
_ => BaseSrcImplExt::parent_query(self, element, query),
}
}
fn fixate(&self, element: &gst_base::BaseSrc, mut caps: gst::Caps) -> gst::Caps {
caps.truncate();
{
let caps = caps.make_mut();
let s = caps.get_mut_structure(0).unwrap();
s.fixate_field_nearest_int("width", 1920);
s.fixate_field_nearest_int("height", 1080);
if s.has_field("pixel-aspect-ratio") {
s.fixate_field_nearest_fraction("pixel-aspect-ratio", gst::Fraction::new(1, 1));
}
}
self.parent_fixate(element, caps)
}
//Creates the video buffers
fn create(
&self,
element: &gst_base::BaseSrc,
_offset: u64,
_buffer: Option<&mut gst::BufferRef>,
_length: u32,
) -> Result<CreateSuccess, gst::FlowError> {
let recv = {
let mut state = self.state.lock().unwrap();
match state.receiver.take() {
Some(recv) => recv,
None => {
gst_error!(self.cat, obj: element, "Have no receiver");
return Err(gst::FlowError::Error);
}
}
};
match recv.capture() {
ReceiverItem::Buffer(buffer, info) => {
let mut state = self.state.lock().unwrap();
state.receiver = Some(recv);
if state.info.as_ref() != Some(&info) {
let caps = info.to_caps().map_err(|_| {
gst_element_error!(
element,
gst::ResourceError::Settings,
["Invalid audio info received: {:?}", info]
);
gst::FlowError::NotNegotiated
})?;
state.info = Some(info);
state.current_latency = buffer.get_duration();
drop(state);
gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);
element.set_caps(&caps).map_err(|_| {
gst_element_error!(
element,
gst::CoreError::Negotiation,
["Failed to negotiate caps: {:?}", caps]
);
gst::FlowError::NotNegotiated
})?;
let _ =
element.post_message(gst::message::Latency::builder().src(element).build());
}
Ok(CreateSuccess::NewBuffer(buffer))
}
ReceiverItem::Timeout => Err(gst::FlowError::Eos),
ReceiverItem::Flushing => Err(gst::FlowError::Flushing),
ReceiverItem::Error(err) => Err(err),
}
}
}
/// Registers the `ndivideosrc` element with the given plugin.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    let element_type = NdiVideoSrc::get_type();
    gst::Element::register(Some(plugin), "ndivideosrc", gst::Rank::None, element_type)
}

File diff suppressed because it is too large Load diff