Move to NDIlib_recv_capture_v3() and NDIlib_send_send_audio_v3()

These allow more control over the data that is being sent/received, but
require NDI SDK 4.0 or newer.
This commit is contained in:
Sebastian Dröge 2021-09-30 22:44:31 +03:00
parent 7483a66b66
commit 8cf682d72b
5 changed files with 150 additions and 123 deletions

View file

@@ -292,7 +292,7 @@ impl RecvInstance {
let mut audio_frame = mem::zeroed();
let mut metadata_frame = mem::zeroed();
let res = NDIlib_recv_capture_v2(
let res = NDIlib_recv_capture_v3(
ptr,
&mut video_frame,
&mut audio_frame,
@@ -386,7 +386,7 @@ impl SendInstance {
pub fn send_audio(&mut self, frame: &AudioFrame) {
unsafe {
NDIlib_send_send_audio_v2(self.0.as_ptr(), frame.as_ptr());
NDIlib_send_send_audio_v3(self.0.as_ptr(), frame.as_ptr());
}
}
}
@@ -501,7 +501,26 @@ impl<'a> VideoFrame<'a> {
}
}
pub fn data(&self) -> &[u8] {
pub fn data(&self) -> Option<&[u8]> {
let fourcc = self.fourcc();
if ![
NDIlib_FourCC_video_type_UYVY,
NDIlib_FourCC_video_type_UYVA,
NDIlib_FourCC_video_type_P216,
NDIlib_FourCC_video_type_PA16,
NDIlib_FourCC_video_type_YV12,
NDIlib_FourCC_video_type_I420,
NDIlib_FourCC_video_type_NV12,
NDIlib_FourCC_video_type_BGRA,
NDIlib_FourCC_video_type_BGRX,
NDIlib_FourCC_video_type_RGBA,
NDIlib_FourCC_video_type_RGBX,
]
.contains(&fourcc)
{
return None;
}
// FIXME: Unclear if this is correct. Needs to be validated against an actual
// interlaced stream
let frame_size = if self.frame_format_type()
@@ -518,7 +537,10 @@ impl<'a> VideoFrame<'a> {
use std::slice;
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
slice::from_raw_parts(frame.p_data as *const u8, frame_size as usize)
Some(slice::from_raw_parts(
frame.p_data as *const u8,
frame_size as usize,
))
}
}
}
@@ -710,11 +732,11 @@ impl<'a> Drop for VideoFrame<'a> {
#[derive(Debug)]
pub enum AudioFrame<'a> {
Owned(
NDIlib_audio_frame_v2_t,
NDIlib_audio_frame_v3_t,
Option<ffi::CString>,
Option<Vec<f32>>,
),
BorrowedRecv(NDIlib_audio_frame_v2_t, &'a RecvInstance),
BorrowedRecv(NDIlib_audio_frame_v3_t, &'a RecvInstance),
}
impl<'a> AudioFrame<'a> {
@@ -750,24 +772,39 @@ impl<'a> AudioFrame<'a> {
}
}
pub fn data(&self) -> &[u8] {
pub fn fourcc(&self) -> NDIlib_FourCC_audio_type_e {
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
frame.FourCC
}
}
}
pub fn data(&self) -> Option<&[u8]> {
unsafe {
use std::slice;
let fourcc = self.fourcc();
if ![NDIlib_FourCC_audio_type_FLTp].contains(&fourcc) {
return None;
}
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
slice::from_raw_parts(
Some(slice::from_raw_parts(
frame.p_data as *const u8,
(frame.no_samples * frame.channel_stride_in_bytes) as usize,
)
(frame.no_channels * frame.channel_stride_or_data_size_in_bytes) as usize,
))
}
}
}
}
pub fn channel_stride_in_bytes(&self) -> i32 {
pub fn channel_stride_or_data_size_in_bytes(&self) -> i32 {
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => {
frame.channel_stride_in_bytes
frame.channel_stride_or_data_size_in_bytes
}
}
}
@@ -794,73 +831,55 @@ impl<'a> AudioFrame<'a> {
}
}
pub fn as_ptr(&self) -> *const NDIlib_audio_frame_v2_t {
pub fn as_ptr(&self) -> *const NDIlib_audio_frame_v3_t {
match self {
AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => frame,
}
}
pub fn copy_to_interleaved_16s(&self, data: &mut [i16]) {
assert_eq!(
data.len(),
(self.no_samples() * self.no_channels()) as usize
);
let mut dst = NDIlib_audio_frame_interleaved_16s_t {
sample_rate: self.sample_rate(),
no_channels: self.no_channels(),
no_samples: self.no_samples(),
timecode: self.timecode(),
reference_level: 0,
p_data: data.as_mut_ptr(),
};
unsafe {
NDIlib_util_audio_to_interleaved_16s_v2(self.as_ptr(), &mut dst);
}
}
pub fn try_from_interleaved_16s(
pub fn try_from_buffer(
info: &gst_audio::AudioInfo,
buffer: &gst::BufferRef,
timecode: i64,
) -> Result<Self, ()> {
if info.format() != gst_audio::AUDIO_FORMAT_S16 {
if info.format() != gst_audio::AUDIO_FORMAT_F32 {
return Err(());
}
let map = buffer.map_readable().map_err(|_| ())?;
let src_data = map.as_slice_of::<i16>().map_err(|_| ())?;
let src_data = map.as_slice_of::<f32>().map_err(|_| ())?;
let src = NDIlib_audio_frame_interleaved_16s_t {
let no_samples = src_data.len() as i32 / info.channels() as i32;
let channel_stride_or_data_size_in_bytes = no_samples * mem::size_of::<f32>() as i32;
let mut dest_data =
Vec::<f32>::with_capacity(no_samples as usize * info.channels() as usize);
assert_eq!(dest_data.capacity(), src_data.len());
unsafe {
let dest_ptr = dest_data.as_mut_ptr();
for (i, samples) in src_data.chunks_exact(info.channels() as usize).enumerate() {
for (c, sample) in samples.into_iter().enumerate() {
ptr::write(dest_ptr.add(c * no_samples as usize + i), *sample);
}
}
dest_data.set_len(no_samples as usize * info.channels() as usize);
}
let dest = NDIlib_audio_frame_v3_t {
sample_rate: info.rate() as i32,
no_channels: info.channels() as i32,
no_samples: src_data.len() as i32 / info.channels() as i32,
no_samples,
timecode,
reference_level: 0,
p_data: src_data.as_ptr() as *mut i16,
};
let channel_stride_in_bytes = src.no_samples * mem::size_of::<f32>() as i32;
let mut dest_data =
Vec::with_capacity(channel_stride_in_bytes as usize * info.channels() as usize);
let mut dest = NDIlib_audio_frame_v2_t {
sample_rate: src.sample_rate,
no_channels: src.no_channels,
no_samples: src.no_samples,
timecode: src.timecode,
FourCC: NDIlib_FourCC_audio_type_FLTp,
p_data: dest_data.as_mut_ptr(),
channel_stride_in_bytes,
channel_stride_or_data_size_in_bytes,
p_metadata: ptr::null(),
timestamp: 0,
};
unsafe {
NDIlib_util_audio_from_interleaved_16s_v2(&src, &mut dest);
dest_data.set_len(dest_data.capacity());
}
Ok(AudioFrame::Owned(dest, None, Some(dest_data)))
}
}
@@ -870,7 +889,7 @@ impl<'a> Drop for AudioFrame<'a> {
fn drop(&mut self) {
if let AudioFrame::BorrowedRecv(ref mut frame, recv) = *self {
unsafe {
NDIlib_recv_free_audio_v2(recv.0.as_ptr() as *mut _, frame);
NDIlib_recv_free_audio_v3(recv.0.as_ptr() as *mut _, frame);
}
}
}

View file

@@ -151,7 +151,7 @@ impl ElementImpl for NdiSink {
)
.structure(
gst::Structure::builder("audio/x-raw")
.field("format", &gst_audio::AUDIO_FORMAT_S16.to_str())
.field("format", &gst_audio::AUDIO_FORMAT_F32.to_str())
.field("rate", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("channels", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("layout", &"interleaved")
@@ -256,9 +256,8 @@ impl BaseSinkImpl for NdiSink {
if let Some(ref info) = state.video_info {
if let Some(audio_meta) = buffer.meta::<crate::ndisinkmeta::NdiSinkAudioMeta>() {
for (buffer, info, timecode) in audio_meta.buffers() {
let frame =
crate::ndi::AudioFrame::try_from_interleaved_16s(info, buffer, *timecode)
.map_err(|_| {
let frame = crate::ndi::AudioFrame::try_from_buffer(info, buffer, *timecode)
.map_err(|_| {
gst_error!(CAT, obj: element, "Unsupported audio frame");
gst::FlowError::NotNegotiated
})?;
@@ -334,8 +333,8 @@ impl BaseSinkImpl for NdiSink {
.map(|time| (time.nseconds() / 100) as i64)
.unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize);
let frame = crate::ndi::AudioFrame::try_from_interleaved_16s(info, buffer, timecode)
.map_err(|_| {
let frame =
crate::ndi::AudioFrame::try_from_buffer(info, buffer, timecode).map_err(|_| {
gst_error!(CAT, obj: element, "Unsupported audio frame");
gst::FlowError::NotNegotiated
})?;

View file

@@ -122,7 +122,7 @@ impl ElementImpl for NdiSinkCombiner {
.unwrap();
let caps = gst::Caps::builder("audio/x-raw")
.field("format", &gst_audio::AUDIO_FORMAT_S16.to_str())
.field("format", &gst_audio::AUDIO_FORMAT_F32.to_str())
.field("rate", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("channels", &gst::IntRange::<i32>::new(1, i32::MAX))
.field("layout", &"interleaved")

View file

@@ -39,10 +39,10 @@ extern "C" {
p_instance: NDIlib_recv_instance_t,
p_metadata: *const NDIlib_metadata_frame_t,
) -> bool;
pub fn NDIlib_recv_capture_v2(
pub fn NDIlib_recv_capture_v3(
p_instance: NDIlib_recv_instance_t,
p_video_data: *mut NDIlib_video_frame_v2_t,
p_audio_data: *mut NDIlib_audio_frame_v2_t,
p_audio_data: *mut NDIlib_audio_frame_v3_t,
p_metadata: *mut NDIlib_metadata_frame_t,
timeout_in_ms: u32,
) -> NDIlib_frame_type_e;
@@ -50,9 +50,9 @@ extern "C" {
p_instance: NDIlib_recv_instance_t,
p_video_data: *mut NDIlib_video_frame_v2_t,
);
pub fn NDIlib_recv_free_audio_v2(
pub fn NDIlib_recv_free_audio_v3(
p_instance: NDIlib_recv_instance_t,
p_audio_data: *mut NDIlib_audio_frame_v2_t,
p_audio_data: *mut NDIlib_audio_frame_v3_t,
);
pub fn NDIlib_recv_free_metadata(
p_instance: NDIlib_recv_instance_t,
@@ -70,9 +70,9 @@ extern "C" {
p_instance: NDIlib_send_instance_t,
p_video_data: *const NDIlib_video_frame_v2_t,
);
pub fn NDIlib_send_send_audio_v2(
pub fn NDIlib_send_send_audio_v3(
p_instance: NDIlib_send_instance_t,
p_audio_data: *const NDIlib_audio_frame_v2_t,
p_audio_data: *const NDIlib_audio_frame_v3_t,
);
}
@@ -139,6 +139,9 @@ pub const NDIlib_FourCC_video_type_BGRX: NDIlib_FourCC_video_type_e = make_fourc
pub const NDIlib_FourCC_video_type_RGBA: NDIlib_FourCC_video_type_e = make_fourcc(b"RGBA");
pub const NDIlib_FourCC_video_type_RGBX: NDIlib_FourCC_video_type_e = make_fourcc(b"RGBX");
pub type NDIlib_FourCC_audio_type_e = u32;
pub const NDIlib_FourCC_audio_type_FLTp: NDIlib_FourCC_video_type_e = make_fourcc(b"FLTp");
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum NDIlib_frame_format_type_e {
@@ -216,36 +219,14 @@ pub struct NDIlib_video_frame_v2_t {
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct NDIlib_audio_frame_v2_t {
pub struct NDIlib_audio_frame_v3_t {
pub sample_rate: ::std::os::raw::c_int,
pub no_channels: ::std::os::raw::c_int,
pub no_samples: ::std::os::raw::c_int,
pub timecode: i64,
pub FourCC: NDIlib_FourCC_audio_type_e,
pub p_data: *const ::std::os::raw::c_float,
pub channel_stride_in_bytes: ::std::os::raw::c_int,
pub channel_stride_or_data_size_in_bytes: ::std::os::raw::c_int,
pub p_metadata: *const ::std::os::raw::c_char,
pub timestamp: i64,
}
extern "C" {
pub fn NDIlib_util_audio_to_interleaved_16s_v2(
p_src: *const NDIlib_audio_frame_v2_t,
p_dst: *mut NDIlib_audio_frame_interleaved_16s_t,
);
pub fn NDIlib_util_audio_from_interleaved_16s_v2(
p_src: *const NDIlib_audio_frame_interleaved_16s_t,
p_dst: *mut NDIlib_audio_frame_v2_t,
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct NDIlib_audio_frame_interleaved_16s_t {
pub sample_rate: ::std::os::raw::c_int,
pub no_channels: ::std::os::raw::c_int,
pub no_samples: ::std::os::raw::c_int,
pub timecode: i64,
pub reference_level: ::std::os::raw::c_int,
pub p_data: *mut i16,
}

View file

@@ -3,7 +3,7 @@ use gst::prelude::*;
use gst::{gst_debug, gst_error, gst_log, gst_trace, gst_warning};
use gst_video::prelude::*;
use byte_slice_cast::AsMutSliceOf;
use byte_slice_cast::*;
use std::cmp;
use std::collections::VecDeque;
@@ -731,7 +731,7 @@ impl Receiver {
let info = self.create_video_info(element, &video_frame)?;
let mut buffer = self.create_video_buffer(element, pts, duration, &info, &video_frame);
let mut buffer = self.create_video_buffer(element, pts, duration, &info, &video_frame)?;
if discont {
buffer
.get_mut()
@@ -888,8 +888,8 @@ impl Receiver {
duration: Option<gst::ClockTime>,
info: &gst_video::VideoInfo,
video_frame: &VideoFrame,
) -> gst::Buffer {
let mut buffer = gst::Buffer::with_size(info.size()).unwrap();
) -> Result<gst::Buffer, gst::FlowError> {
let mut buffer = self.copy_video_frame(element, info, video_frame)?;
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(pts);
@@ -950,16 +950,18 @@ impl Receiver {
}
}
self.copy_video_frame(element, info, buffer, video_frame)
Ok(buffer)
}
fn copy_video_frame(
&self,
_element: &gst_base::BaseSrc,
info: &gst_video::VideoInfo,
buffer: gst::Buffer,
video_frame: &VideoFrame,
) -> gst::Buffer {
) -> Result<gst::Buffer, gst::FlowError> {
let src = video_frame.data().ok_or(gst::FlowError::NotNegotiated)?;
let buffer = gst::Buffer::with_size(info.size()).unwrap();
let mut vframe = gst_video::VideoFrame::from_buffer_writable(buffer, info).unwrap();
match info.format() {
@@ -976,7 +978,6 @@ impl Receiver {
let dest_stride = vframe.plane_stride()[0] as usize;
let dest = vframe.plane_data_mut(0).unwrap();
let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
let src = video_frame.data();
for (dest, src) in dest
.chunks_exact_mut(dest_stride)
@@ -993,7 +994,6 @@ impl Receiver {
let dest_stride = vframe.plane_stride()[0] as usize;
let dest = vframe.plane_data_mut(0).unwrap();
let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
let src = video_frame.data();
for (dest, src) in dest
.chunks_exact_mut(dest_stride)
@@ -1009,7 +1009,7 @@ impl Receiver {
let dest_stride = vframe.plane_stride()[1] as usize;
let dest = vframe.plane_data_mut(1).unwrap();
let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
let src = &video_frame.data()[(video_frame.yres() as usize * src_stride)..];
let src = &src[(video_frame.yres() as usize * src_stride)..];
for (dest, src) in dest
.chunks_exact_mut(dest_stride)
@@ -1026,7 +1026,6 @@ impl Receiver {
let dest_stride = vframe.plane_stride()[0] as usize;
let dest = vframe.plane_data_mut(0).unwrap();
let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
let src = video_frame.data();
for (dest, src) in dest
.chunks_exact_mut(dest_stride)
@@ -1043,7 +1042,7 @@ impl Receiver {
let dest = vframe.plane_data_mut(1).unwrap();
let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
let src_stride1 = video_frame.line_stride_or_data_size_in_bytes() as usize / 2;
let src = &video_frame.data()[(video_frame.yres() as usize * src_stride)..];
let src = &src[(video_frame.yres() as usize * src_stride)..];
for (dest, src) in dest
.chunks_exact_mut(dest_stride)
@@ -1060,7 +1059,7 @@ impl Receiver {
let dest = vframe.plane_data_mut(2).unwrap();
let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
let src_stride1 = video_frame.line_stride_or_data_size_in_bytes() as usize / 2;
let src = &video_frame.data()[(video_frame.yres() as usize * src_stride
let src = &src[(video_frame.yres() as usize * src_stride
+ (video_frame.yres() as usize + 1) / 2 * src_stride1)..];
for (dest, src) in dest
@@ -1074,7 +1073,7 @@ impl Receiver {
_ => unreachable!(),
}
vframe.into_buffer()
Ok(vframe.into_buffer())
}
fn create_audio_buffer_and_info(
@@ -1093,7 +1092,7 @@ impl Receiver {
let info = self.create_audio_info(element, &audio_frame)?;
let mut buffer = self.create_audio_buffer(element, pts, duration, &info, &audio_frame);
let mut buffer = self.create_audio_buffer(element, pts, duration, &info, &audio_frame)?;
if discont {
buffer
.get_mut()
@@ -1129,13 +1128,22 @@ impl Receiver {
element: &gst_base::BaseSrc,
audio_frame: &AudioFrame,
) -> Result<gst_audio::AudioInfo, gst::FlowError> {
if audio_frame.fourcc() != NDIlib_FourCC_audio_type_FLTp {
gst::element_error!(
element,
gst::StreamError::Format,
["Unsupported audio fourcc {:08x}", audio_frame.fourcc()]
);
return Err(gst::FlowError::NotNegotiated);
}
let builder = gst_audio::AudioInfo::builder(
gst_audio::AUDIO_FORMAT_S16,
gst_audio::AUDIO_FORMAT_F32,
audio_frame.sample_rate() as u32,
audio_frame.no_channels() as u32,
);
builder.build().map_err(|_| {
let info = builder.build().map_err(|_| {
gst::element_error!(
element,
gst::StreamError::Format,
@@ -1143,7 +1151,9 @@ impl Receiver {
);
gst::FlowError::NotNegotiated
})
})?;
Ok(info)
}
fn create_audio_buffer(
@@ -1153,9 +1163,10 @@ impl Receiver {
duration: Option<gst::ClockTime>,
info: &gst_audio::AudioInfo,
audio_frame: &AudioFrame,
) -> gst::Buffer {
// We multiply by 2 because is the size in bytes of an i16 variable
) -> Result<gst::Buffer, gst::FlowError> {
let src = audio_frame.data().ok_or(gst::FlowError::NotNegotiated)?;
let buff_size = (audio_frame.no_samples() as u32 * info.bpf()) as usize;
let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
{
let buffer = buffer.get_mut().unwrap();
@@ -1181,15 +1192,32 @@ impl Receiver {
}
}
audio_frame.copy_to_interleaved_16s(
buffer
.map_writable()
.unwrap()
.as_mut_slice_of::<i16>()
.unwrap(),
let mut dest = buffer.map_writable().unwrap();
let dest = dest
.as_mut_slice_of::<f32>()
.map_err(|_| gst::FlowError::NotNegotiated)?;
assert!(
dest.len()
== audio_frame.no_samples() as usize * audio_frame.no_channels() as usize
);
for (channel, samples) in src
.chunks_exact(audio_frame.channel_stride_or_data_size_in_bytes() as usize)
.enumerate()
{
let samples = samples
.as_slice_of::<f32>()
.map_err(|_| gst::FlowError::NotNegotiated)?;
for (i, sample) in samples[..audio_frame.no_samples() as usize]
.into_iter()
.enumerate()
{
dest[i * (audio_frame.no_channels() as usize) + channel] = *sample;
}
}
}
buffer
Ok(buffer)
}
}