2022-10-12 16:45:52 +00:00
|
|
|
// SPDX-License-Identifier: MPL-2.0
|
|
|
|
|
2019-07-17 16:10:20 +00:00
|
|
|
use glib::prelude::*;
|
|
|
|
use gst::prelude::*;
|
|
|
|
use gst_video::prelude::*;
|
|
|
|
|
2021-09-30 19:44:31 +00:00
|
|
|
use byte_slice_cast::*;
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2019-07-18 21:05:31 +00:00
|
|
|
use std::cmp;
|
2019-07-17 16:10:20 +00:00
|
|
|
use std::collections::VecDeque;
|
|
|
|
use std::sync::{Arc, Condvar, Mutex, Weak};
|
|
|
|
use std::thread;
|
2022-10-12 18:57:34 +00:00
|
|
|
use std::time;
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2022-06-06 11:17:40 +00:00
|
|
|
use atomic_refcell::AtomicRefCell;
|
|
|
|
|
2023-07-06 13:43:37 +00:00
|
|
|
use gst::glib::once_cell::sync::Lazy;
|
2022-10-12 18:57:34 +00:00
|
|
|
|
|
|
|
use crate::ndi::*;
|
2023-10-11 19:25:29 +00:00
|
|
|
use crate::ndi_cc_meta::NDICCMetaDecoder;
|
2022-10-12 18:57:34 +00:00
|
|
|
use crate::ndisys;
|
|
|
|
use crate::ndisys::*;
|
|
|
|
use crate::TimestampMode;
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
// Debug category used for all logging in this module.
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new(
        "ndireceiver",
        gst::DebugColorFlags::empty(),
        Some("NewTek NDI receiver"),
    )
});
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
/// Reference-counted handle to the receiver state that is shared with the capture thread.
pub struct Receiver(Arc<ReceiverInner>);
|
2019-07-19 08:32:04 +00:00
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
/// Format description for audio captured from NDI.
///
/// `Audio` carries uncompressed audio; the remaining variants are only
/// available with the NDI advanced SDK and describe compressed streams.
#[derive(Debug, PartialEq, Eq)]
#[allow(clippy::large_enum_variant)]
pub enum AudioInfo {
    Audio(gst_audio::AudioInfo),
    #[cfg(feature = "advanced-sdk")]
    #[allow(dead_code)]
    Opus {
        sample_rate: i32,
        no_channels: i32,
    },
    #[cfg(feature = "advanced-sdk")]
    Aac {
        sample_rate: i32,
        no_channels: i32,
        // Two-byte AudioSpecificConfig, exposed as `codec_data` on the caps
        // (required for "stream-format=raw" AAC).
        codec_data: [u8; 2],
    },
}
|
|
|
|
|
|
|
|
impl AudioInfo {
|
|
|
|
pub fn to_caps(&self) -> Result<gst::Caps, glib::BoolError> {
|
|
|
|
match self {
|
2022-10-12 18:57:34 +00:00
|
|
|
AudioInfo::Audio(ref info) => info.to_caps(),
|
2021-10-01 09:47:43 +00:00
|
|
|
#[cfg(feature = "advanced-sdk")]
|
2022-10-12 18:57:34 +00:00
|
|
|
AudioInfo::Opus {
|
2021-10-01 09:47:43 +00:00
|
|
|
sample_rate,
|
|
|
|
no_channels,
|
|
|
|
} => Ok(gst::Caps::builder("audio/x-opus")
|
|
|
|
.field("channels", *no_channels)
|
|
|
|
.field("rate", *sample_rate)
|
|
|
|
.field("channel-mapping-family", 0i32)
|
|
|
|
.build()),
|
|
|
|
#[cfg(feature = "advanced-sdk")]
|
2022-10-12 18:57:34 +00:00
|
|
|
AudioInfo::Aac {
|
2021-10-01 09:47:43 +00:00
|
|
|
sample_rate,
|
|
|
|
no_channels,
|
|
|
|
codec_data,
|
|
|
|
} => Ok(gst::Caps::builder("audio/mpeg")
|
|
|
|
.field("channels", *no_channels)
|
|
|
|
.field("rate", *sample_rate)
|
|
|
|
.field("mpegversion", 4i32)
|
|
|
|
.field("stream-format", "raw")
|
|
|
|
.field("codec_data", gst::Buffer::from_mut_slice(*codec_data))
|
|
|
|
.build()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Format description for video captured from NDI.
///
/// `Video` carries uncompressed video; the remaining variants are only
/// available with the NDI advanced SDK and describe compressed streams.
#[derive(Debug, PartialEq, Eq)]
pub enum VideoInfo {
    Video(gst_video::VideoInfo),
    #[cfg(feature = "advanced-sdk")]
    SpeedHQInfo {
        // SpeedHQ caps "variant" string.
        variant: String,
        xres: i32,
        yres: i32,
        fps_n: i32,
        fps_d: i32,
        par_n: i32,
        par_d: i32,
        interlace_mode: gst_video::VideoInterlaceMode,
    },
    #[cfg(feature = "advanced-sdk")]
    H264 {
        xres: i32,
        yres: i32,
        fps_n: i32,
        fps_d: i32,
        par_n: i32,
        par_d: i32,
        interlace_mode: gst_video::VideoInterlaceMode,
    },
    #[cfg(feature = "advanced-sdk")]
    H265 {
        xres: i32,
        yres: i32,
        fps_n: i32,
        fps_d: i32,
        par_n: i32,
        par_d: i32,
        interlace_mode: gst_video::VideoInterlaceMode,
    },
}
|
|
|
|
|
|
|
|
impl VideoInfo {
|
|
|
|
pub fn to_caps(&self) -> Result<gst::Caps, glib::BoolError> {
|
|
|
|
match self {
|
2022-10-12 18:57:34 +00:00
|
|
|
VideoInfo::Video(ref info) => info.to_caps(),
|
2021-10-01 09:47:43 +00:00
|
|
|
#[cfg(feature = "advanced-sdk")]
|
|
|
|
VideoInfo::SpeedHQInfo {
|
|
|
|
ref variant,
|
|
|
|
xres,
|
|
|
|
yres,
|
|
|
|
fps_n,
|
|
|
|
fps_d,
|
|
|
|
par_n,
|
|
|
|
par_d,
|
|
|
|
interlace_mode,
|
|
|
|
} => Ok(gst::Caps::builder("video/x-speedhq")
|
|
|
|
.field("width", *xres)
|
|
|
|
.field("height", *yres)
|
|
|
|
.field("framerate", gst::Fraction::new(*fps_n, *fps_d))
|
|
|
|
.field("pixel-aspect-ratio", gst::Fraction::new(*par_n, *par_d))
|
|
|
|
.field("interlace-mode", interlace_mode.to_str())
|
|
|
|
.field("variant", variant)
|
|
|
|
.build()),
|
|
|
|
#[cfg(feature = "advanced-sdk")]
|
2022-10-12 18:57:34 +00:00
|
|
|
VideoInfo::H264 {
|
2021-10-01 09:47:43 +00:00
|
|
|
xres,
|
|
|
|
yres,
|
|
|
|
fps_n,
|
|
|
|
fps_d,
|
|
|
|
par_n,
|
|
|
|
par_d,
|
|
|
|
interlace_mode,
|
|
|
|
..
|
|
|
|
} => Ok(gst::Caps::builder("video/x-h264")
|
|
|
|
.field("width", *xres)
|
|
|
|
.field("height", *yres)
|
|
|
|
.field("framerate", gst::Fraction::new(*fps_n, *fps_d))
|
|
|
|
.field("pixel-aspect-ratio", gst::Fraction::new(*par_n, *par_d))
|
|
|
|
.field("interlace-mode", interlace_mode.to_str())
|
|
|
|
.field("stream-format", "byte-stream")
|
|
|
|
.field("alignment", "au")
|
|
|
|
.build()),
|
|
|
|
#[cfg(feature = "advanced-sdk")]
|
2022-10-12 18:57:34 +00:00
|
|
|
VideoInfo::H265 {
|
2021-10-01 09:47:43 +00:00
|
|
|
xres,
|
|
|
|
yres,
|
|
|
|
fps_n,
|
|
|
|
fps_d,
|
|
|
|
par_n,
|
|
|
|
par_d,
|
|
|
|
interlace_mode,
|
|
|
|
..
|
|
|
|
} => Ok(gst::Caps::builder("video/x-h265")
|
|
|
|
.field("width", *xres)
|
|
|
|
.field("height", *yres)
|
|
|
|
.field("framerate", gst::Fraction::new(*fps_n, *fps_d))
|
|
|
|
.field("pixel-aspect-ratio", gst::Fraction::new(*par_n, *par_d))
|
|
|
|
.field("interlace-mode", interlace_mode.to_str())
|
|
|
|
.field("stream-format", "byte-stream")
|
|
|
|
.field("alignment", "au")
|
|
|
|
.build()),
|
|
|
|
}
|
|
|
|
}
|
2023-10-11 19:25:29 +00:00
|
|
|
|
|
|
|
pub fn width(&self) -> u32 {
|
|
|
|
match self {
|
|
|
|
VideoInfo::Video(ref info) => info.width(),
|
|
|
|
#[cfg(feature = "advanced-sdk")]
|
|
|
|
VideoInfo::SpeedHQInfo { xres, .. }
|
|
|
|
| VideoInfo::H264 { xres, .. }
|
|
|
|
| VideoInfo::H265 { xres, .. } => *xres as u32,
|
|
|
|
}
|
|
|
|
}
|
2021-10-01 09:47:43 +00:00
|
|
|
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
/// A captured buffer together with the format it was captured in.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum Buffer {
    Audio(gst::Buffer, AudioInfo),
    Video(gst::Buffer, VideoInfo),
}
|
2019-07-17 16:10:20 +00:00
|
|
|
|
|
|
|
/// Outcome of a single `Receiver::capture()` call.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum ReceiverItem {
    // A captured audio or video buffer.
    Buffer(Buffer),
    // The receiver is flushing or shutting down.
    Flushing,
    // No frame arrived within the configured timeout.
    Timeout,
    // The capture thread hit a fatal error.
    Error(gst::FlowError),
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
/// State shared between the public `Receiver` handle and its capture thread.
pub struct ReceiverInner {
    // Buffer queue plus the condvar used to wake `capture()` callers.
    queue: ReceiverQueue,
    // Upper bound on queued buffers.
    max_queue_length: usize,

    // Audio/video time observations
    // (index per stream type; presumably 0 = audio, 1 = video — TODO confirm at use sites)
    observations_timestamp: [Observations; 2],
    observations_timecode: [Observations; 2],

    // Weak reference back to the owning element, used for logging/error posting.
    element: glib::WeakRef<gst::Element>,
    // How buffer timestamps are derived (see `TimestampMode`).
    timestamp_mode: TimestampMode,

    // Capture timeout in milliseconds once connected.
    timeout: u32,
    // Capture timeout in milliseconds while waiting for the first frame.
    connect_timeout: u32,

    // Lazily-created decoder for closed captions carried in NDI metadata.
    ndi_cc_decoder: AtomicRefCell<Option<NDICCMetaDecoder>>,
    // Join handle of the capture thread.
    thread: Mutex<Option<std::thread::JoinHandle<()>>>,
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
// Shared (mutex-protected) queue state paired with the condvar that signals changes to it.
#[derive(Clone)]
struct ReceiverQueue(Arc<(Mutex<ReceiverQueueInner>, Condvar)>);
|
2019-07-19 08:32:04 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
struct ReceiverQueueInner {
    // Set to true when the capture thread should be stopped
    shutdown: bool,

    // If we're flushing right now and all buffers should simply be discarded
    // and capture() directly returns Flushing
    flushing: bool,

    // If we're playing right now or not: if not we simply discard everything captured
    playing: bool,
    // Queue containing our buffers, bounded by `max_queue_length`.
    //
    // On timeout/error will contain a single item and then never be filled again
    buffer_queue: VecDeque<Buffer>,

    // Fatal error reported by the capture thread, if any.
    error: Option<gst::FlowError>,
    // True once the capture thread timed out waiting for frames.
    timeout: bool,
}
|
|
|
|
|
2022-06-07 09:47:21 +00:00
|
|
|
// Number of observations collected before slope sanity checking kicks in.
const PREFILL_WINDOW_LENGTH: usize = 12;
// Maximum number of deltas kept in the skew estimation window.
const WINDOW_LENGTH: u64 = 512;
// Maximum time span of the skew estimation window, in nanoseconds (2s).
const WINDOW_DURATION: u64 = 2_000_000_000;
|
|
|
|
|
2022-06-06 12:48:12 +00:00
|
|
|
// Clock observation state used to map remote (sender) times onto the local clock.
#[derive(Default)]
struct Observations(AtomicRefCell<ObservationsInner>);
|
2021-09-30 10:24:46 +00:00
|
|
|
|
2019-07-18 21:05:31 +00:00
|
|
|
struct ObservationsInner {
    // Remote/local timestamps (ns) of the first observation after a reset;
    // all skew estimation is relative to this pair.
    base_remote_time: Option<u64>,
    base_local_time: Option<u64>,
    // Sliding window of (local - remote) deltas in ns.
    deltas: VecDeque<i64>,
    // Minimum delta currently in the window.
    min_delta: i64,
    // Current skew estimate in ns (can be negative).
    skew: i64,
    // True while the initial window is still being filled.
    filling: bool,
    // Number of deltas that made up the initial window.
    window_size: usize,

    // Remote/local times for workaround around fundamentally wrong slopes
    // This is not reset below and has a bigger window.
    times: VecDeque<(u64, u64)>,
    // Numerator/denominator applied to remote times to fix wrong time units;
    // (0, 0) marks remote times as static/unusable.
    slope_correction: (u64, u64),
}
|
|
|
|
|
|
|
|
impl Default for ObservationsInner {
|
|
|
|
fn default() -> ObservationsInner {
|
|
|
|
ObservationsInner {
|
|
|
|
base_local_time: None,
|
|
|
|
base_remote_time: None,
|
|
|
|
deltas: VecDeque::new(),
|
|
|
|
min_delta: 0,
|
|
|
|
skew: 0,
|
|
|
|
filling: true,
|
|
|
|
window_size: 0,
|
2022-06-07 09:47:21 +00:00
|
|
|
times: VecDeque::new(),
|
|
|
|
slope_correction: (1, 1),
|
2021-09-30 10:24:46 +00:00
|
|
|
}
|
|
|
|
}
|
2019-07-18 21:05:31 +00:00
|
|
|
}
|
|
|
|
|
2022-06-07 09:47:21 +00:00
|
|
|
impl ObservationsInner {
|
|
|
|
fn reset(&mut self) {
|
|
|
|
self.base_local_time = None;
|
|
|
|
self.base_remote_time = None;
|
|
|
|
self.deltas = VecDeque::new();
|
|
|
|
self.min_delta = 0;
|
|
|
|
self.skew = 0;
|
|
|
|
self.filling = true;
|
|
|
|
self.window_size = 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-07-18 21:05:31 +00:00
|
|
|
impl Observations {
|
2021-09-30 10:24:46 +00:00
|
|
|
// Based on the algorithm used in GStreamer's rtpjitterbuffer, which comes from
|
|
|
|
// Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation over Network Delays":
|
|
|
|
// http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
|
2019-07-18 21:05:31 +00:00
|
|
|
fn process(
|
|
|
|
&self,
|
2022-10-13 08:51:55 +00:00
|
|
|
element: &gst::Element,
|
2022-06-06 12:48:12 +00:00
|
|
|
remote_time: Option<gst::ClockTime>,
|
|
|
|
local_time: gst::ClockTime,
|
2021-09-13 09:26:56 +00:00
|
|
|
duration: Option<gst::ClockTime>,
|
2022-06-07 09:47:21 +00:00
|
|
|
) -> Option<(gst::ClockTime, Option<gst::ClockTime>, bool)> {
|
|
|
|
let remote_time = remote_time?.nseconds();
|
2022-06-06 12:48:12 +00:00
|
|
|
let local_time = local_time.nseconds();
|
2021-09-30 10:24:46 +00:00
|
|
|
|
2022-06-07 09:47:21 +00:00
|
|
|
let mut inner = self.0.borrow_mut();
|
|
|
|
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::trace!(
|
2021-09-30 10:24:46 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
2022-06-07 09:47:21 +00:00
|
|
|
"Local time {}, remote time {}, slope correct {}/{}",
|
2022-10-17 17:48:43 +00:00
|
|
|
local_time.nseconds(),
|
|
|
|
remote_time.nseconds(),
|
2022-06-07 09:47:21 +00:00
|
|
|
inner.slope_correction.0,
|
|
|
|
inner.slope_correction.1,
|
2021-09-30 10:24:46 +00:00
|
|
|
);
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2022-06-07 09:47:21 +00:00
|
|
|
inner.times.push_back((remote_time, local_time));
|
|
|
|
while inner
|
|
|
|
.times
|
|
|
|
.back()
|
|
|
|
.unwrap()
|
|
|
|
.1
|
|
|
|
.saturating_sub(inner.times.front().unwrap().1)
|
|
|
|
> WINDOW_DURATION
|
|
|
|
{
|
|
|
|
let _ = inner.times.pop_front();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Static remote times
|
|
|
|
if inner.slope_correction.1 == 0 {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
|
|
|
let remote_time =
|
|
|
|
remote_time.mul_div_round(inner.slope_correction.0, inner.slope_correction.1)?;
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2021-09-30 10:24:46 +00:00
|
|
|
let (base_remote_time, base_local_time) =
|
|
|
|
match (inner.base_remote_time, inner.base_local_time) {
|
|
|
|
(Some(remote), Some(local)) => (remote, local),
|
|
|
|
_ => {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(
|
2021-09-30 10:24:46 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Initializing base time: local {}, remote {}",
|
2022-10-17 17:48:43 +00:00
|
|
|
local_time.nseconds(),
|
|
|
|
remote_time.nseconds(),
|
2021-09-30 10:24:46 +00:00
|
|
|
);
|
|
|
|
inner.base_remote_time = Some(remote_time);
|
|
|
|
inner.base_local_time = Some(local_time);
|
|
|
|
|
2022-10-17 17:48:43 +00:00
|
|
|
return Some((local_time.nseconds(), duration, true));
|
2019-07-18 21:05:31 +00:00
|
|
|
}
|
2021-09-30 10:24:46 +00:00
|
|
|
};
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2022-06-07 09:47:21 +00:00
|
|
|
if inner.times.len() < PREFILL_WINDOW_LENGTH {
|
2022-10-17 17:48:43 +00:00
|
|
|
return Some((local_time.nseconds(), duration, false));
|
2022-06-07 09:47:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Check if the slope is simply wrong and try correcting
|
|
|
|
{
|
|
|
|
let local_diff = inner
|
|
|
|
.times
|
|
|
|
.back()
|
|
|
|
.unwrap()
|
|
|
|
.1
|
|
|
|
.saturating_sub(inner.times.front().unwrap().1);
|
|
|
|
let remote_diff = inner
|
|
|
|
.times
|
|
|
|
.back()
|
|
|
|
.unwrap()
|
|
|
|
.0
|
|
|
|
.saturating_sub(inner.times.front().unwrap().0);
|
|
|
|
|
|
|
|
if remote_diff == 0 {
|
|
|
|
inner.reset();
|
|
|
|
inner.base_remote_time = Some(remote_time);
|
|
|
|
inner.base_local_time = Some(local_time);
|
|
|
|
|
|
|
|
// Static remote times
|
|
|
|
inner.slope_correction = (0, 0);
|
|
|
|
return None;
|
|
|
|
} else {
|
|
|
|
let slope = local_diff as f64 / remote_diff as f64;
|
|
|
|
let scaled_slope =
|
|
|
|
slope * (inner.slope_correction.1 as f64) / (inner.slope_correction.0 as f64);
|
|
|
|
|
|
|
|
// Check for some obviously wrong slopes and try to correct for that
|
|
|
|
if !(0.5..1.5).contains(&scaled_slope) {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::warning!(
|
2022-06-07 09:47:21 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Too small/big slope {}, resetting",
|
|
|
|
scaled_slope
|
|
|
|
);
|
|
|
|
|
|
|
|
let discont = !inner.deltas.is_empty();
|
|
|
|
inner.reset();
|
|
|
|
|
|
|
|
if (0.0005..0.0015).contains(&slope) {
|
|
|
|
// Remote unit was actually 0.1ns
|
|
|
|
inner.slope_correction = (1, 1000);
|
|
|
|
} else if (0.005..0.015).contains(&slope) {
|
|
|
|
// Remote unit was actually 1ns
|
|
|
|
inner.slope_correction = (1, 100);
|
|
|
|
} else if (0.05..0.15).contains(&slope) {
|
|
|
|
// Remote unit was actually 10ns
|
|
|
|
inner.slope_correction = (1, 10);
|
|
|
|
} else if (5.0..15.0).contains(&slope) {
|
|
|
|
// Remote unit was actually 1us
|
|
|
|
inner.slope_correction = (10, 1);
|
|
|
|
} else if (50.0..150.0).contains(&slope) {
|
|
|
|
// Remote unit was actually 10us
|
|
|
|
inner.slope_correction = (100, 1);
|
|
|
|
} else if (50.0..150.0).contains(&slope) {
|
|
|
|
// Remote unit was actually 100us
|
|
|
|
inner.slope_correction = (1000, 1);
|
|
|
|
} else if (50.0..150.0).contains(&slope) {
|
|
|
|
// Remote unit was actually 1ms
|
|
|
|
inner.slope_correction = (10000, 1);
|
|
|
|
} else {
|
|
|
|
inner.slope_correction = (1, 1);
|
|
|
|
}
|
|
|
|
|
|
|
|
let remote_time = inner
|
|
|
|
.times
|
|
|
|
.back()
|
|
|
|
.unwrap()
|
|
|
|
.0
|
|
|
|
.mul_div_round(inner.slope_correction.0, inner.slope_correction.1)?;
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(
|
2022-06-07 09:47:21 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Initializing base time: local {}, remote {}, slope correction {}/{}",
|
2022-10-17 17:48:43 +00:00
|
|
|
local_time.nseconds(),
|
|
|
|
remote_time.nseconds(),
|
2022-06-07 09:47:21 +00:00
|
|
|
inner.slope_correction.0,
|
|
|
|
inner.slope_correction.1,
|
|
|
|
);
|
|
|
|
inner.base_remote_time = Some(remote_time);
|
|
|
|
inner.base_local_time = Some(local_time);
|
|
|
|
|
2022-10-17 17:48:43 +00:00
|
|
|
return Some((local_time.nseconds(), duration, discont));
|
2022-06-07 09:47:21 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-30 10:24:46 +00:00
|
|
|
let remote_diff = remote_time.saturating_sub(base_remote_time);
|
|
|
|
let local_diff = local_time.saturating_sub(base_local_time);
|
|
|
|
let delta = (local_diff as i64) - (remote_diff as i64);
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::trace!(
|
2021-09-30 10:24:46 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Local diff {}, remote diff {}, delta {}",
|
2022-10-17 17:48:43 +00:00
|
|
|
local_diff.nseconds(),
|
|
|
|
remote_diff.nseconds(),
|
2021-09-30 10:24:46 +00:00
|
|
|
delta,
|
|
|
|
);
|
|
|
|
|
|
|
|
if (delta > inner.skew && delta - inner.skew > 1_000_000_000)
|
|
|
|
|| (delta < inner.skew && inner.skew - delta > 1_000_000_000)
|
|
|
|
{
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::warning!(
|
2021-09-30 10:24:46 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Delta {} too far from skew {}, resetting",
|
|
|
|
delta,
|
|
|
|
inner.skew
|
2019-07-19 09:51:06 +00:00
|
|
|
);
|
2021-09-30 10:37:30 +00:00
|
|
|
|
|
|
|
let discont = !inner.deltas.is_empty();
|
|
|
|
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(
|
2021-09-30 10:24:46 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Initializing base time: local {}, remote {}",
|
2022-10-17 17:48:43 +00:00
|
|
|
local_time.nseconds(),
|
|
|
|
remote_time.nseconds(),
|
2019-07-18 21:05:31 +00:00
|
|
|
);
|
2022-06-07 09:47:21 +00:00
|
|
|
|
|
|
|
inner.reset();
|
2021-09-30 10:24:46 +00:00
|
|
|
inner.base_remote_time = Some(remote_time);
|
|
|
|
inner.base_local_time = Some(local_time);
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2022-10-17 17:48:43 +00:00
|
|
|
return Some((local_time.nseconds(), duration, discont));
|
2021-09-30 10:24:46 +00:00
|
|
|
}
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2021-09-30 10:24:46 +00:00
|
|
|
if inner.filling {
|
|
|
|
if inner.deltas.is_empty() || delta < inner.min_delta {
|
|
|
|
inner.min_delta = delta;
|
|
|
|
}
|
|
|
|
inner.deltas.push_back(delta);
|
2019-07-19 09:51:06 +00:00
|
|
|
|
2021-09-30 10:24:46 +00:00
|
|
|
if remote_diff > WINDOW_DURATION || inner.deltas.len() as u64 == WINDOW_LENGTH {
|
|
|
|
inner.window_size = inner.deltas.len();
|
|
|
|
inner.skew = inner.min_delta;
|
|
|
|
inner.filling = false;
|
2019-07-18 21:05:31 +00:00
|
|
|
} else {
|
2021-09-30 10:24:46 +00:00
|
|
|
let perc_time = remote_diff.mul_div_floor(100, WINDOW_DURATION).unwrap() as i64;
|
|
|
|
let perc_window = (inner.deltas.len() as u64)
|
|
|
|
.mul_div_floor(100, WINDOW_LENGTH)
|
|
|
|
.unwrap() as i64;
|
|
|
|
let perc = cmp::max(perc_time, perc_window);
|
|
|
|
|
|
|
|
inner.skew = (perc * inner.min_delta + ((10_000 - perc) * inner.skew)) / 10_000;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
let old = inner.deltas.pop_front().unwrap();
|
|
|
|
inner.deltas.push_back(delta);
|
|
|
|
|
|
|
|
if delta <= inner.min_delta {
|
|
|
|
inner.min_delta = delta;
|
|
|
|
} else if old == inner.min_delta {
|
|
|
|
inner.min_delta = inner.deltas.iter().copied().min().unwrap();
|
2019-07-18 21:05:31 +00:00
|
|
|
}
|
2021-09-30 10:24:46 +00:00
|
|
|
|
|
|
|
inner.skew = (inner.min_delta + (124 * inner.skew)) / 125;
|
2019-07-18 21:05:31 +00:00
|
|
|
}
|
|
|
|
|
2021-09-30 10:24:46 +00:00
|
|
|
let out_time = base_local_time + remote_diff;
|
|
|
|
let out_time = if inner.skew < 0 {
|
|
|
|
out_time.saturating_sub((-inner.skew) as u64)
|
|
|
|
} else {
|
|
|
|
out_time + (inner.skew as u64)
|
|
|
|
};
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::trace!(
|
2021-09-30 10:24:46 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Skew {}, min delta {}",
|
|
|
|
inner.skew,
|
|
|
|
inner.min_delta
|
|
|
|
);
|
2022-10-17 17:48:43 +00:00
|
|
|
gst::trace!(CAT, obj: element, "Outputting {}", out_time.nseconds());
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2022-10-17 17:48:43 +00:00
|
|
|
Some((out_time.nseconds(), duration, false))
|
2019-07-18 21:05:31 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
/// Cloneable handle for controlling a `Receiver` (flush/play/shutdown)
/// without holding a strong reference to the receiver itself.
#[derive(Clone)]
pub struct ReceiverControlHandle {
    queue: ReceiverQueue,
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
impl ReceiverControlHandle {
|
2019-07-17 16:10:20 +00:00
|
|
|
pub fn set_flushing(&self, flushing: bool) {
|
|
|
|
let mut queue = (self.queue.0).0.lock().unwrap();
|
|
|
|
queue.flushing = flushing;
|
|
|
|
(self.queue.0).1.notify_all();
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_playing(&self, playing: bool) {
|
|
|
|
let mut queue = (self.queue.0).0.lock().unwrap();
|
|
|
|
queue.playing = playing;
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn shutdown(&self) {
|
|
|
|
let mut queue = (self.queue.0).0.lock().unwrap();
|
2021-09-29 10:45:04 +00:00
|
|
|
queue.shutdown = true;
|
2019-07-17 16:10:20 +00:00
|
|
|
(self.queue.0).1.notify_all();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
impl Drop for ReceiverInner {
|
|
|
|
fn drop(&mut self) {
|
|
|
|
// Will shut down the receiver thread on the next iteration
|
|
|
|
let mut queue = (self.queue.0).0.lock().unwrap();
|
|
|
|
queue.shutdown = true;
|
|
|
|
drop(queue);
|
|
|
|
|
|
|
|
let element = self.element.upgrade();
|
|
|
|
|
|
|
|
if let Some(ref element) = element {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Closed NDI connection");
|
2021-09-29 10:45:04 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Receiver {
|
2019-07-17 16:10:20 +00:00
|
|
|
/// Creates the shared receiver state and spawns the capture thread.
///
/// `recv` is the already-connected NDI receive instance; `timeout` and
/// `connect_timeout` are in milliseconds (the latter applies until the first
/// frame arrives). The capture thread only holds a weak reference to the
/// state, so dropping the last `Receiver` lets it shut down.
fn new(
    recv: RecvInstance,
    timestamp_mode: TimestampMode,
    timeout: u32,
    connect_timeout: u32,
    max_queue_length: usize,
    element: &gst::Element,
) -> Self {
    let receiver = Receiver(Arc::new(ReceiverInner {
        queue: ReceiverQueue(Arc::new((
            Mutex::new(ReceiverQueueInner {
                shutdown: false,
                playing: false,
                flushing: false,
                buffer_queue: VecDeque::with_capacity(max_queue_length),
                error: None,
                timeout: false,
            }),
            Condvar::new(),
        ))),
        max_queue_length,
        observations_timestamp: Default::default(),
        observations_timecode: Default::default(),
        element: element.downgrade(),
        timestamp_mode,
        timeout,
        connect_timeout,
        ndi_cc_decoder: AtomicRefCell::new(None),
        thread: Mutex::new(None),
    }));

    // The thread must not keep the receiver alive, hence the weak reference.
    let weak = Arc::downgrade(&receiver.0);
    let thread = thread::spawn(move || {
        use std::panic;

        let weak_clone = weak.clone();
        // Catch panics from the capture loop so they surface as an element
        // error and a queued FlowError instead of silently killing the thread.
        match panic::catch_unwind(panic::AssertUnwindSafe(move || {
            Self::receive_thread(&weak_clone, recv)
        })) {
            Ok(_) => (),
            Err(_) => {
                if let Some(receiver) = weak.upgrade().map(Receiver) {
                    if let Some(element) = receiver.0.element.upgrade() {
                        gst::element_error!(
                            element,
                            gst::LibraryError::Failed,
                            ["Panic while connecting to NDI source"]
                        );
                    }

                    // Unblock any capture() caller with the error.
                    let mut queue = (receiver.0.queue.0).0.lock().unwrap();
                    queue.error = Some(gst::FlowError::Error);
                    (receiver.0.queue.0).1.notify_one();
                }
            }
        }
    });

    *receiver.0.thread.lock().unwrap() = Some(thread);

    receiver
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
pub fn receiver_control_handle(&self) -> ReceiverControlHandle {
|
2019-07-17 16:10:20 +00:00
|
|
|
ReceiverControlHandle {
|
|
|
|
queue: self.0.queue.clone(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
#[allow(dead_code)]
|
2019-07-17 16:10:20 +00:00
|
|
|
pub fn set_flushing(&self, flushing: bool) {
|
|
|
|
let mut queue = (self.0.queue.0).0.lock().unwrap();
|
|
|
|
queue.flushing = flushing;
|
|
|
|
(self.0.queue.0).1.notify_all();
|
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
#[allow(dead_code)]
|
2019-07-17 16:10:20 +00:00
|
|
|
pub fn set_playing(&self, playing: bool) {
|
|
|
|
let mut queue = (self.0.queue.0).0.lock().unwrap();
|
|
|
|
queue.playing = playing;
|
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
#[allow(dead_code)]
|
2019-07-17 16:10:20 +00:00
|
|
|
pub fn shutdown(&self) {
|
|
|
|
let mut queue = (self.0.queue.0).0.lock().unwrap();
|
2021-09-29 10:45:04 +00:00
|
|
|
queue.shutdown = true;
|
2019-07-17 16:10:20 +00:00
|
|
|
(self.0.queue.0).1.notify_all();
|
|
|
|
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
pub fn capture(&self) -> ReceiverItem {
|
2019-07-17 16:10:20 +00:00
|
|
|
let mut queue = (self.0.queue.0).0.lock().unwrap();
|
|
|
|
loop {
|
|
|
|
if let Some(err) = queue.error {
|
|
|
|
return ReceiverItem::Error(err);
|
|
|
|
} else if queue.buffer_queue.is_empty() && queue.timeout {
|
|
|
|
return ReceiverItem::Timeout;
|
2021-09-29 10:45:04 +00:00
|
|
|
} else if queue.flushing || queue.shutdown {
|
2019-07-17 16:10:20 +00:00
|
|
|
return ReceiverItem::Flushing;
|
2021-09-29 10:45:04 +00:00
|
|
|
} else if let Some(buffer) = queue.buffer_queue.pop_front() {
|
|
|
|
return ReceiverItem::Buffer(buffer);
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
queue = (self.0.queue.0).1.wait(queue).unwrap();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-31 11:28:46 +00:00
|
|
|
#[allow(clippy::too_many_arguments)]
/// Connects to an NDI source and starts a capture thread for it.
///
/// At least one of `ndi_name` / `url_address` must be provided (asserted).
/// `connect_timeout` and `timeout` are in milliseconds. Returns `None` (after
/// posting an element error) if the NDI receive instance cannot be created.
pub fn connect(
    element: &gst::Element,
    ndi_name: Option<&str>,
    url_address: Option<&str>,
    receiver_ndi_name: &str,
    connect_timeout: u32,
    bandwidth: NDIlib_recv_bandwidth_e,
    color_format: NDIlib_recv_color_format_e,
    timestamp_mode: TimestampMode,
    timeout: u32,
    max_queue_length: usize,
) -> Option<Self> {
    gst::debug!(CAT, obj: element, "Starting NDI connection...");

    assert!(ndi_name.is_some() || url_address.is_some());

    gst::debug!(
        CAT,
        obj: element,
        "Connecting to NDI source with NDI name '{:?}' and URL/Address {:?}",
        ndi_name,
        url_address,
    );

    // FIXME: Ideally we would use NDIlib_recv_color_format_fastest here but that seems to be
    // broken with interlaced content currently
    let recv = RecvInstance::builder(ndi_name, url_address, receiver_ndi_name)
        .bandwidth(bandwidth)
        .color_format(color_format)
        .allow_video_fields(true)
        .build();
    let recv = match recv {
        None => {
            gst::element_error!(
                element,
                gst::CoreError::Negotiation,
                ["Failed to connect to source"]
            );
            return None;
        }
        Some(recv) => recv,
    };

    recv.set_tally(&Tally::default());

    // Hint the sender that hardware acceleration may be used.
    let enable_hw_accel = MetadataFrame::new(0, Some("<ndi_hwaccel enabled=\"true\"/>"));
    recv.send_metadata(&enable_hw_accel);

    // This will set info.audio/video accordingly
    let receiver = Receiver::new(
        recv,
        timestamp_mode,
        timeout,
        connect_timeout,
        max_queue_length,
        element,
    );

    Some(receiver)
}
|
2020-01-16 09:14:10 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
fn receive_thread(receiver: &Weak<ReceiverInner>, recv: RecvInstance) {
|
2021-09-30 10:33:18 +00:00
|
|
|
let mut first_video_frame = true;
|
|
|
|
let mut first_audio_frame = true;
|
2021-09-29 10:45:04 +00:00
|
|
|
let mut first_frame = true;
|
|
|
|
let mut timer = time::Instant::now();
|
2023-10-11 19:25:29 +00:00
|
|
|
let mut pending_metas = VecDeque::<String>::new();
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
// Capture until error or shutdown
|
|
|
|
loop {
|
|
|
|
let receiver = match receiver.upgrade().map(Receiver) {
|
|
|
|
None => break,
|
|
|
|
Some(receiver) => receiver,
|
|
|
|
};
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
let element = match receiver.0.element.upgrade() {
|
|
|
|
None => return,
|
|
|
|
Some(element) => element,
|
|
|
|
};
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
let flushing = {
|
|
|
|
let queue = (receiver.0.queue.0).0.lock().unwrap();
|
|
|
|
if queue.shutdown {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Shutting down");
|
2021-09-29 10:45:04 +00:00
|
|
|
break;
|
|
|
|
}
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
// If an error happened in the meantime, just go out of here
|
|
|
|
if queue.error.is_some() {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::error!(CAT, obj: element, "Error while waiting for connection");
|
2021-09-29 10:45:04 +00:00
|
|
|
return;
|
|
|
|
}
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
queue.flushing
|
|
|
|
};
|
2020-01-16 09:14:10 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
let timeout = if first_frame {
|
|
|
|
receiver.0.connect_timeout
|
|
|
|
} else {
|
|
|
|
receiver.0.timeout
|
|
|
|
};
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
let res = match recv.capture(50) {
|
|
|
|
_ if flushing => {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Flushing");
|
2021-09-29 10:45:04 +00:00
|
|
|
Err(gst::FlowError::Flushing)
|
|
|
|
}
|
|
|
|
Err(_) => {
|
|
|
|
gst::element_error!(
|
|
|
|
element,
|
|
|
|
gst::ResourceError::Read,
|
|
|
|
["Error receiving frame"]
|
2019-07-17 16:10:20 +00:00
|
|
|
);
|
2021-09-29 10:45:04 +00:00
|
|
|
Err(gst::FlowError::Error)
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
2021-09-29 10:45:04 +00:00
|
|
|
Ok(None) if timeout > 0 && timer.elapsed().as_millis() >= timeout as u128 => {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Timed out -- assuming EOS",);
|
2021-09-29 10:45:04 +00:00
|
|
|
Err(gst::FlowError::Eos)
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
2021-09-29 10:45:04 +00:00
|
|
|
Ok(None) => {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::debug!(CAT, obj: element, "No frame received yet, retry");
|
2021-09-29 10:45:04 +00:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
Ok(Some(Frame::Video(frame))) => {
|
|
|
|
first_frame = false;
|
2021-09-30 10:33:18 +00:00
|
|
|
let mut buffer = receiver.create_video_buffer_and_info(&element, frame);
|
|
|
|
if first_video_frame {
|
|
|
|
if let Ok(Buffer::Video(ref mut buffer, _)) = buffer {
|
|
|
|
buffer
|
|
|
|
.get_mut()
|
|
|
|
.unwrap()
|
|
|
|
.set_flags(gst::BufferFlags::DISCONT);
|
|
|
|
first_video_frame = false;
|
|
|
|
}
|
|
|
|
}
|
2023-09-07 12:28:24 +00:00
|
|
|
|
2023-10-11 19:25:29 +00:00
|
|
|
if !pending_metas.is_empty() {
|
2023-09-07 12:28:24 +00:00
|
|
|
if let Ok(Buffer::Video(ref mut buffer, _)) = buffer {
|
2023-10-11 19:25:29 +00:00
|
|
|
let mut ndi_cc_decoder = receiver.0.ndi_cc_decoder.borrow_mut();
|
|
|
|
for meta in pending_metas.drain(..) {
|
|
|
|
let res = ndi_cc_decoder.as_mut().unwrap().decode(&meta, buffer);
|
|
|
|
if let Err(err) = res {
|
|
|
|
gst::debug!(CAT, obj: element, "Failed to parse NDI metadata: {err}");
|
|
|
|
}
|
2023-09-07 12:28:24 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-30 10:33:18 +00:00
|
|
|
buffer
|
2021-09-29 10:45:04 +00:00
|
|
|
}
|
|
|
|
Ok(Some(Frame::Audio(frame))) => {
|
|
|
|
first_frame = false;
|
2021-09-30 10:33:18 +00:00
|
|
|
let mut buffer = receiver.create_audio_buffer_and_info(&element, frame);
|
|
|
|
if first_audio_frame {
|
2022-02-28 13:16:01 +00:00
|
|
|
if let Ok(Buffer::Audio(ref mut buffer, _)) = buffer {
|
2021-09-30 10:33:18 +00:00
|
|
|
buffer
|
|
|
|
.get_mut()
|
|
|
|
.unwrap()
|
|
|
|
.set_flags(gst::BufferFlags::DISCONT);
|
|
|
|
first_audio_frame = false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
buffer
|
2021-09-29 10:45:04 +00:00
|
|
|
}
|
|
|
|
Ok(Some(Frame::Metadata(frame))) => {
|
|
|
|
if let Some(metadata) = frame.metadata() {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(
|
2021-09-29 10:45:04 +00:00
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: element,
|
2021-09-29 10:45:04 +00:00
|
|
|
"Received metadata at timecode {}: {}",
|
2022-10-17 17:48:43 +00:00
|
|
|
(frame.timecode() as u64 * 100).nseconds(),
|
2021-09-29 10:45:04 +00:00
|
|
|
metadata,
|
|
|
|
);
|
2023-09-07 12:28:24 +00:00
|
|
|
|
2023-10-11 19:25:29 +00:00
|
|
|
pending_metas.push_back(metadata.to_string());
|
2021-09-29 10:45:04 +00:00
|
|
|
}
|
2019-07-19 08:32:04 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
};
|
2019-07-19 08:32:04 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
match res {
|
|
|
|
Ok(item) => {
|
|
|
|
let mut queue = (receiver.0.queue.0).0.lock().unwrap();
|
|
|
|
while queue.buffer_queue.len() > receiver.0.max_queue_length {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::warning!(
|
2021-09-29 10:45:04 +00:00
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: element,
|
2021-09-29 10:45:04 +00:00
|
|
|
"Dropping old buffer -- queue has {} items",
|
|
|
|
queue.buffer_queue.len()
|
|
|
|
);
|
|
|
|
queue.buffer_queue.pop_front();
|
|
|
|
}
|
|
|
|
queue.buffer_queue.push_back(item);
|
|
|
|
(receiver.0.queue.0).1.notify_one();
|
|
|
|
timer = time::Instant::now();
|
|
|
|
}
|
|
|
|
Err(gst::FlowError::Eos) => {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Signalling EOS");
|
2021-09-29 10:45:04 +00:00
|
|
|
let mut queue = (receiver.0.queue.0).0.lock().unwrap();
|
|
|
|
queue.timeout = true;
|
|
|
|
(receiver.0.queue.0).1.notify_one();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
Err(gst::FlowError::Flushing) => {
|
|
|
|
// Flushing, nothing to be done here except for emptying our queue
|
|
|
|
let mut queue = (receiver.0.queue.0).0.lock().unwrap();
|
|
|
|
queue.buffer_queue.clear();
|
|
|
|
(receiver.0.queue.0).1.notify_one();
|
|
|
|
timer = time::Instant::now();
|
|
|
|
}
|
|
|
|
Err(err) => {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::error!(CAT, obj: element, "Signalling error");
|
2021-09-29 10:45:04 +00:00
|
|
|
let mut queue = (receiver.0.queue.0).0.lock().unwrap();
|
|
|
|
if queue.error.is_none() {
|
|
|
|
queue.error = Some(err);
|
|
|
|
}
|
|
|
|
(receiver.0.queue.0).1.notify_one();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-07-19 08:32:04 +00:00
|
|
|
}
|
|
|
|
|
2019-07-19 09:51:06 +00:00
|
|
|
/// Computes the PTS (and possibly adjusted duration) for a received frame.
///
/// NDI provides two time values per frame: a sender-side `timestamp`
/// (UNIX-epoch based, may be undefined) and a `timecode`. Depending on the
/// configured `TimestampMode`, one of these — possibly smoothed against the
/// local receive time via the observation trackers — becomes the buffer PTS.
///
/// * `element`   - source element, used for logging and the running time.
/// * `is_audio`  - selects the audio (index 0) vs. video (index 1) slot of the
///                 per-stream observation trackers.
/// * `timestamp` - NDI timestamp in 100 ns units, or
///                 `NDIlib_recv_timestamp_undefined`.
/// * `timecode`  - NDI timecode in 100 ns units.
/// * `duration`  - nominal frame duration, if known.
///
/// Returns `(pts, duration, discont)`; `discont` signals that the observation
/// tracker detected a jump. Returns `None` if the element has no running time
/// yet (e.g. while flushing / not in PLAYING).
fn calculate_timestamp(
    &self,
    element: &gst::Element,
    is_audio: bool,
    timestamp: i64,
    timecode: i64,
    duration: Option<gst::ClockTime>,
) -> Option<(gst::ClockTime, Option<gst::ClockTime>, bool)> {
    let receive_time = element.current_running_time()?;

    // glib::real_time() is in microseconds since the UNIX epoch; the NDI
    // timestamp is epoch-based too, so both are converted to nanoseconds
    // for comparison below.
    let real_time_now = (glib::real_time() as u64 * 1000).nseconds();
    // NDI timestamps/timecodes are in units of 100 ns.
    let timestamp = if timestamp == ndisys::NDIlib_recv_timestamp_undefined {
        gst::ClockTime::NONE
    } else {
        Some((timestamp as u64 * 100).nseconds())
    };
    let timecode = (timecode as u64 * 100).nseconds();

    gst::log!(
        CAT,
        obj: element,
        "Received frame with timecode {}, timestamp {}, duration {}, receive time {}, local time now {}",
        timecode,
        timestamp.display(),
        duration.display(),
        receive_time.display(),
        real_time_now,
    );

    // Feed both candidate clocks through their observation trackers; these
    // smooth the remote clock against the local receive time. Index 0 is
    // audio, index 1 is video (separate trackers per stream type).
    let res_timestamp = self.0.observations_timestamp[usize::from(!is_audio)].process(
        element,
        timestamp,
        receive_time,
        duration,
    );

    let res_timecode = self.0.observations_timecode[usize::from(!is_audio)].process(
        element,
        Some(timecode),
        receive_time,
        duration,
    );

    let (pts, duration, discont) = match self.0.timestamp_mode {
        TimestampMode::ReceiveTimeTimecode => match res_timecode {
            Some((pts, duration, discont)) => (pts, duration, discont),
            None => {
                gst::warning!(CAT, obj: element, "Can't calculate timestamp");
                (receive_time, duration, false)
            }
        },
        TimestampMode::ReceiveTimeTimestamp => match res_timestamp {
            Some((pts, duration, discont)) => (pts, duration, discont),
            None => {
                // Only warn if a timestamp was actually provided; an
                // undefined timestamp legitimately yields no observation.
                if timestamp.is_some() {
                    gst::warning!(CAT, obj: element, "Can't calculate timestamp");
                }

                (receive_time, duration, false)
            }
        },
        TimestampMode::Timecode => (timecode, duration, false),
        TimestampMode::Timestamp if timestamp.is_none() => (receive_time, duration, false),
        TimestampMode::Timestamp => {
            // Timestamps are relative to the UNIX epoch
            let timestamp = timestamp?;
            if real_time_now > timestamp {
                // Frame was sent in the past: shift the receive time back by
                // the transmission delay, clamping at zero.
                let diff = real_time_now - timestamp;
                if diff > receive_time {
                    (gst::ClockTime::ZERO, duration, false)
                } else {
                    (receive_time - diff, duration, false)
                }
            } else {
                // Sender clock is ahead of ours: push the PTS forward.
                let diff = timestamp - real_time_now;
                (receive_time + diff, duration, false)
            }
        }
        TimestampMode::ReceiveTime => (receive_time, duration, false),
        TimestampMode::Auto => {
            // Prefer the timecode observation, fall back to the timestamp
            // observation, and finally to the raw receive time.
            res_timecode
                .or(res_timestamp)
                .unwrap_or((receive_time, duration, false))
        }
    };

    gst::log!(
        CAT,
        obj: element,
        "Calculated PTS {}, duration {}",
        pts.display(),
        duration.display(),
    );

    Some((pts, duration, discont))
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
fn create_video_buffer_and_info(
|
2019-07-19 08:32:04 +00:00
|
|
|
&self,
|
2022-10-13 08:51:55 +00:00
|
|
|
element: &gst::Element,
|
2021-09-29 10:45:04 +00:00
|
|
|
video_frame: VideoFrame,
|
|
|
|
) -> Result<Buffer, gst::FlowError> {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Received video frame {:?}", video_frame);
|
2019-07-19 09:51:06 +00:00
|
|
|
|
2021-09-30 10:37:30 +00:00
|
|
|
let (pts, duration, discont) = self
|
2019-07-19 10:00:27 +00:00
|
|
|
.calculate_video_timestamp(element, &video_frame)
|
|
|
|
.ok_or_else(|| {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Flushing, dropping buffer");
|
2021-09-29 10:45:04 +00:00
|
|
|
gst::FlowError::Flushing
|
2019-07-19 10:00:27 +00:00
|
|
|
})?;
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2019-07-17 16:10:20 +00:00
|
|
|
let info = self.create_video_info(element, &video_frame)?;
|
|
|
|
|
2021-09-30 19:44:31 +00:00
|
|
|
let mut buffer = self.create_video_buffer(element, pts, duration, &info, &video_frame)?;
|
2021-09-30 10:37:30 +00:00
|
|
|
if discont {
|
|
|
|
buffer
|
|
|
|
.get_mut()
|
|
|
|
.unwrap()
|
|
|
|
.set_flags(gst::BufferFlags::RESYNC);
|
|
|
|
}
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2023-10-11 19:25:29 +00:00
|
|
|
let mut ndi_cc_decoder = self.0.ndi_cc_decoder.borrow_mut();
|
|
|
|
if ndi_cc_decoder.is_none() {
|
|
|
|
*ndi_cc_decoder = Some(NDICCMetaDecoder::new(info.width()));
|
|
|
|
}
|
|
|
|
|
|
|
|
{
|
|
|
|
let ndi_cc_decoder = ndi_cc_decoder.as_mut().unwrap();
|
|
|
|
// handle potential width change (also needed for standalone metadata)
|
|
|
|
ndi_cc_decoder.set_width(info.width());
|
|
|
|
|
|
|
|
if let Some(metadata) = video_frame.metadata() {
|
|
|
|
let res = ndi_cc_decoder.decode(metadata, &mut buffer);
|
|
|
|
if let Err(err) = res {
|
|
|
|
gst::debug!(CAT, obj: element, "Failed to parse NDI video frame metadata: {err}");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::log!(CAT, obj: element, "Produced video buffer {:?}", buffer);
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
Ok(Buffer::Video(buffer, info))
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn calculate_video_timestamp(
|
|
|
|
&self,
|
2022-10-13 08:51:55 +00:00
|
|
|
element: &gst::Element,
|
2019-07-17 16:10:20 +00:00
|
|
|
video_frame: &VideoFrame,
|
2021-09-30 10:37:30 +00:00
|
|
|
) -> Option<(gst::ClockTime, Option<gst::ClockTime>, bool)> {
|
2021-09-13 09:26:56 +00:00
|
|
|
let duration = gst::ClockTime::SECOND.mul_div_floor(
|
|
|
|
video_frame.frame_rate().1 as u64,
|
|
|
|
video_frame.frame_rate().0 as u64,
|
|
|
|
);
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2019-07-19 09:51:06 +00:00
|
|
|
self.calculate_timestamp(
|
|
|
|
element,
|
2022-06-06 12:48:12 +00:00
|
|
|
false,
|
2019-07-19 09:51:06 +00:00
|
|
|
video_frame.timestamp(),
|
|
|
|
video_frame.timecode(),
|
2019-07-18 21:05:31 +00:00
|
|
|
duration,
|
2019-07-19 09:51:06 +00:00
|
|
|
)
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn create_video_info(
|
|
|
|
&self,
|
2022-10-13 08:51:55 +00:00
|
|
|
element: &gst::Element,
|
2019-07-17 16:10:20 +00:00
|
|
|
video_frame: &VideoFrame,
|
2021-10-01 09:47:43 +00:00
|
|
|
) -> Result<VideoInfo, gst::FlowError> {
|
|
|
|
let fourcc = video_frame.fourcc();
|
|
|
|
|
|
|
|
let par = gst::Fraction::approximate_f32(video_frame.picture_aspect_ratio())
|
|
|
|
.unwrap_or_else(|| gst::Fraction::new(1, 1))
|
|
|
|
* gst::Fraction::new(video_frame.yres(), video_frame.xres());
|
|
|
|
let interlace_mode = match video_frame.frame_format_type() {
|
|
|
|
ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_progressive => {
|
|
|
|
gst_video::VideoInterlaceMode::Progressive
|
|
|
|
}
|
|
|
|
ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved => {
|
|
|
|
gst_video::VideoInterlaceMode::Interleaved
|
|
|
|
}
|
|
|
|
#[cfg(feature = "interlaced-fields")]
|
|
|
|
_ => gst_video::VideoInterlaceMode::Alternate,
|
|
|
|
#[cfg(not(feature = "interlaced-fields"))]
|
2020-01-02 12:00:47 +00:00
|
|
|
_ => {
|
2021-09-13 09:26:56 +00:00
|
|
|
gst::element_error!(
|
2020-01-02 12:00:47 +00:00
|
|
|
element,
|
|
|
|
gst::StreamError::Format,
|
2021-10-01 09:47:43 +00:00
|
|
|
["Separate field interlacing not supported"]
|
2020-01-02 12:00:47 +00:00
|
|
|
);
|
|
|
|
return Err(gst::FlowError::NotNegotiated);
|
2021-10-01 09:47:43 +00:00
|
|
|
}
|
2019-07-17 16:10:20 +00:00
|
|
|
};
|
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
if [
|
|
|
|
ndisys::NDIlib_FourCC_video_type_UYVY,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_UYVA,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_YV12,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_NV12,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_I420,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_BGRA,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_BGRX,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_RGBA,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_BGRX,
|
|
|
|
]
|
|
|
|
.contains(&fourcc)
|
2019-07-17 16:10:20 +00:00
|
|
|
{
|
2021-10-01 09:47:43 +00:00
|
|
|
// YV12 and I420 are swapped in the NDI SDK compared to GStreamer
|
|
|
|
let format = match video_frame.fourcc() {
|
|
|
|
ndisys::NDIlib_FourCC_video_type_UYVY => gst_video::VideoFormat::Uyvy,
|
|
|
|
// FIXME: This drops the alpha plane!
|
|
|
|
ndisys::NDIlib_FourCC_video_type_UYVA => gst_video::VideoFormat::Uyvy,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_YV12 => gst_video::VideoFormat::I420,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_NV12 => gst_video::VideoFormat::Nv12,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_I420 => gst_video::VideoFormat::Yv12,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_BGRA => gst_video::VideoFormat::Bgra,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_BGRX => gst_video::VideoFormat::Bgrx,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_RGBA => gst_video::VideoFormat::Rgba,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_RGBX => gst_video::VideoFormat::Rgbx,
|
|
|
|
_ => {
|
|
|
|
gst::element_error!(
|
|
|
|
element,
|
|
|
|
gst::StreamError::Format,
|
|
|
|
["Unsupported video fourcc {:08x}", video_frame.fourcc()]
|
|
|
|
);
|
|
|
|
|
|
|
|
return Err(gst::FlowError::NotNegotiated);
|
|
|
|
} // TODO: NDIlib_FourCC_video_type_P216 and NDIlib_FourCC_video_type_PA16 not
|
|
|
|
// supported by GStreamer
|
|
|
|
};
|
|
|
|
|
|
|
|
#[cfg(feature = "interlaced-fields")]
|
|
|
|
{
|
|
|
|
let mut builder = gst_video::VideoInfo::builder(
|
|
|
|
format,
|
|
|
|
video_frame.xres() as u32,
|
|
|
|
video_frame.yres() as u32,
|
|
|
|
)
|
|
|
|
.fps(gst::Fraction::from(video_frame.frame_rate()))
|
|
|
|
.par(par)
|
|
|
|
.interlace_mode(interlace_mode);
|
|
|
|
|
|
|
|
if video_frame.frame_format_type()
|
|
|
|
== ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved
|
|
|
|
{
|
|
|
|
builder = builder.field_order(gst_video::VideoFieldOrder::TopFieldFirst);
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
return Ok(VideoInfo::Video(builder.build().map_err(|_| {
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::element_error!(
|
|
|
|
element,
|
|
|
|
gst::StreamError::Format,
|
|
|
|
["Invalid video format configuration"]
|
|
|
|
);
|
|
|
|
|
|
|
|
gst::FlowError::NotNegotiated
|
|
|
|
})?));
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(not(feature = "interlaced-fields"))]
|
2019-07-17 16:10:20 +00:00
|
|
|
{
|
2021-10-01 09:47:43 +00:00
|
|
|
let mut builder = gst_video::VideoInfo::builder(
|
|
|
|
format,
|
|
|
|
video_frame.xres() as u32,
|
|
|
|
video_frame.yres() as u32,
|
|
|
|
)
|
|
|
|
.fps(gst::Fraction::from(video_frame.frame_rate()))
|
|
|
|
.par(par)
|
|
|
|
.interlace_mode(interlace_mode);
|
|
|
|
|
|
|
|
if video_frame.frame_format_type()
|
|
|
|
== ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved
|
|
|
|
{
|
|
|
|
builder = builder.field_order(gst_video::VideoFieldOrder::TopFieldFirst);
|
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
return Ok(VideoInfo::Video(builder.build().map_err(|_| {
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::element_error!(
|
|
|
|
element,
|
|
|
|
gst::StreamError::Format,
|
|
|
|
["Invalid video format configuration"]
|
|
|
|
);
|
|
|
|
|
|
|
|
gst::FlowError::NotNegotiated
|
|
|
|
})?));
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
2021-10-01 09:47:43 +00:00
|
|
|
}
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
#[cfg(feature = "advanced-sdk")]
|
|
|
|
if [
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth,
|
|
|
|
]
|
|
|
|
.contains(&fourcc)
|
|
|
|
{
|
|
|
|
let variant = match fourcc {
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth
|
|
|
|
| ndisys::NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth => String::from("SHQ0"),
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth
|
|
|
|
| ndisys::NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth => String::from("SHQ2"),
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth
|
|
|
|
| ndisys::NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth => String::from("SHQ7"),
|
|
|
|
_ => {
|
|
|
|
gst::element_error!(
|
|
|
|
element,
|
|
|
|
gst::StreamError::Format,
|
|
|
|
[
|
|
|
|
"Unsupported SpeedHQ video fourcc {:08x}",
|
|
|
|
video_frame.fourcc()
|
|
|
|
]
|
|
|
|
);
|
2019-07-19 09:51:06 +00:00
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
return Err(gst::FlowError::NotNegotiated);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
return Ok(VideoInfo::SpeedHQInfo {
|
|
|
|
variant,
|
|
|
|
xres: video_frame.xres(),
|
|
|
|
yres: video_frame.yres(),
|
|
|
|
fps_n: video_frame.frame_rate().0,
|
|
|
|
fps_d: video_frame.frame_rate().1,
|
2022-01-22 10:12:03 +00:00
|
|
|
par_n: par.numer(),
|
|
|
|
par_d: par.denom(),
|
2021-10-01 09:47:43 +00:00
|
|
|
interlace_mode,
|
|
|
|
});
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
#[cfg(feature = "advanced-sdk")]
|
|
|
|
if [
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_H264_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_H264_lowest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_H264_alpha_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_H264_alpha_lowest_bandwidth,
|
|
|
|
]
|
|
|
|
.contains(&fourcc)
|
2019-07-17 16:10:20 +00:00
|
|
|
{
|
2021-10-01 09:47:43 +00:00
|
|
|
let compressed_packet = video_frame.compressed_packet().ok_or_else(|| {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::error!(
|
2021-10-01 09:47:43 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Video packet doesn't have compressed packet start"
|
2019-07-17 16:10:20 +00:00
|
|
|
);
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]);
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::FlowError::Error
|
|
|
|
})?;
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
if compressed_packet.fourcc != NDIlib_compressed_FourCC_type_H264 {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::error!(CAT, obj: element, "Non-H264 video packet");
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]);
|
|
|
|
|
|
|
|
return Err(gst::FlowError::Error);
|
2019-07-18 21:05:31 +00:00
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
return Ok(VideoInfo::H264 {
|
2021-10-01 09:47:43 +00:00
|
|
|
xres: video_frame.xres(),
|
|
|
|
yres: video_frame.yres(),
|
|
|
|
fps_n: video_frame.frame_rate().0,
|
|
|
|
fps_d: video_frame.frame_rate().1,
|
2022-01-22 10:12:03 +00:00
|
|
|
par_n: par.numer(),
|
|
|
|
par_d: par.denom(),
|
2021-10-01 09:47:43 +00:00
|
|
|
interlace_mode,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(feature = "advanced-sdk")]
|
|
|
|
if [
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_HEVC_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_HEVC_lowest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_HEVC_alpha_highest_bandwidth,
|
|
|
|
ndisys::NDIlib_FourCC_video_type_ex_HEVC_alpha_lowest_bandwidth,
|
|
|
|
]
|
|
|
|
.contains(&fourcc)
|
|
|
|
{
|
|
|
|
let compressed_packet = video_frame.compressed_packet().ok_or_else(|| {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::error!(
|
2021-10-01 09:47:43 +00:00
|
|
|
CAT,
|
|
|
|
obj: element,
|
|
|
|
"Video packet doesn't have compressed packet start"
|
2019-07-19 09:51:06 +00:00
|
|
|
);
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]);
|
2019-07-19 09:51:06 +00:00
|
|
|
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::FlowError::Error
|
|
|
|
})?;
|
|
|
|
|
|
|
|
if compressed_packet.fourcc != NDIlib_compressed_FourCC_type_HEVC {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::error!(CAT, obj: element, "Non-H265 video packet");
|
2021-10-01 09:47:43 +00:00
|
|
|
gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]);
|
|
|
|
|
|
|
|
return Err(gst::FlowError::Error);
|
|
|
|
}
|
|
|
|
|
2022-10-12 18:57:34 +00:00
|
|
|
return Ok(VideoInfo::H265 {
|
2021-10-01 09:47:43 +00:00
|
|
|
xres: video_frame.xres(),
|
|
|
|
yres: video_frame.yres(),
|
|
|
|
fps_n: video_frame.frame_rate().0,
|
|
|
|
fps_d: video_frame.frame_rate().1,
|
2022-01-22 10:12:03 +00:00
|
|
|
par_n: par.numer(),
|
|
|
|
par_d: par.denom(),
|
2021-10-01 09:47:43 +00:00
|
|
|
interlace_mode,
|
|
|
|
});
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
2021-10-01 09:47:43 +00:00
|
|
|
|
|
|
|
gst::element_error!(
|
|
|
|
element,
|
|
|
|
gst::StreamError::Format,
|
|
|
|
["Unsupported video fourcc {:08x}", video_frame.fourcc()]
|
|
|
|
);
|
|
|
|
Err(gst::FlowError::NotNegotiated)
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Copies the NDI video frame into a GStreamer buffer and attaches timing
/// and interlacing metadata.
///
/// * `pts` / `duration` - timing previously computed by
///   `calculate_video_timestamp`.
/// * `info`             - format description from `create_video_info`.
///
/// The original NDI timecode is always attached as a
/// `ReferenceTimestampMeta` (`TIMECODE_CAPS`); the NDI timestamp only when
/// the sender provided one. Interlacing buffer flags are set according to the
/// frame format type; with the `interlaced-fields` feature, single fields
/// are flagged as top/bottom field respectively.
fn create_video_buffer(
    &self,
    element: &gst::Element,
    pts: gst::ClockTime,
    duration: Option<gst::ClockTime>,
    info: &VideoInfo,
    video_frame: &VideoFrame,
) -> Result<gst::Buffer, gst::FlowError> {
    let mut buffer = self.copy_video_frame(element, info, video_frame)?;
    {
        // Buffer was just created by copy_video_frame, so it is uniquely
        // owned and get_mut() cannot fail.
        let buffer = buffer.get_mut().unwrap();
        buffer.set_pts(pts);
        buffer.set_duration(duration);

        // NDI timecodes are in 100 ns units.
        gst::ReferenceTimestampMeta::add(
            buffer,
            &crate::TIMECODE_CAPS,
            (video_frame.timecode() as u64 * 100).nseconds(),
            gst::ClockTime::NONE,
        );
        if video_frame.timestamp() != ndisys::NDIlib_recv_timestamp_undefined {
            gst::ReferenceTimestampMeta::add(
                buffer,
                &crate::TIMESTAMP_CAPS,
                (video_frame.timestamp() as u64 * 100).nseconds(),
                gst::ClockTime::NONE,
            );
        }

        #[cfg(feature = "interlaced-fields")]
        {
            match video_frame.frame_format_type() {
                ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved => {
                    buffer.set_video_flags(
                        gst_video::VideoBufferFlags::INTERLACED
                            | gst_video::VideoBufferFlags::TFF,
                    );
                }
                ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0 => {
                    buffer.set_video_flags(
                        gst_video::VideoBufferFlags::INTERLACED
                            | gst_video::VideoBufferFlags::TOP_FIELD,
                    );
                }
                ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_1 => {
                    buffer.set_video_flags(
                        gst_video::VideoBufferFlags::INTERLACED
                            | gst_video::VideoBufferFlags::BOTTOM_FIELD,
                    );
                }
                // Progressive frames carry no interlacing flags.
                _ => (),
            };
        }

        #[cfg(not(feature = "interlaced-fields"))]
        {
            // Without alternate-field support only full interleaved frames
            // can be flagged; single fields were already rejected when the
            // VideoInfo was created.
            if video_frame.frame_format_type()
                == ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved
            {
                buffer.set_video_flags(
                    gst_video::VideoBufferFlags::INTERLACED | gst_video::VideoBufferFlags::TFF,
                );
            }
        }
    }

    Ok(buffer)
}
|
|
|
|
|
|
|
|
/// Copies the raw payload of an NDI video frame into a new `gst::Buffer`.
///
/// For uncompressed formats (`VideoInfo::Video`) the data is copied
/// line-by-line so that NDI's source stride is converted to the stride
/// GStreamer chose for the destination frame; sizes and strides are
/// validated first so a short or malformed packet produces an element error
/// instead of a panic. With the advanced SDK, SpeedHQ payloads are wrapped
/// as-is and H.264/H.265 payloads are prefixed with their codec extra data.
fn copy_video_frame(
    &self,
    // `element` is only used on some cfg paths / error branches.
    #[allow(unused_variables)] element: &gst::Element,
    info: &VideoInfo,
    video_frame: &VideoFrame,
) -> Result<gst::Buffer, gst::FlowError> {
    match info {
        VideoInfo::Video(ref info) => {
            let src = video_frame.data().ok_or(gst::FlowError::Error)?;

            let buffer = gst::Buffer::with_size(info.size()).unwrap();
            let mut vframe = gst_video::VideoFrame::from_buffer_writable(buffer, info).unwrap();

            match info.format() {
                // Single-plane packed formats.
                gst_video::VideoFormat::Uyvy
                | gst_video::VideoFormat::Bgra
                | gst_video::VideoFormat::Bgrx
                | gst_video::VideoFormat::Rgba
                | gst_video::VideoFormat::Rgbx => {
                    // UYVY is 2 bytes/pixel, the RGB-family formats 4.
                    let line_bytes = if info.format() == gst_video::VideoFormat::Uyvy {
                        2 * vframe.width() as usize
                    } else {
                        4 * vframe.width() as usize
                    };

                    let dest_stride = vframe.plane_stride()[0] as usize;
                    let dest = vframe.plane_data_mut(0).unwrap();
                    let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
                    let plane_size = video_frame.yres() as usize * src_stride;

                    // Reject packets whose payload is shorter than declared
                    // or whose stride can't hold a full line.
                    if src.len() < plane_size || src_stride < line_bytes {
                        gst::error!(CAT, obj: element, "Video packet has wrong stride or size");
                        gst::element_error!(
                            element,
                            gst::StreamError::Format,
                            ["Video packet has wrong stride or size"]
                        );
                        return Err(gst::FlowError::Error);
                    }

                    // Copy row by row; src and dest strides may differ, only
                    // the visible line_bytes of each row are copied.
                    for (dest, src) in dest
                        .chunks_exact_mut(dest_stride)
                        .zip(src.chunks_exact(src_stride))
                    {
                        dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
                    }
                }
                // Two planes: full-res Y plane followed by an interleaved
                // half-height UV plane with the same stride.
                gst_video::VideoFormat::Nv12 => {
                    let line_bytes = vframe.width() as usize;
                    let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
                    let plane_size = video_frame.yres() as usize * src_stride;

                    if src.len() < 2 * plane_size || src_stride < line_bytes {
                        gst::error!(CAT, obj: element, "Video packet has wrong stride or size");
                        gst::element_error!(
                            element,
                            gst::StreamError::Format,
                            ["Video packet has wrong stride or size"]
                        );
                        return Err(gst::FlowError::Error);
                    }

                    // First plane
                    {
                        let dest_stride = vframe.plane_stride()[0] as usize;
                        let dest = vframe.plane_data_mut(0).unwrap();
                        let src = &src[..plane_size];

                        for (dest, src) in dest
                            .chunks_exact_mut(dest_stride)
                            .zip(src.chunks_exact(src_stride))
                        {
                            dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
                        }
                    }

                    // Second plane
                    {
                        let dest_stride = vframe.plane_stride()[1] as usize;
                        let dest = vframe.plane_data_mut(1).unwrap();
                        let src = &src[plane_size..];

                        for (dest, src) in dest
                            .chunks_exact_mut(dest_stride)
                            .zip(src.chunks_exact(src_stride))
                        {
                            dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
                        }
                    }
                }
                // Three planes: full-res Y plane plus two quarter-size chroma
                // planes; chroma stride/line/height are rounded up halves.
                gst_video::VideoFormat::Yv12 | gst_video::VideoFormat::I420 => {
                    let line_bytes = vframe.width() as usize;
                    let line_bytes1 = (line_bytes + 1) / 2;

                    let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
                    let src_stride1 = (src_stride + 1) / 2;

                    let plane_size = video_frame.yres() as usize * src_stride;
                    let plane_size1 = ((video_frame.yres() as usize + 1) / 2) * src_stride1;

                    if src.len() < plane_size + 2 * plane_size1 || src_stride < line_bytes {
                        gst::error!(CAT, obj: element, "Video packet has wrong stride or size");
                        gst::element_error!(
                            element,
                            gst::StreamError::Format,
                            ["Video packet has wrong stride or size"]
                        );
                        return Err(gst::FlowError::Error);
                    }

                    // First plane
                    {
                        let dest_stride = vframe.plane_stride()[0] as usize;
                        let dest = vframe.plane_data_mut(0).unwrap();
                        let src = &src[..plane_size];

                        for (dest, src) in dest
                            .chunks_exact_mut(dest_stride)
                            .zip(src.chunks_exact(src_stride))
                        {
                            dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
                        }
                    }

                    // Second plane
                    {
                        let dest_stride = vframe.plane_stride()[1] as usize;
                        let dest = vframe.plane_data_mut(1).unwrap();
                        let src = &src[plane_size..][..plane_size1];

                        for (dest, src) in dest
                            .chunks_exact_mut(dest_stride)
                            .zip(src.chunks_exact(src_stride1))
                        {
                            dest[..line_bytes1].copy_from_slice(&src[..line_bytes1]);
                        }
                    }

                    // Third plane
                    {
                        let dest_stride = vframe.plane_stride()[2] as usize;
                        let dest = vframe.plane_data_mut(2).unwrap();
                        let src = &src[plane_size + plane_size1..][..plane_size1];

                        for (dest, src) in dest
                            .chunks_exact_mut(dest_stride)
                            .zip(src.chunks_exact(src_stride1))
                        {
                            dest[..line_bytes1].copy_from_slice(&src[..line_bytes1]);
                        }
                    }
                }
                // create_video_info only produces the formats handled above.
                _ => unreachable!(),
            }

            Ok(vframe.into_buffer())
        }
        #[cfg(feature = "advanced-sdk")]
        VideoInfo::SpeedHQInfo { .. } => {
            let data = video_frame.data().ok_or_else(|| {
                gst::error!(CAT, obj: element, "Video packet has no data");
                gst::element_error!(
                    element,
                    gst::StreamError::Format,
                    ["Invalid video packet"]
                );

                gst::FlowError::Error
            })?;

            // SpeedHQ payloads are passed through verbatim.
            Ok(gst::Buffer::from_mut_slice(Vec::from(data)))
        }
        #[cfg(feature = "advanced-sdk")]
        VideoInfo::H264 { .. } | VideoInfo::H265 { .. } => {
            let compressed_packet = video_frame.compressed_packet().ok_or_else(|| {
                gst::error!(
                    CAT,
                    obj: element,
                    "Video packet doesn't have compressed packet start"
                );
                gst::element_error!(
                    element,
                    gst::StreamError::Format,
                    ["Invalid video packet"]
                );

                gst::FlowError::Error
            })?;

            // Prepend codec extra data (e.g. parameter sets) when present,
            // then the payload itself.
            let mut buffer = Vec::new();
            if let Some(extra_data) = compressed_packet.extra_data {
                buffer.extend_from_slice(extra_data);
            }
            buffer.extend_from_slice(compressed_packet.data);
            let mut buffer = gst::Buffer::from_mut_slice(buffer);
            // Non-keyframes are marked so downstream can drop them on seek.
            if !compressed_packet.key_frame {
                let buffer = buffer.get_mut().unwrap();
                buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
            }

            Ok(buffer)
        }
    }
}
|
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
fn create_audio_buffer_and_info(
|
2019-07-17 16:10:20 +00:00
|
|
|
&self,
|
2022-10-13 08:51:55 +00:00
|
|
|
element: &gst::Element,
|
2021-09-29 10:45:04 +00:00
|
|
|
audio_frame: AudioFrame,
|
|
|
|
) -> Result<Buffer, gst::FlowError> {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Received audio frame {:?}", audio_frame);
|
2019-07-19 09:51:06 +00:00
|
|
|
|
2021-09-30 10:37:30 +00:00
|
|
|
let (pts, duration, discont) = self
|
2019-07-19 10:00:27 +00:00
|
|
|
.calculate_audio_timestamp(element, &audio_frame)
|
|
|
|
.ok_or_else(|| {
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::debug!(CAT, obj: element, "Flushing, dropping buffer");
|
2021-09-29 10:45:04 +00:00
|
|
|
gst::FlowError::Flushing
|
2019-07-19 10:00:27 +00:00
|
|
|
})?;
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2019-07-17 16:10:20 +00:00
|
|
|
let info = self.create_audio_info(element, &audio_frame)?;
|
|
|
|
|
2021-09-30 19:44:31 +00:00
|
|
|
let mut buffer = self.create_audio_buffer(element, pts, duration, &info, &audio_frame)?;
|
2021-09-30 10:37:30 +00:00
|
|
|
if discont {
|
|
|
|
buffer
|
|
|
|
.get_mut()
|
|
|
|
.unwrap()
|
|
|
|
.set_flags(gst::BufferFlags::RESYNC);
|
|
|
|
}
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2022-10-13 08:51:55 +00:00
|
|
|
gst::log!(CAT, obj: element, "Produced audio buffer {:?}", buffer);
|
2019-07-17 16:10:20 +00:00
|
|
|
|
2021-09-29 10:45:04 +00:00
|
|
|
Ok(Buffer::Audio(buffer, info))
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn calculate_audio_timestamp(
|
|
|
|
&self,
|
2022-10-13 08:51:55 +00:00
|
|
|
element: &gst::Element,
|
2019-07-17 16:10:20 +00:00
|
|
|
audio_frame: &AudioFrame,
|
2021-09-30 10:37:30 +00:00
|
|
|
) -> Option<(gst::ClockTime, Option<gst::ClockTime>, bool)> {
|
2021-09-13 09:26:56 +00:00
|
|
|
let duration = gst::ClockTime::SECOND.mul_div_floor(
|
|
|
|
audio_frame.no_samples() as u64,
|
|
|
|
audio_frame.sample_rate() as u64,
|
|
|
|
);
|
2019-07-18 21:05:31 +00:00
|
|
|
|
2019-07-19 09:51:06 +00:00
|
|
|
self.calculate_timestamp(
|
|
|
|
element,
|
2022-06-06 12:48:12 +00:00
|
|
|
true,
|
2019-07-19 09:51:06 +00:00
|
|
|
audio_frame.timestamp(),
|
|
|
|
audio_frame.timecode(),
|
2019-07-18 21:05:31 +00:00
|
|
|
duration,
|
2019-07-19 09:51:06 +00:00
|
|
|
)
|
2019-07-17 16:10:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
    /// Builds an `AudioInfo` describing the format of a received audio frame,
    /// dispatching on the frame's FourCC.
    ///
    /// Supported formats:
    /// - `FLTp`: mapped to an F32 `gst_audio::AudioInfo` with fallback
    ///   channel positions for up to 8 channels.
    /// - `AAC` (only with the `advanced-sdk` feature): mapped to
    ///   `AudioInfo::Aac` carrying the codec data from the compressed packet.
    ///
    /// Any other FourCC posts an element error and returns
    /// `gst::FlowError::NotNegotiated`.
    fn create_audio_info(
        &self,
        element: &gst::Element,
        audio_frame: &AudioFrame,
    ) -> Result<AudioInfo, gst::FlowError> {
        let fourcc = audio_frame.fourcc();

        if [NDIlib_FourCC_audio_type_FLTp].contains(&fourcc) {
            let channels = audio_frame.no_channels() as u32;
            // Default every position to None; only fill in real positions
            // below when a fallback mask is available.
            let mut positions = [gst_audio::AudioChannelPosition::None; 64];
            // fallback_mask only yields a meaningful layout for <= 8
            // channels; beyond that the positions stay None.
            if channels <= 8 {
                let _ = gst_audio::AudioChannelPosition::positions_from_mask(
                    gst_audio::AudioChannelPosition::fallback_mask(channels),
                    &mut positions[..channels as usize],
                );
            }

            let builder = gst_audio::AudioInfo::builder(
                gst_audio::AUDIO_FORMAT_F32,
                audio_frame.sample_rate() as u32,
                channels,
            )
            .positions(&positions[..channels as usize]);

            // An invalid rate/channel/position combination is a negotiation
            // failure, not a stream error.
            let info = builder.build().map_err(|_| {
                gst::element_error!(
                    element,
                    gst::StreamError::Format,
                    ["Invalid audio format configuration"]
                );

                gst::FlowError::NotNegotiated
            })?;

            return Ok(AudioInfo::Audio(info));
        }

        #[cfg(feature = "advanced-sdk")]
        if [NDIlib_FourCC_audio_type_AAC].contains(&fourcc) {
            // Compressed frames must start with a compressed-packet header.
            let compressed_packet = audio_frame.compressed_packet().ok_or_else(|| {
                gst::error!(
                    CAT,
                    obj: element,
                    "Audio packet doesn't have compressed packet start"
                );
                gst::element_error!(element, gst::StreamError::Format, ["Invalid audio packet"]);

                gst::FlowError::Error
            })?;

            // The outer FourCC said AAC; the inner packet must agree.
            if compressed_packet.fourcc != NDIlib_compressed_FourCC_type_AAC {
                gst::error!(CAT, obj: element, "Non-AAC audio packet");
                gst::element_error!(element, gst::StreamError::Format, ["Invalid audio packet"]);

                return Err(gst::FlowError::Error);
            }

            return Ok(AudioInfo::Aac {
                sample_rate: audio_frame.sample_rate(),
                no_channels: audio_frame.no_channels(),
                // AAC without codec data (extra_data) cannot be negotiated.
                codec_data: compressed_packet
                    .extra_data
                    .ok_or(gst::FlowError::NotNegotiated)?
                    .try_into()
                    .map_err(|_| gst::FlowError::NotNegotiated)?,
            });
        }

        // FIXME: Needs testing with an actual stream to understand how it works
        // #[cfg(feature = "advanced-sdk")]
        // if [NDIlib_FourCC_audio_type_Opus].contains(&fourcc) {}

        gst::element_error!(
            element,
            gst::StreamError::Format,
            ["Unsupported audio fourcc {:08x}", audio_frame.fourcc()]
        );
        Err(gst::FlowError::NotNegotiated)
    }
|
|
|
|
|
|
|
|
    /// Copies the payload of an audio frame into a timestamped `gst::Buffer`.
    ///
    /// For `AudioInfo::Audio` (planar F32 input, as set up by
    /// `create_audio_info`) the samples are interleaved into the output
    /// buffer and NDI timecode/timestamp reference metas are attached.
    /// For the `advanced-sdk` compressed variants (Opus/AAC) the packet
    /// bytes are copied through as-is.
    fn create_audio_buffer(
        &self,
        #[allow(unused_variables)] element: &gst::Element,
        pts: gst::ClockTime,
        duration: Option<gst::ClockTime>,
        info: &AudioInfo,
        audio_frame: &AudioFrame,
    ) -> Result<gst::Buffer, gst::FlowError> {
        match info {
            AudioInfo::Audio(ref info) => {
                let src = audio_frame.data().ok_or(gst::FlowError::Error)?;
                // Output size: samples * bytes-per-frame of the negotiated info.
                let buff_size = (audio_frame.no_samples() as u32 * info.bpf()) as usize;

                let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
                {
                    let buffer = buffer.get_mut().unwrap();

                    buffer.set_pts(pts);
                    buffer.set_duration(duration);

                    // NDI timecode/timestamp are in 100ns units; expose them
                    // as ReferenceTimestampMeta for downstream consumers.
                    gst::ReferenceTimestampMeta::add(
                        buffer,
                        &crate::TIMECODE_CAPS,
                        (audio_frame.timecode() as u64 * 100).nseconds(),
                        gst::ClockTime::NONE,
                    );
                    // The timestamp is optional; skip the meta when the
                    // sender did not provide one.
                    if audio_frame.timestamp() != ndisys::NDIlib_recv_timestamp_undefined {
                        gst::ReferenceTimestampMeta::add(
                            buffer,
                            &crate::TIMESTAMP_CAPS,
                            (audio_frame.timestamp() as u64 * 100).nseconds(),
                            gst::ClockTime::NONE,
                        );
                    }

                    let mut dest = buffer.map_writable().unwrap();
                    let dest = dest
                        .as_mut_slice_of::<f32>()
                        .map_err(|_| gst::FlowError::NotNegotiated)?;
                    // Invariant: buffer was sized for exactly
                    // no_samples * no_channels f32 samples.
                    assert!(
                        dest.len()
                            == audio_frame.no_samples() as usize
                                * audio_frame.no_channels() as usize
                    );

                    // Planar -> interleaved: the source holds one plane per
                    // channel (stride = channel_stride_or_data_size_in_bytes);
                    // scatter each plane's samples to dest[i * channels + ch].
                    for (channel, samples) in src
                        .chunks_exact(audio_frame.channel_stride_or_data_size_in_bytes() as usize)
                        .enumerate()
                    {
                        let samples = samples
                            .as_slice_of::<f32>()
                            .map_err(|_| gst::FlowError::NotNegotiated)?;

                        // The plane stride may exceed the payload; only the
                        // first no_samples entries are valid.
                        for (i, sample) in samples[..audio_frame.no_samples() as usize]
                            .iter()
                            .enumerate()
                        {
                            dest[i * (audio_frame.no_channels() as usize) + channel] = *sample;
                        }
                    }
                }

                Ok(buffer)
            }
            #[cfg(feature = "advanced-sdk")]
            AudioInfo::Opus { .. } => {
                // Opus packets are passed through untouched.
                let data = audio_frame.data().ok_or_else(|| {
                    gst::error!(CAT, obj: element, "Audio packet has no data");
                    gst::element_error!(
                        element,
                        gst::StreamError::Format,
                        ["Invalid audio packet"]
                    );

                    gst::FlowError::Error
                })?;

                Ok(gst::Buffer::from_mut_slice(Vec::from(data)))
            }
            #[cfg(feature = "advanced-sdk")]
            AudioInfo::Aac { .. } => {
                // AAC payload lives behind a compressed-packet header; only
                // the packet data (not the header/extra data) is forwarded.
                let compressed_packet = audio_frame.compressed_packet().ok_or_else(|| {
                    gst::error!(
                        CAT,
                        obj: element,
                        "Audio packet doesn't have compressed packet start"
                    );
                    gst::element_error!(
                        element,
                        gst::StreamError::Format,
                        ["Invalid audio packet"]
                    );

                    gst::FlowError::Error
                })?;

                Ok(gst::Buffer::from_mut_slice(Vec::from(
                    compressed_packet.data,
                )))
            }
        }
    }
|
|
|
|
}
|