Mirror of https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs.git (synced 2025-04-28 07:55:22 +00:00)
Merge pull request #21 from o-reo/master
FIX: changed return types for GStreamer functions, fixed NDI frame memory leaks
Commit fb56cdc87c
3 changed files with 86 additions and 75 deletions
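
The diff below makes two kinds of changes to the NDI audio and video sources: the GStreamer virtual-method overrides move from the old bool / gst::StateChangeReturn returns to Result-based returns, and every NDI frame obtained with NDIlib_recv_capture_v2() is now released with the matching NDIlib_recv_free_audio_v2() / NDIlib_recv_free_video_v2() call. As orientation, here is a minimal sketch of the new shapes, assuming the Result-based gstreamer-rs subclassing API this branch targets and the crate's NdiAudioSrc type; the bodies are illustrative, not the element's real logic:

// Sketch only: the signatures the overrides take after this change.
impl BaseSrcImpl for NdiAudioSrc {
    // was: fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> bool
    fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> Result<(), gst::LoggableError> {
        // parse caps, store the resulting AudioInfo ...
        Ok(())
    }

    // was: fn start(&self, element: &gst_base::BaseSrc) -> bool
    fn start(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
        // on connection failure the element now returns, for example:
        // Err(gst_error_msg!(gst::ResourceError::NotFound, ["Could not connect to this source"]))
        Ok(())
    }

    // was: fn stop(&self, element: &gst_base::BaseSrc) -> bool
    fn stop(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
        Ok(())
    }
}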
@@ -46,7 +46,7 @@ impl Default for Settings {
 }
 
 static PROPERTIES: [subclass::Property; 3] = [
-    subclass::Property("stream-name", || {
+    subclass::Property("stream-name", |_| {
         glib::ParamSpec::string(
             "stream-name",
             "Sream Name",
@@ -55,7 +55,7 @@ subclass::Property("stream-name", || {
             glib::ParamFlags::READWRITE,
         )
     }),
-    subclass::Property("ip", || {
+    subclass::Property("ip", |_| {
         glib::ParamSpec::string(
             "ip",
             "Stream IP",
@@ -64,7 +64,7 @@ subclass::Property("ip", || {
             glib::ParamFlags::READWRITE,
         )
     }),
-    subclass::Property("loss-threshold", || {
+    subclass::Property("loss-threshold", |_| {
         glib::ParamSpec::uint(
             "loss-threshold",
             "Loss threshold",
@@ -152,7 +152,8 @@ impl ObjectSubclass for NdiAudioSrc {
             gst::PadDirection::Src,
             gst::PadPresence::Always,
             &caps,
-        );
+        )
+        .unwrap();
         klass.add_pad_template(src_pad_template);
 
         klass.install_properties(&PROPERTIES);
@@ -246,7 +247,7 @@ impl ObjectSubclass for NdiAudioSrc {
         &self,
         element: &gst::Element,
         transition: gst::StateChange,
-    ) -> gst::StateChangeReturn {
+    ) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
         if transition == gst::StateChange::PausedToPlaying {
             let mut receivers = hashmap_receivers.lock().unwrap();
             let settings = self.settings.lock().unwrap();
@@ -257,35 +258,29 @@ impl ObjectSubclass for NdiAudioSrc {
 
             let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
 
-            let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
             unsafe {
-                while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_audio {
-                    frame_type = NDIlib_recv_capture_v2(
-                        pNDI_recv,
-                        ptr::null(),
-                        &audio_frame,
-                        ptr::null(),
-                        1000,
-                    );
-                    gst_debug!(self.cat, obj: element, "NDI audio frame received: {:?}", audio_frame);
-                }
+                while NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000)
+                    != NDIlib_frame_type_e::NDIlib_frame_type_audio {}
+            }
+            gst_debug!(self.cat, obj: element, "NDI audio frame received: {:?}", audio_frame);
 
-                if receiver.initial_timestamp <= audio_frame.timestamp as u64
-                    || receiver.initial_timestamp == 0
-                {
-                    receiver.initial_timestamp = audio_frame.timestamp as u64;
-                }
-                gst_debug!(self.cat, obj: element, "Setting initial timestamp to {}", receiver.initial_timestamp);
-            }
+            if receiver.initial_timestamp <= audio_frame.timestamp as u64
+                || receiver.initial_timestamp == 0 {
+                receiver.initial_timestamp = audio_frame.timestamp as u64;
+            }
+            unsafe {
+                NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
+            }
+            gst_debug!(self.cat, obj: element, "Setting initial timestamp to {}", receiver.initial_timestamp);
         }
         self.parent_change_state(element, transition)
     }
 }
 
 impl BaseSrcImpl for NdiAudioSrc {
-    fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> bool {
+    fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> Result<(), gst::LoggableError> {
         let info = match gst_audio::AudioInfo::from_caps(caps) {
-            None => return false,
+            None => return Err(gst_loggable_error!(self.cat, "Failed to build `AudioInfo` from caps {}", caps)),
             Some(info) => info,
         };
@@ -294,10 +289,10 @@ impl ObjectSubclass for NdiAudioSrc {
         let mut state = self.state.lock().unwrap();
         state.info = Some(info);
 
-        true
+        Ok(())
     }
 
-    fn start(&self, element: &gst_base::BaseSrc) -> bool {
+    fn start(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
         *self.state.lock().unwrap() = Default::default();
 
         let mut settings = self.settings.lock().unwrap();
@@ -308,17 +303,23 @@ impl ObjectSubclass for NdiAudioSrc {
             &settings.stream_name.clone(),
         );
 
-        settings.id_receiver != 0
+        match settings.id_receiver {
+            0 => Err(gst_error_msg!(
+                gst::ResourceError::NotFound,
+                ["Could not connect to this source"]
+            )),
+            _ => Ok(())
+        }
     }
 
-    fn stop(&self, element: &gst_base::BaseSrc) -> bool {
+    fn stop(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
         *self.state.lock().unwrap() = Default::default();
 
         let settings = self.settings.lock().unwrap();
         stop_ndi(self.cat, element, settings.id_receiver);
         // Commented because when adding ndi destroy stopped in this line
         //*self.state.lock().unwrap() = Default::default();
-        true
+        Ok(())
     }
 
     fn query(&self, element: &gst_base::BaseSrc, query: &mut gst::QueryRef) -> bool {
@@ -341,7 +342,7 @@ impl ObjectSubclass for NdiAudioSrc {
                 return false;
             }
         }
-        BaseSrcImpl::parent_query(self, element, query)
+        BaseSrcImplExt::parent_query(self, element, query)
     }
 
     fn fixate(&self, element: &gst_base::BaseSrc, caps: gst::Caps) -> gst::Caps {
@@ -355,13 +356,9 @@ impl ObjectSubclass for NdiAudioSrc {
 
         let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
 
-        let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
-        while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_audio {
-            unsafe {
-                frame_type =
-                    NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
-                gst_debug!(self.cat, obj: element, "NDI audio frame received: {:?}", audio_frame);
-            }
-        }
+        unsafe {
+            while NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000)
+                != NDIlib_frame_type_e::NDIlib_frame_type_audio {}
+        }
 
         let no_samples = audio_frame.no_samples as u64;
@@ -379,6 +376,10 @@ impl ObjectSubclass for NdiAudioSrc {
         }
 
         let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
+        unsafe {
+            NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
+        }
+
         self.parent_fixate(element, caps)
     }
 
@@ -431,8 +432,9 @@ impl ObjectSubclass for NdiAudioSrc {
             let buffer = gst::Buffer::with_size(0).unwrap();
             return Ok(buffer)
         }
 
         if time >= (audio_frame.timestamp as u64) {
+            NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
             gst_debug!(self.cat, obj: element, "Frame timestamp ({:?}) is lower than received in the first frame from NDI ({:?}), so skiping...", (audio_frame.timestamp as u64), time);
         } else {
             skip_frame = false;
@@ -474,6 +476,7 @@ impl ObjectSubclass for NdiAudioSrc {
                 dst.reference_level = 0;
                 dst.p_data = buffer.map_writable().unwrap().as_mut_slice_of::<i16>().unwrap().as_mut_ptr();
                 NDIlib_util_audio_to_interleaved_16s_v2(&audio_frame, &mut dst);
+                NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
             }
 
             gst_log!(self.cat, obj: element, "Produced buffer {:?}", buffer);
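
The leak fix in the audio-source hunks above always pairs a successful NDIlib_recv_capture_v2() with NDIlib_recv_free_audio_v2() once the frame's samples or timestamp have been read. A minimal sketch of that pattern, assuming the crate's ndilib FFI declarations are in scope; the helper name is hypothetical:

use std::ptr;

// Hypothetical helper illustrating the capture/free pairing; `recv` is a
// connected NDIlib_recv_instance_t (obtained via connect_ndi elsewhere in this crate).
unsafe fn grab_initial_audio_timestamp(recv: NDIlib_recv_instance_t) -> u64 {
    let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
    // Busy-wait until the receiver hands back an audio frame, as in the diff above.
    while NDIlib_recv_capture_v2(recv, ptr::null(), &audio_frame, ptr::null(), 1000)
        != NDIlib_frame_type_e::NDIlib_frame_type_audio
    {}
    let ts = audio_frame.timestamp as u64;
    // Return the frame to the NDI SDK so its buffer is not leaked.
    NDIlib_recv_free_audio_v2(recv, &audio_frame);
    ts
}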
@@ -33,6 +33,14 @@ extern "C" {
         p_metadata: *const NDIlib_metadata_frame_t,
         timeout_in_ms: u32,
     ) -> NDIlib_frame_type_e;
+    pub fn NDIlib_recv_free_video_v2(
+        p_instance: NDIlib_recv_instance_t,
+        p_video_data: *const NDIlib_video_frame_v2_t
+    );
+    pub fn NDIlib_recv_free_audio_v2(
+        p_instance: NDIlib_recv_instance_t,
+        p_audio_data: *const NDIlib_audio_frame_v2_t
+    );
 }
 
 pub type NDIlib_find_instance_t = *mut ::std::os::raw::c_void;
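
With NDIlib_recv_free_video_v2() and NDIlib_recv_free_audio_v2() now declared in the bindings, each call site frees frames by hand. A Drop-based guard would be one way to make the free call automatic; this is only a sketch of that idea, not part of this change, again assuming the crate's ndilib types:

// Hypothetical RAII wrapper: frees the captured audio frame when it goes out
// of scope, even on early returns. Not used by this merge request.
struct CapturedAudioFrame {
    recv: NDIlib_recv_instance_t,
    frame: NDIlib_audio_frame_v2_t,
}

impl Drop for CapturedAudioFrame {
    fn drop(&mut self) {
        // Called exactly once per captured frame.
        unsafe { NDIlib_recv_free_audio_v2(self.recv, &self.frame) };
    }
}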
@@ -47,7 +47,7 @@ impl Default for Settings {
 }
 
 static PROPERTIES: [subclass::Property; 3] = [
-    subclass::Property("stream-name", || {
+    subclass::Property("stream-name", |_| {
         glib::ParamSpec::string(
             "stream-name",
             "Stream Name",
@@ -56,7 +56,7 @@ subclass::Property("stream-name", || {
             glib::ParamFlags::READWRITE,
         )
     }),
-    subclass::Property("ip", || {
+    subclass::Property("ip", |_| {
         glib::ParamSpec::string(
             "ip",
             "Stream IP",
@@ -65,7 +65,7 @@ subclass::Property("ip", || {
             glib::ParamFlags::READWRITE,
         )
     }),
-    subclass::Property("loss-threshold", || {
+    subclass::Property("loss-threshold", |_| {
         glib::ParamSpec::uint(
             "loss-threshold",
             "Loss threshold",
@@ -160,7 +160,7 @@ impl ObjectSubclass for NdiVideoSrc {
             gst::PadDirection::Src,
             gst::PadPresence::Always,
             &caps,
-        );
+        ).unwrap();
         klass.add_pad_template(src_pad_template);
 
         klass.install_properties(&PROPERTIES);
@@ -256,7 +256,7 @@ impl ObjectSubclass for NdiVideoSrc {
         &self,
         element: &gst::Element,
         transition: gst::StateChange,
-    ) -> gst::StateChangeReturn {
+    ) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
         if transition == gst::StateChange::PausedToPlaying {
             let mut receivers = hashmap_receivers.lock().unwrap();
             let settings = self.settings.lock().unwrap();
@@ -267,35 +267,30 @@ impl ObjectSubclass for NdiVideoSrc {
 
             let video_frame: NDIlib_video_frame_v2_t = Default::default();
 
-            let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
             unsafe {
-                while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_video {
-                    frame_type = NDIlib_recv_capture_v2(
-                        pNDI_recv,
-                        &video_frame,
-                        ptr::null(),
-                        ptr::null(),
-                        1000,
-                    );
-                    gst_debug!(self.cat, obj: element, "NDI video frame received: {:?}", video_frame);
-                }
+                while NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000)
+                    != NDIlib_frame_type_e::NDIlib_frame_type_video {}
+            }
+            gst_debug!(self.cat, obj: element, "NDI video frame received: {:?}", video_frame);
 
             if receiver.initial_timestamp <= video_frame.timestamp as u64
                 || receiver.initial_timestamp == 0
             {
                 receiver.initial_timestamp = video_frame.timestamp as u64;
             }
+            unsafe {
+                NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
+            }
             gst_debug!(self.cat, obj: element, "Setting initial timestamp to {}", receiver.initial_timestamp);
         }
-        }
         self.parent_change_state(element, transition)
     }
 }
 
 impl BaseSrcImpl for NdiVideoSrc {
-    fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> bool {
+    fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> Result<(), gst::LoggableError> {
         let info = match gst_video::VideoInfo::from_caps(caps) {
-            None => return false,
+            None => return Err(gst_loggable_error!(self.cat, "Failed to build `VideoInfo` from caps {}", caps)),
             Some(info) => info,
         };
         gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);
@@ -303,10 +298,10 @@ impl ObjectSubclass for NdiVideoSrc {
         let mut state = self.state.lock().unwrap();
         state.info = Some(info);
         let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
-        true
+        Ok(())
     }
 
-    fn start(&self, element: &gst_base::BaseSrc) -> bool {
+    fn start(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
         *self.state.lock().unwrap() = Default::default();
         let mut settings = self.settings.lock().unwrap();
         settings.id_receiver = connect_ndi(
@@ -316,17 +311,24 @@ impl ObjectSubclass for NdiVideoSrc {
             &settings.stream_name.clone(),
         );
 
-        settings.id_receiver != 0
+        // settings.id_receiver != 0
+        match settings.id_receiver {
+            0 => Err(gst_error_msg!(
+                gst::ResourceError::NotFound,
+                ["Could not connect to this source"]
+            )),
+            _ => Ok(())
+        }
     }
 
-    fn stop(&self, element: &gst_base::BaseSrc) -> bool {
+    fn stop(&self, element: &gst_base::BaseSrc) -> Result<(), gst::ErrorMessage> {
         *self.state.lock().unwrap() = Default::default();
 
         let settings = self.settings.lock().unwrap();
         stop_ndi(self.cat, element, settings.id_receiver);
         // Commented because when adding ndi destroy stopped in this line
         //*self.state.lock().unwrap() = Default::default();
-        true
+        Ok(())
     }
 
    fn query(&self, element: &gst_base::BaseSrc, query: &mut gst::QueryRef) -> bool {
@@ -349,7 +351,7 @@ impl ObjectSubclass for NdiVideoSrc {
                 return false;
             }
         }
-        BaseSrcImpl::parent_query(self, element, query)
+        BaseSrcImplExt::parent_query(self, element, query)
     }
 
     fn fixate(&self, element: &gst_base::BaseSrc, caps: gst::Caps) -> gst::Caps {
@@ -362,15 +364,10 @@ impl ObjectSubclass for NdiVideoSrc {
 
         let video_frame: NDIlib_video_frame_v2_t = Default::default();
 
-        let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
-        while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_video {
-            unsafe {
-                frame_type =
-                    NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
-                gst_debug!(self.cat, obj: element, "NDI video frame received: {:?}", video_frame);
-            }
-        }
+        unsafe {
+            while NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000)
+                != NDIlib_frame_type_e::NDIlib_frame_type_video {}
+        }
 
         settings.latency = gst::SECOND.mul_div_floor(
             video_frame.frame_rate_D as u64,
             video_frame.frame_rate_N as u64,
@@ -387,7 +384,9 @@ impl ObjectSubclass for NdiVideoSrc {
                 Fraction::new(video_frame.frame_rate_N, video_frame.frame_rate_D),
             );
         }
-
+        unsafe {
+            NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
+        }
         let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
         self.parent_fixate(element, caps)
     }
@@ -443,6 +442,7 @@ impl ObjectSubclass for NdiVideoSrc {
         }
 
         if time >= (video_frame.timestamp as u64) {
+            NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
             gst_debug!(self.cat, obj: element, "Frame timestamp ({:?}) is lower than received in the first frame from NDI ({:?}), so skiping...", (video_frame.timestamp as u64), time);
         } else {
             skip_frame = false;
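
Both sources now override change_state with the Result-based signature shown in the hunks above. A minimal sketch of that shape, assuming the same gstreamer-rs subclassing API; the body is illustrative only:

fn change_state(
    &self,
    element: &gst::Element,
    transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
    if transition == gst::StateChange::PausedToPlaying {
        // capture one frame, record receiver.initial_timestamp, then free the frame
    }
    // Chain up: failures now propagate as Err(gst::StateChangeError) instead of
    // a gst::StateChangeReturn value the caller had to inspect.
    self.parent_change_state(element, transition)
}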