mirror of https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs.git
synced 2025-01-19 07:35:46 +00:00

Fix code style with cargo fmt

parent 6db4929e08
commit cf8b7db9bd

4 changed files with 816 additions and 747 deletions
@@ -22,14 +22,14 @@ extern crate gstreamer_video as gst_video;
 #[macro_use]
 extern crate lazy_static;
 
-mod ndivideosrc;
 mod ndiaudiosrc;
 pub mod ndisys;
+mod ndivideosrc;
 
-use std::{thread, time};
-use std::ffi::{CStr, CString};
-use ndisys::*;
 use gst_plugin::base_src::*;
+use ndisys::*;
+use std::ffi::{CStr, CString};
+use std::{thread, time};
 
 use std::collections::HashMap;
 use std::sync::Mutex;
@@ -82,9 +82,7 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
     //FIXME Search for another way to know if the source is an audio or a video source
     if element.get_name().contains("audiosrc") {
         audio = true;
-    }
-    else
-    {
+    } else {
         video = true;
     }
 
@@ -92,23 +90,23 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
         if val.ip == ip || val.stream_name == stream_name {
             if (val.audio && val.video) || (val.audio && audio) || (val.video && video) {
                 continue;
-            }
-            else {
+            } else {
                 if video {
                     val.video = video;
-                }
-                else{
+                } else {
                     val.audio = audio;
                 }
                 return val.id;
             }
         }
 
     }
     unsafe {
 
         if !NDIlib_initialize() {
-            gst_element_error!(element, gst::CoreError::Negotiation, ["Cannot run NDI: NDIlib_initialize error"]);
+            gst_element_error!(
+                element,
+                gst::CoreError::Negotiation,
+                ["Cannot run NDI: NDIlib_initialize error"]
+            );
             // return false;
             return 0;
         }
@@ -118,7 +116,11 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
         let pNDI_find = NDIlib_find_create_v2(&NDI_find_create_desc);
         //let ip_ptr = CString::new(ip.clone()).unwrap();
         if pNDI_find.is_null() {
-            gst_element_error!(element, gst::CoreError::Negotiation, ["Cannot run NDI: NDIlib_find_create_v2 error"]);
+            gst_element_error!(
+                element,
+                gst::CoreError::Negotiation,
+                ["Cannot run NDI: NDIlib_find_create_v2 error"]
+            );
             // return false;
             return 0;
         }
@@ -132,15 +134,26 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
 
         // We need at least one source
        if p_sources.is_null() {
-            gst_element_error!(element, gst::CoreError::Negotiation, ["Error getting NDIlib_find_get_current_sources"]);
+            gst_element_error!(
+                element,
+                gst::CoreError::Negotiation,
+                ["Error getting NDIlib_find_get_current_sources"]
+            );
             // return false;
             return 0;
         }
 
         let mut no_source: isize = -1;
         for i in 0..total_sources as isize {
-            if CStr::from_ptr((*p_sources.offset(i)).p_ndi_name).to_string_lossy().into_owned() == stream_name ||
-            CStr::from_ptr((*p_sources.offset(i)).p_ip_address).to_string_lossy().into_owned() == ip{
+            if CStr::from_ptr((*p_sources.offset(i)).p_ndi_name)
+                .to_string_lossy()
+                .into_owned()
+                == stream_name
+                || CStr::from_ptr((*p_sources.offset(i)).p_ip_address)
+                    .to_string_lossy()
+                    .into_owned()
+                    == ip
+            {
                 no_source = i;
                 break;
             }
@@ -151,18 +164,27 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
             return 0;
         }
 
-        gst_debug!(cat, obj: element, "Total sources in network {}: Connecting to NDI source with name '{}' and address '{}'", total_sources,
+        gst_debug!(
+            cat,
+            obj: element,
+            "Total sources in network {}: Connecting to NDI source with name '{}' and address '{}'",
+            total_sources,
             CStr::from_ptr((*p_sources.offset(no_source)).p_ndi_name)
                 .to_string_lossy()
                 .into_owned(),
             CStr::from_ptr((*p_sources.offset(no_source)).p_ip_address)
                 .to_string_lossy()
-                .into_owned());
+                .into_owned()
+        );
 
         let source = *p_sources.offset(no_source).clone();
 
-        let source_ip = CStr::from_ptr(source.p_ip_address).to_string_lossy().into_owned();
-        let source_name = CStr::from_ptr(source.p_ndi_name).to_string_lossy().into_owned();
+        let source_ip = CStr::from_ptr(source.p_ip_address)
+            .to_string_lossy()
+            .into_owned();
+        let source_name = CStr::from_ptr(source.p_ndi_name)
+            .to_string_lossy()
+            .into_owned();
 
         // We now have at least one source, so we create a receiver to look at it.
         // We tell it that we prefer YCbCr video since it is more efficient for us. If the source has an alpha channel
@@ -177,7 +199,11 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
         let pNDI_recv = NDIlib_recv_create_v3(&NDI_recv_create_desc);
         if pNDI_recv.is_null() {
             //println!("Cannot run NDI: NDIlib_recv_create_v3 error.");
-            gst_element_error!(element, gst::CoreError::Negotiation, ["Cannot run NDI: NDIlib_recv_create_v3 error"]);
+            gst_element_error!(
+                element,
+                gst::CoreError::Negotiation,
+                ["Cannot run NDI: NDIlib_recv_create_v3 error"]
+            );
             // return false;
             return 0;
         }
@@ -202,7 +228,17 @@ fn connect_ndi(cat: gst::DebugCategory , element: &BaseSrc, ip: String, stream
         NDIlib_recv_send_metadata(pNDI_recv, &enable_hw_accel);
 
         id_receiver += 1;
-        receivers.insert(id_receiver, ndi_receiver_info{stream_name: source_name.clone(), ip: source_ip.clone(), video:video, audio: audio, ndi_instance: NdiInstance{recv: pNDI_recv}, id: id_receiver});
+        receivers.insert(
+            id_receiver,
+            ndi_receiver_info {
+                stream_name: source_name.clone(),
+                ip: source_ip.clone(),
+                video: video,
+                audio: audio,
+                ndi_instance: NdiInstance { recv: pNDI_recv },
+                id: id_receiver,
+            },
+        );
 
         // let start = SystemTime::now();
         // let since_the_epoch = start.duration_since(UNIX_EPOCH)
@@ -223,8 +259,7 @@ fn stop_ndi(cat: gst::DebugCategory , element: &BaseSrc, id: i8) -> bool{
         if val.video && val.audio {
             if element.get_name().contains("audiosrc") {
                 val.audio = false;
-            }
-            else{
+            } else {
                 val.video = false;
             }
             return true;
@@ -6,19 +6,19 @@ use gst::prelude::*;
 use gst_audio;
 use gst_base::prelude::*;
 
+use gobject_subclass::object::*;
 use gst_plugin::base_src::*;
 use gst_plugin::element::*;
-use gobject_subclass::object::*;
 
 use std::sync::Mutex;
 use std::{i32, u32};
 
 use std::ptr;
 
-use ndisys::*;
 use connect_ndi;
-use stop_ndi;
 use ndi_struct;
+use ndisys::*;
+use stop_ndi;
 
 use hashmap_receivers;
 
@@ -66,9 +66,7 @@ struct State {
 
 impl Default for State {
     fn default() -> State {
-        State {
-            info: None,
-        }
+        State { info: None }
     }
 }
 
@@ -100,9 +98,7 @@ impl NdiAudioSrc {
             ),
             settings: Mutex::new(Default::default()),
             state: Mutex::new(Default::default()),
-            timestamp_data: Mutex::new(TimestampData{
-                offset: 0,
-            }),
+            timestamp_data: Mutex::new(TimestampData { offset: 0 }),
         })
     }
 
@@ -159,7 +155,6 @@ impl NdiAudioSrc {
     }
 }
 
-
 // Virtual methods of GObject itself
 impl ObjectImpl<BaseSrc> for NdiAudioSrc {
     // Called whenever a value of a property is changed. It can be called
@@ -184,7 +179,7 @@ impl NdiAudioSrc {
 
                 let _ =
                     element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
-            },
+            }
             Property::String("ip", ..) => {
                 let mut settings = self.settings.lock().unwrap();
                 let ip = value.get().unwrap();
@@ -215,7 +210,7 @@ impl NdiAudioSrc {
                 let settings = self.settings.lock().unwrap();
                 //TODO to_value supongo que solo funciona con numeros
                 Ok(settings.stream_name.to_value())
-            },
+            }
             Property::String("ip", ..) => {
                 let settings = self.settings.lock().unwrap();
                 //TODO to_value supongo que solo funciona con numeros
@@ -228,7 +223,11 @@ impl NdiAudioSrc {
 
 // Virtual methods of gst::Element. We override none
 impl ElementImpl<BaseSrc> for NdiAudioSrc {
-    fn change_state(&self, element: &BaseSrc, transition: gst::StateChange) -> gst::StateChangeReturn {
+    fn change_state(
+        &self,
+        element: &BaseSrc,
+        transition: gst::StateChange,
+    ) -> gst::StateChangeReturn {
         if transition == gst::StateChange::PausedToPlaying {
             let receivers = hashmap_receivers.lock().unwrap();
             let settings = self.settings.lock().unwrap();
@@ -242,10 +241,18 @@ impl NdiAudioSrc {
             let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
             unsafe {
                 while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_audio {
-                    frame_type = NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
+                    frame_type = NDIlib_recv_capture_v2(
+                        pNDI_recv,
+                        ptr::null(),
+                        &audio_frame,
+                        ptr::null(),
+                        1000,
+                    );
                 }
 
-                if ndi_struct.initial_timestamp <= audio_frame.timestamp as u64 || ndi_struct.initial_timestamp == 0{
+                if ndi_struct.initial_timestamp <= audio_frame.timestamp as u64
+                    || ndi_struct.initial_timestamp == 0
+                {
                     ndi_struct.initial_timestamp = audio_frame.timestamp as u64;
                 }
             }
@@ -263,7 +270,6 @@ impl NdiAudioSrc {
     // We simply remember the resulting AudioInfo from the caps to be able to use this for knowing
     // the sample rate, etc. when creating buffers
     fn set_caps(&self, element: &BaseSrc, caps: &gst::CapsRef) -> bool {
-
         let info = match gst_audio::AudioInfo::from_caps(caps) {
             None => return false,
             Some(info) => info,
@@ -284,11 +290,15 @@ impl NdiAudioSrc {
         *self.state.lock().unwrap() = Default::default();
 
         let mut settings = self.settings.lock().unwrap();
-        settings.id_receiver = connect_ndi(self.cat, element, settings.ip.clone(), settings.stream_name.clone());
+        settings.id_receiver = connect_ndi(
+            self.cat,
+            element,
+            settings.ip.clone(),
+            settings.stream_name.clone(),
+        );
         if settings.id_receiver == 0 {
             return false;
-        }
-        else{
+        } else {
             return true;
         }
     }
@@ -361,7 +371,8 @@ impl NdiAudioSrc {
         let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
         while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_audio {
             unsafe {
-                frame_type = NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
+                frame_type =
+                    NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
             }
         }
         let mut caps = gst::Caps::truncate(caps);
@@ -414,32 +425,38 @@ impl NdiAudioSrc {
 
             let mut skip_frame = true;
             while skip_frame {
-                let frame_type = NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000,);
-                if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none || frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error {
+                let frame_type =
+                    NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
+                if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none
+                    || frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error
+                {
                     gst_element_error!(element, gst::ResourceError::Read, ["NDI frame type none received, assuming that the source closed the stream...."]);
                     return Err(gst::FlowReturn::CustomError);
                 }
                 if time >= (audio_frame.timestamp as u64) {
                     gst_debug!(self.cat, obj: element, "Frame timestamp ({:?}) is lower than received in the first frame from NDI ({:?}), so skiping...", (audio_frame.timestamp as u64), time);
-                }
-                else{
+                } else {
                     skip_frame = false;
                 }
             }
 
             pts = audio_frame.timestamp as u64 - time;
 
-            let buff_size = ((audio_frame.channel_stride_in_bytes)) as usize;
+            let buff_size = (audio_frame.channel_stride_in_bytes) as usize;
             let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
             {
                 let vec = Vec::from_raw_parts(audio_frame.p_data as *mut u8, buff_size, buff_size);
                 let pts: gst::ClockTime = (pts * 100).into();
 
-                let duration: gst::ClockTime = (((audio_frame.no_samples as f64 / audio_frame.sample_rate as f64) * 1000000000.0) as u64).into();
+                let duration: gst::ClockTime = (((audio_frame.no_samples as f64
+                    / audio_frame.sample_rate as f64)
+                    * 1000000000.0) as u64)
+                    .into();
                 let buffer = buffer.get_mut().unwrap();
 
                 if ndi_struct.start_pts == gst::ClockTime(Some(0)) {
-                    ndi_struct.start_pts = element.get_clock().unwrap().get_time() - element.get_base_time();
+                    ndi_struct.start_pts =
+                        element.get_clock().unwrap().get_time() - element.get_base_time();
                 }
 
                 buffer.set_pts(pts + ndi_struct.start_pts);
@@ -157,7 +157,6 @@ pub struct NdiInstance {
 
 unsafe impl ::std::marker::Send for NdiInstance {}
 
-
 #[repr(C)]
 #[derive(Debug, Copy, Clone)]
 pub struct NDIlib_tally_t {
@@ -3,23 +3,23 @@
 use glib;
 use gst;
 use gst::prelude::*;
-use gst_video;
-use gst_base::prelude::*;
 use gst::Fraction;
+use gst_base::prelude::*;
+use gst_video;
 
+use gobject_subclass::object::*;
 use gst_plugin::base_src::*;
 use gst_plugin::element::*;
-use gobject_subclass::object::*;
 
 use std::sync::Mutex;
 use std::{i32, u32};
 
 use std::ptr;
 
-use ndisys::*;
 use connect_ndi;
-use stop_ndi;
 use ndi_struct;
+use ndisys::*;
+use stop_ndi;
 
 use hashmap_receivers;
 
@@ -69,9 +69,7 @@ struct State {
 
 impl Default for State {
     fn default() -> State {
-        State {
-            info: None,
-        }
+        State { info: None }
     }
 }
 
@@ -103,9 +101,7 @@ impl NdiVideoSrc {
             ),
             settings: Mutex::new(Default::default()),
             state: Mutex::new(Default::default()),
-            timestamp_data: Mutex::new(TimestampData{
-                offset: 0,
-            }),
+            timestamp_data: Mutex::new(TimestampData { offset: 0 }),
         })
     }
 
@@ -168,8 +164,6 @@ impl NdiVideoSrc {
     }
 }
 
-
-
 // Virtual methods of GObject itself
 impl ObjectImpl<BaseSrc> for NdiVideoSrc {
     // Called whenever a value of a property is changed. It can be called
@@ -194,7 +188,7 @@ impl NdiVideoSrc {
 
                 // let _ =
                 // element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
-            },
+            }
             Property::String("ip", ..) => {
                 let mut settings = self.settings.lock().unwrap();
                 let ip = value.get().unwrap();
@@ -224,7 +218,7 @@ impl NdiVideoSrc {
             Property::String("stream-name", ..) => {
                 let settings = self.settings.lock().unwrap();
                 Ok(settings.stream_name.to_value())
-            },
+            }
             Property::String("ip", ..) => {
                 let settings = self.settings.lock().unwrap();
                 Ok(settings.ip.to_value())
@@ -236,7 +230,11 @@ impl NdiVideoSrc {
 
 // Virtual methods of gst::Element. We override none
 impl ElementImpl<BaseSrc> for NdiVideoSrc {
-    fn change_state(&self, element: &BaseSrc, transition: gst::StateChange) -> gst::StateChangeReturn {
+    fn change_state(
+        &self,
+        element: &BaseSrc,
+        transition: gst::StateChange,
+    ) -> gst::StateChangeReturn {
         if transition == gst::StateChange::PausedToPlaying {
             let receivers = hashmap_receivers.lock().unwrap();
             let settings = self.settings.lock().unwrap();
@@ -250,10 +248,18 @@ impl NdiVideoSrc {
             let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
             unsafe {
                 while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_video {
-                    frame_type = NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
+                    frame_type = NDIlib_recv_capture_v2(
+                        pNDI_recv,
+                        &video_frame,
+                        ptr::null(),
+                        ptr::null(),
+                        1000,
+                    );
                 }
 
-                if ndi_struct.initial_timestamp <= video_frame.timestamp as u64 || ndi_struct.initial_timestamp == 0{
+                if ndi_struct.initial_timestamp <= video_frame.timestamp as u64
+                    || ndi_struct.initial_timestamp == 0
+                {
                     ndi_struct.initial_timestamp = video_frame.timestamp as u64;
                 }
             }
@@ -262,7 +268,6 @@ impl NdiVideoSrc {
         }
     }
 
-
 // Virtual methods of gst_base::BaseSrc
 impl BaseSrcImpl<BaseSrc> for NdiVideoSrc {
     // Called whenever the input/output caps are changing, i.e. in the very beginning before data
@@ -290,12 +295,16 @@ impl NdiVideoSrc {
         // Reset state
         *self.state.lock().unwrap() = Default::default();
         let mut settings = self.settings.lock().unwrap();
-        settings.id_receiver = connect_ndi(self.cat, element, settings.ip.clone(), settings.stream_name.clone());
+        settings.id_receiver = connect_ndi(
+            self.cat,
+            element,
+            settings.ip.clone(),
+            settings.stream_name.clone(),
+        );
 
         if settings.id_receiver == 0 {
             return false;
-        }
-        else{
+        } else {
             // let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
             return true;
         }
@@ -313,7 +322,6 @@ impl NdiVideoSrc {
         true
     }
 
-
     fn query(&self, element: &BaseSrc, query: &mut gst::QueryRef) -> bool {
         use gst::QueryView;
         match query.view_mut() {
@@ -376,7 +384,8 @@ impl NdiVideoSrc {
         let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
         while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_video {
             unsafe {
-                frame_type = NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
+                frame_type =
+                    NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
             }
         }
 
@@ -386,7 +395,10 @@ impl NdiVideoSrc {
             let s = caps.get_mut_structure(0).unwrap();
             s.fixate_field_nearest_int("width", video_frame.xres);
             s.fixate_field_nearest_int("height", video_frame.yres);
-            s.fixate_field_nearest_fraction("framerate", Fraction::new(video_frame.frame_rate_N, video_frame.frame_rate_D));
+            s.fixate_field_nearest_fraction(
+                "framerate",
+                Fraction::new(video_frame.frame_rate_N, video_frame.frame_rate_D),
+            );
         }
 
         // Let BaseSrc fixate anything else for us. We could've alternatively have
@@ -432,15 +444,17 @@ impl NdiVideoSrc {
 
             let mut skip_frame = true;
             while skip_frame {
-                let frame_type = NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000,);
-                if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none || frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error {
+                let frame_type =
+                    NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
+                if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none
+                    || frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error
+                {
                     gst_element_error!(element, gst::ResourceError::Read, ["NDI frame type none received, assuming that the source closed the stream...."]);
                     return Err(gst::FlowReturn::CustomError);
                 }
                 if time >= (video_frame.timestamp as u64) {
                     gst_debug!(self.cat, obj: element, "Frame timestamp ({:?}) is lower than received in the first frame from NDI ({:?}), so skiping...", (video_frame.timestamp as u64), time);
-                }
-                else{
+                } else {
                     skip_frame = false;
                 }
             }
@@ -453,11 +467,15 @@ impl NdiVideoSrc {
                 let vec = Vec::from_raw_parts(video_frame.p_data as *mut u8, buff_size, buff_size);
                 let pts: gst::ClockTime = (pts * 100).into();
 
-                let duration: gst::ClockTime = (((video_frame.frame_rate_D as f64 / video_frame.frame_rate_N as f64) * 1000000000.0) as u64).into();
+                let duration: gst::ClockTime = (((video_frame.frame_rate_D as f64
+                    / video_frame.frame_rate_N as f64)
+                    * 1000000000.0) as u64)
+                    .into();
                 let buffer = buffer.get_mut().unwrap();
 
                 if ndi_struct.start_pts == gst::ClockTime(Some(0)) {
-                    ndi_struct.start_pts = element.get_clock().unwrap().get_time() - element.get_base_time();
+                    ndi_struct.start_pts =
+                        element.get_clock().unwrap().get_time() - element.get_base_time();
                 }
 
                 buffer.set_pts(pts + ndi_struct.start_pts);