Fix rust fmt

This commit is contained in:
rubenrua 2019-03-26 17:41:28 +01:00
parent fb56cdc87c
commit 6dc39b2dea
4 changed files with 760 additions and 654 deletions

View file

@@ -60,7 +60,12 @@ lazy_static! {
static mut id_receiver: i8 = 0;
fn connect_ndi(cat: gst::DebugCategory, element: &gst_base::BaseSrc, ip: &str, stream_name: &str) -> i8 {
fn connect_ndi(
cat: gst::DebugCategory,
element: &gst_base::BaseSrc,
ip: &str,
stream_name: &str,
) -> i8 {
gst_debug!(cat, obj: element, "Starting NDI connection...");
let mut receivers = hashmap_receivers.lock().unwrap();

View file

@@ -21,8 +21,8 @@ use ndi_struct;
use ndisys::*;
use stop_ndi;
use hashmap_receivers;
use byte_slice_cast::AsMutSliceOf;
use hashmap_receivers;
#[derive(Debug, Clone)]
struct Settings {
@@ -46,7 +46,7 @@ impl Default for Settings {
}
static PROPERTIES: [subclass::Property; 3] = [
subclass::Property("stream-name", |_| {
subclass::Property("stream-name", |_| {
glib::ParamSpec::string(
"stream-name",
"Sream Name",
@@ -54,8 +54,8 @@ subclass::Property("stream-name", |_| {
None,
glib::ParamFlags::READWRITE,
)
}),
subclass::Property("ip", |_| {
}),
subclass::Property("ip", |_| {
glib::ParamSpec::string(
"ip",
"Stream IP",
@@ -63,8 +63,8 @@ subclass::Property("ip", |_| {
None,
glib::ParamFlags::READWRITE,
)
}),
subclass::Property("loss-threshold", |_| {
}),
subclass::Property("loss-threshold", |_| {
glib::ParamSpec::uint(
"loss-threshold",
"Loss threshold",
@@ -74,7 +74,7 @@ subclass::Property("loss-threshold", |_| {
5,
glib::ParamFlags::READWRITE,
)
}),
}),
];
struct State {
@@ -99,7 +99,6 @@ struct NdiAudioSrc {
}
impl ObjectSubclass for NdiAudioSrc {
const NAME: &'static str = "NdiAudioSrc";
type ParentType = gst_base::BaseSrc;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
@@ -116,7 +115,7 @@ impl ObjectSubclass for NdiAudioSrc {
),
settings: Mutex::new(Default::default()),
state: Mutex::new(Default::default()),
timestamp_data: Mutex::new(TimestampData { offset: 0}),
timestamp_data: Mutex::new(TimestampData { offset: 0 }),
}
}
@@ -158,9 +157,9 @@ impl ObjectSubclass for NdiAudioSrc {
klass.install_properties(&PROPERTIES);
}
}
}
impl ObjectImpl for NdiAudioSrc {
impl ObjectImpl for NdiAudioSrc {
glib_object_impl!();
fn constructed(&self, obj: &glib::Object) {
@@ -240,9 +239,9 @@ impl ObjectSubclass for NdiAudioSrc {
_ => unimplemented!(),
}
}
}
}
impl ElementImpl for NdiAudioSrc {
impl ElementImpl for NdiAudioSrc {
fn change_state(
&self,
element: &gst::Element,
@@ -259,28 +258,55 @@ impl ObjectSubclass for NdiAudioSrc {
let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
unsafe {
while NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000)
!= NDIlib_frame_type_e::NDIlib_frame_type_audio {}
while NDIlib_recv_capture_v2(
pNDI_recv,
ptr::null(),
&audio_frame,
ptr::null(),
1000,
) != NDIlib_frame_type_e::NDIlib_frame_type_audio
{}
}
gst_debug!(self.cat, obj: element, "NDI audio frame received: {:?}", audio_frame);
gst_debug!(
self.cat,
obj: element,
"NDI audio frame received: {:?}",
audio_frame
);
if receiver.initial_timestamp <= audio_frame.timestamp as u64
|| receiver.initial_timestamp == 0 {
|| receiver.initial_timestamp == 0
{
receiver.initial_timestamp = audio_frame.timestamp as u64;
}
unsafe {
NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
}
gst_debug!(self.cat, obj: element, "Setting initial timestamp to {}", receiver.initial_timestamp);
gst_debug!(
self.cat,
obj: element,
"Setting initial timestamp to {}",
receiver.initial_timestamp
);
}
self.parent_change_state(element, transition)
}
}
}
impl BaseSrcImpl for NdiAudioSrc {
fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> Result<(), gst::LoggableError> {
impl BaseSrcImpl for NdiAudioSrc {
fn set_caps(
&self,
element: &gst_base::BaseSrc,
caps: &gst::CapsRef,
) -> Result<(), gst::LoggableError> {
let info = match gst_audio::AudioInfo::from_caps(caps) {
None => return Err(gst_loggable_error!(self.cat, "Failed to build `AudioInfo` from caps {}", caps)),
None => {
return Err(gst_loggable_error!(
self.cat,
"Failed to build `AudioInfo` from caps {}",
caps
));
}
Some(info) => info,
};
@@ -308,7 +334,7 @@ impl ObjectSubclass for NdiAudioSrc {
gst::ResourceError::NotFound,
["Could not connect to this source"]
)),
_ => Ok(())
_ => Ok(()),
}
}
@@ -358,7 +384,8 @@ impl ObjectSubclass for NdiAudioSrc {
unsafe {
while NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000)
!= NDIlib_frame_type_e::NDIlib_frame_type_audio {}
!= NDIlib_frame_type_e::NDIlib_frame_type_audio
{}
}
let no_samples = audio_frame.no_samples as u64;
@@ -372,7 +399,13 @@ impl ObjectSubclass for NdiAudioSrc {
s.fixate_field_nearest_int("rate", audio_rate);
s.fixate_field_nearest_int("channels", audio_frame.no_channels);
s.fixate_field_str("layout", "interleaved");
s.set_value("channel-mask", gst::Bitmask::new(gst_audio::AudioChannelPosition::get_fallback_mask(audio_frame.no_channels as u32)).to_send_value());
s.set_value(
"channel-mask",
gst::Bitmask::new(gst_audio::AudioChannelPosition::get_fallback_mask(
audio_frame.no_channels as u32,
))
.to_send_value(),
);
}
let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
@@ -410,27 +443,36 @@ impl ObjectSubclass for NdiAudioSrc {
let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
unsafe {
let time = receivers.get(&_settings.id_receiver).unwrap().initial_timestamp;
let time = receivers
.get(&_settings.id_receiver)
.unwrap()
.initial_timestamp;
let mut skip_frame = true;
let mut count_frame_none = 0;
while skip_frame {
let frame_type =
NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
if (frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none && _settings.loss_threshold != 0)
if (frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none
&& _settings.loss_threshold != 0)
|| frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error
{
if count_frame_none < _settings.loss_threshold{
if count_frame_none < _settings.loss_threshold {
count_frame_none += 1;
continue;
}
gst_element_error!(element, gst::ResourceError::Read, ["NDI frame type none or error received, assuming that the source closed the stream...."]);
return Err(gst::FlowError::CustomError);
}
else if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none && _settings.loss_threshold == 0{
gst_debug!(self.cat, obj: element, "No audio frame received, sending empty buffer");
} else if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none
&& _settings.loss_threshold == 0
{
gst_debug!(
self.cat,
obj: element,
"No audio frame received, sending empty buffer"
);
let buffer = gst::Buffer::with_size(0).unwrap();
return Ok(buffer)
return Ok(buffer);
}
if time >= (audio_frame.timestamp as u64) {
@@ -441,11 +483,21 @@ impl ObjectSubclass for NdiAudioSrc {
}
}
gst_log!(self.cat, obj: element, "NDI audio frame received: {:?}", (audio_frame));
gst_log!(
self.cat,
obj: element,
"NDI audio frame received: {:?}",
(audio_frame)
);
pts = audio_frame.timestamp as u64 - time;
gst_log!(self.cat, obj: element, "Calculated pts for audio frame: {:?}", (pts));
gst_log!(
self.cat,
obj: element,
"Calculated pts for audio frame: {:?}",
(pts)
);
// We multiply by 2 because is the size in bytes of an i16 variable
let buff_size = (audio_frame.no_samples * 2 * audio_frame.no_channels) as usize;
@@ -474,7 +526,12 @@ impl ObjectSubclass for NdiAudioSrc {
let mut dst: NDIlib_audio_frame_interleaved_16s_t = Default::default();
dst.reference_level = 0;
dst.p_data = buffer.map_writable().unwrap().as_mut_slice_of::<i16>().unwrap().as_mut_ptr();
dst.p_data = buffer
.map_writable()
.unwrap()
.as_mut_slice_of::<i16>()
.unwrap()
.as_mut_ptr();
NDIlib_util_audio_to_interleaved_16s_v2(&audio_frame, &mut dst);
NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
}
@@ -484,8 +541,8 @@ impl ObjectSubclass for NdiAudioSrc {
Ok(buffer)
}
}
}
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(plugin, "ndiaudiosrc", 0, NdiAudioSrc::get_type())
}
}

View file

@@ -35,11 +35,11 @@ extern "C" {
) -> NDIlib_frame_type_e;
pub fn NDIlib_recv_free_video_v2(
p_instance: NDIlib_recv_instance_t,
p_video_data: *const NDIlib_video_frame_v2_t
p_video_data: *const NDIlib_video_frame_v2_t,
);
pub fn NDIlib_recv_free_audio_v2(
p_instance: NDIlib_recv_instance_t,
p_audio_data: *const NDIlib_audio_frame_v2_t
p_audio_data: *const NDIlib_audio_frame_v2_t,
);
}

View file

@@ -47,7 +47,7 @@ impl Default for Settings {
}
static PROPERTIES: [subclass::Property; 3] = [
subclass::Property("stream-name", |_| {
subclass::Property("stream-name", |_| {
glib::ParamSpec::string(
"stream-name",
"Stream Name",
@@ -55,8 +55,8 @@ subclass::Property("stream-name", |_| {
None,
glib::ParamFlags::READWRITE,
)
}),
subclass::Property("ip", |_| {
}),
subclass::Property("ip", |_| {
glib::ParamSpec::string(
"ip",
"Stream IP",
@@ -64,8 +64,8 @@ subclass::Property("ip", |_| {
None,
glib::ParamFlags::READWRITE,
)
}),
subclass::Property("loss-threshold", |_| {
}),
subclass::Property("loss-threshold", |_| {
glib::ParamSpec::uint(
"loss-threshold",
"Loss threshold",
@@ -75,7 +75,7 @@ subclass::Property("loss-threshold", |_| {
5,
glib::ParamFlags::READWRITE,
)
}),
}),
];
struct State {
@@ -100,7 +100,6 @@ struct NdiVideoSrc {
}
impl ObjectSubclass for NdiVideoSrc {
const NAME: &'static str = "NdiVideoSrc";
type ParentType = gst_base::BaseSrc;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
@@ -160,15 +159,15 @@ impl ObjectSubclass for NdiVideoSrc {
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
).unwrap();
)
.unwrap();
klass.add_pad_template(src_pad_template);
klass.install_properties(&PROPERTIES);
}
}
}
impl ObjectImpl for NdiVideoSrc {
impl ObjectImpl for NdiVideoSrc {
glib_object_impl!();
fn constructed(&self, obj: &glib::Object) {
@@ -181,7 +180,6 @@ impl ObjectSubclass for NdiVideoSrc {
basesrc.set_format(gst::Format::Time);
}
fn set_property(&self, obj: &glib::Object, id: usize, value: &glib::Value) {
let prop = &PROPERTIES[id];
let basesrc = obj.downcast_ref::<gst_base::BaseSrc>().unwrap();
@@ -249,9 +247,9 @@ impl ObjectSubclass for NdiVideoSrc {
_ => unimplemented!(),
}
}
}
}
impl ElementImpl for NdiVideoSrc {
impl ElementImpl for NdiVideoSrc {
fn change_state(
&self,
element: &gst::Element,
@@ -268,10 +266,21 @@ impl ObjectSubclass for NdiVideoSrc {
let video_frame: NDIlib_video_frame_v2_t = Default::default();
unsafe {
while NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000)
!= NDIlib_frame_type_e::NDIlib_frame_type_video {}
while NDIlib_recv_capture_v2(
pNDI_recv,
&video_frame,
ptr::null(),
ptr::null(),
1000,
) != NDIlib_frame_type_e::NDIlib_frame_type_video
{}
}
gst_debug!(self.cat, obj: element, "NDI video frame received: {:?}", video_frame);
gst_debug!(
self.cat,
obj: element,
"NDI video frame received: {:?}",
video_frame
);
if receiver.initial_timestamp <= video_frame.timestamp as u64
|| receiver.initial_timestamp == 0
@@ -281,16 +290,31 @@ impl ObjectSubclass for NdiVideoSrc {
unsafe {
NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
}
gst_debug!(self.cat, obj: element, "Setting initial timestamp to {}", receiver.initial_timestamp);
gst_debug!(
self.cat,
obj: element,
"Setting initial timestamp to {}",
receiver.initial_timestamp
);
}
self.parent_change_state(element, transition)
}
}
}
impl BaseSrcImpl for NdiVideoSrc {
fn set_caps(&self, element: &gst_base::BaseSrc, caps: &gst::CapsRef) -> Result<(), gst::LoggableError> {
impl BaseSrcImpl for NdiVideoSrc {
fn set_caps(
&self,
element: &gst_base::BaseSrc,
caps: &gst::CapsRef,
) -> Result<(), gst::LoggableError> {
let info = match gst_video::VideoInfo::from_caps(caps) {
None => return Err(gst_loggable_error!(self.cat, "Failed to build `VideoInfo` from caps {}", caps)),
None => {
return Err(gst_loggable_error!(
self.cat,
"Failed to build `VideoInfo` from caps {}",
caps
));
}
Some(info) => info,
};
gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);
@@ -317,7 +341,7 @@ impl ObjectSubclass for NdiVideoSrc {
gst::ResourceError::NotFound,
["Could not connect to this source"]
)),
_ => Ok(())
_ => Ok(()),
}
}
@@ -366,7 +390,8 @@ impl ObjectSubclass for NdiVideoSrc {
unsafe {
while NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000)
!= NDIlib_frame_type_e::NDIlib_frame_type_video {}
!= NDIlib_frame_type_e::NDIlib_frame_type_video
{}
}
settings.latency = gst::SECOND.mul_div_floor(
video_frame.frame_rate_D as u64,
@@ -418,27 +443,36 @@ impl ObjectSubclass for NdiVideoSrc {
let video_frame: NDIlib_video_frame_v2_t = Default::default();
unsafe {
let time = receivers.get(&_settings.id_receiver).unwrap().initial_timestamp;
let time = receivers
.get(&_settings.id_receiver)
.unwrap()
.initial_timestamp;
let mut skip_frame = true;
let mut count_frame_none = 0;
while skip_frame {
let frame_type =
NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
if (frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none && _settings.loss_threshold != 0)
if (frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none
&& _settings.loss_threshold != 0)
|| frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error
{
if count_frame_none < _settings.loss_threshold{
if count_frame_none < _settings.loss_threshold {
count_frame_none += 1;
continue;
}
gst_element_error!(element, gst::ResourceError::Read, ["NDI frame type none or error received, assuming that the source closed the stream...."]);
return Err(gst::FlowError::CustomError);
}
else if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none && _settings.loss_threshold == 0{
gst_debug!(self.cat, obj: element, "No video frame received, sending empty buffer");
} else if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none
&& _settings.loss_threshold == 0
{
gst_debug!(
self.cat,
obj: element,
"No video frame received, sending empty buffer"
);
let buffer = gst::Buffer::with_size(0).unwrap();
return Ok(buffer)
return Ok(buffer);
}
if time >= (video_frame.timestamp as u64) {
@@ -449,11 +483,21 @@ impl ObjectSubclass for NdiVideoSrc {
}
}
gst_log!(self.cat, obj: element, "NDI video frame received: {:?}", (video_frame));
gst_log!(
self.cat,
obj: element,
"NDI video frame received: {:?}",
(video_frame)
);
pts = video_frame.timestamp as u64 - time;
gst_log!(self.cat, obj: element, "Calculated pts for video frame: {:?}", (pts));
gst_log!(
self.cat,
obj: element,
"Calculated pts for video frame: {:?}",
(pts)
);
let buff_size = (video_frame.yres * video_frame.line_stride_in_bytes) as usize;
let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
@@ -486,8 +530,8 @@ impl ObjectSubclass for NdiVideoSrc {
Ok(buffer)
}
}
}
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(plugin, "ndivideosrc", 0, NdiVideoSrc::get_type())
}
}