diff --git a/net/ndi/Cargo.toml b/net/ndi/Cargo.toml new file mode 100644 index 00000000..95447083 --- /dev/null +++ b/net/ndi/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "gst-plugin-ndi" +version = "1.0.0" +authors = ["Ruben Gonzalez ", "Daniel Vilar ", "Sebastian Dröge "] +repository = "https://github.com/teltek/gst-plugin-ndi" +license = "LGPL" +description = "NewTek NDI Plugin" +edition = "2018" + +[dependencies] +glib = "0.15" +gst = { package = "gstreamer", version = "0.18", features = ["v1_12"] } +gst-base = { package = "gstreamer-base", version = "0.18" } +gst-audio = { package = "gstreamer-audio", version = "0.18" } +gst-video = { package = "gstreamer-video", version = "0.18", features = ["v1_12"] } +byte-slice-cast = "1" +once_cell = "1.0" +byteorder = "1.0" + +[build-dependencies] +gst-plugin-version-helper = "0.7" + +[features] +default = ["interlaced-fields", "reference-timestamps", "sink"] +interlaced-fields = ["gst/v1_16", "gst-video/v1_16"] +reference-timestamps = ["gst/v1_14"] +sink = ["gst/v1_18", "gst-base/v1_18"] +advanced-sdk = [] + +[lib] +name = "gstndi" +crate-type = ["cdylib"] +path = "src/lib.rs" diff --git a/net/ndi/LICENSE b/net/ndi/LICENSE new file mode 100644 index 00000000..8000a6fa --- /dev/null +++ b/net/ndi/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. 
If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. 
+ + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. 
+ + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. 
+ + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) 
+ + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. 
Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. 
If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+  14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission.  For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this.  Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+                            NO WARRANTY
+
+  15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+           How to Apply These Terms to Your New Libraries
+
+  If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change.  You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+  To apply these terms, attach the following notices to the library.  It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301
+    USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  library `Frob' (a library for tweaking knobs) written by James Random
+  Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+  Ty Coon, President of Vice
+
+That's all there is to it!
diff --git a/net/ndi/README.md b/net/ndi/README.md
new file mode 100644
index 00000000..5e77e3aa
--- /dev/null
+++ b/net/ndi/README.md
@@ -0,0 +1,81 @@
+GStreamer NDI Plugin for Linux
+====================
+
+*Compiled and tested with NDI SDK 4.0, 4.1 and 5.0*
+
+This is a plugin for the [GStreamer](https://gstreamer.freedesktop.org/) multimedia framework that allows GStreamer to receive a stream from an [NDI](https://www.newtek.com/ndi/) source. This plugin has been developed by [Teltek](http://teltek.es/) and was funded by the [University of the Arts London](https://www.arts.ac.uk/) and [The University of Manchester](https://www.manchester.ac.uk/).
+
+Currently the plugin has a source element for receiving from NDI sources, a sink element to provide an NDI source and a device provider for discovering NDI sources on the network.
+
+Some examples of how to use these elements from the command line:
+
+```console
+# Information about the elements
+$ gst-inspect-1.0 ndi
+$ gst-inspect-1.0 ndisrc
+$ gst-inspect-1.0 ndisink
+
+# Discover all NDI sources on the network
+$ gst-device-monitor-1.0 -f Source/Network:application/x-ndi
+
+# Audio/Video source pipeline
+$ gst-launch-1.0 ndisrc ndi-name="GC-DEV2 (OBS)" ! ndisrcdemux name=demux demux.video ! queue ! videoconvert ! autovideosink demux.audio ! queue ! audioconvert ! autoaudiosink
+
+# Audio/Video sink pipeline
+$ gst-launch-1.0 videotestsrc is-live=true ! video/x-raw,format=UYVY ! ndisinkcombiner name=combiner ! ndisink ndi-name="My NDI source" audiotestsrc is-live=true ! combiner.audio
+```
+
+Feel free to contribute to this project. Some ways you can contribute are:
+* Testing with more hardware and software and reporting bugs
+* Submitting pull requests.
+
+Compilation of the NDI element
+-------
+To compile the NDI element it's necessary to install Rust, the NDI SDK and the following packages for GStreamer:
+
+```console
+$ apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
+      gstreamer1.0-plugins-base
+```
+
+To install the required NDI library there are two options:
+1. Download the NDI SDK from the NDI website and move the library to the correct location.
+2. Use a [deb package](https://github.com/Palakis/obs-ndi/releases/download/4.5.2/libndi3_3.5.1-1_amd64.deb) made by the community. Thanks to the [NDI plugin for OBS](https://github.com/Palakis/obs-ndi).
+
+To install Rust, you can follow the official documentation: https://www.rust-lang.org/en-US/install.html
+
+Once all requirements are met, you can build the plugin by executing the following commands from the project root folder:
+
+```console
+$ cargo build
+$ export GST_PLUGIN_PATH=`pwd`/target/debug
+$ gst-inspect-1.0 ndi
+```
+
+By default GStreamer 1.18 is required. To use an older version, you can build with `cargo build --no-default-features --features whatever_you_want_to_enable_of_the_above_features`, enabling only the features from the list above that your GStreamer version supports.
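+
+For example, here is a minimal sketch of such a reduced build, assuming a GStreamer 1.16 system where only the source elements are needed (the feature list is an assumption; adjust it to your setup):
+
+```console
+$ cargo build --no-default-features --features "interlaced-fields,reference-timestamps"
+```
+
+If all went OK, you should see info related to the NDI elements. To make the plugin available without using `GST_PLUGIN_PATH` it's necessary to copy the plugin to the GStreamer plugins folder: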
+
+```console
+$ cargo build --release
+$ sudo install -o root -g root -m 644 target/release/libgstndi.so /usr/lib/x86_64-linux-gnu/gstreamer-1.0/
+$ sudo ldconfig
+$ gst-inspect-1.0 ndi
+```
+
+More info about GStreamer plugins written in Rust:
+----------------------------------
+* https://gitlab.freedesktop.org/gstreamer/gstreamer-rs
+* https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs
+
+
+License
+-------
+This plugin is licensed under the LGPL - see the [LICENSE](LICENSE) file for details.
+
+
+Acknowledgments
+-------
+* University of the Arts London and The University of Manchester.
+* Sebastian Dröge (@sdroege).
diff --git a/net/ndi/build.rs b/net/ndi/build.rs
new file mode 100644
index 00000000..cda12e57
--- /dev/null
+++ b/net/ndi/build.rs
@@ -0,0 +1,3 @@
+fn main() {
+    gst_plugin_version_helper::info()
+}
diff --git a/net/ndi/src/device_provider/imp.rs b/net/ndi/src/device_provider/imp.rs
new file mode 100644
index 00000000..8484511e
--- /dev/null
+++ b/net/ndi/src/device_provider/imp.rs
@@ -0,0 +1,269 @@
+use gst::prelude::*;
+use gst::subclass::prelude::*;
+use gst::{gst_error, gst_log, gst_trace};
+
+use once_cell::sync::OnceCell;
+
+use std::sync::atomic;
+use std::sync::Mutex;
+use std::thread;
+
+use once_cell::sync::Lazy;
+
+use crate::ndi;
+
+static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
+    gst::DebugCategory::new(
+        "ndideviceprovider",
+        gst::DebugColorFlags::empty(),
+        Some("NewTek NDI Device Provider"),
+    )
+});
+
+#[derive(Debug)]
+pub struct DeviceProvider {
+    thread: Mutex<Option<thread::JoinHandle<()>>>,
+    current_devices: Mutex<Vec<super::Device>>,
+    find: Mutex<Option<ndi::FindInstance>>,
+    is_running: atomic::AtomicBool,
+}
+
+#[glib::object_subclass]
+impl ObjectSubclass for DeviceProvider {
+    const NAME: &'static str = "NdiDeviceProvider";
+    type Type = super::DeviceProvider;
+    type ParentType = gst::DeviceProvider;
+
+    fn new() -> Self {
+        Self {
+            thread: Mutex::new(None),
+            current_devices: Mutex::new(vec![]),
+            find: Mutex::new(None),
+            is_running: atomic::AtomicBool::new(false),
+        }
+    }
+}
+
+impl ObjectImpl for DeviceProvider {}
+
+impl GstObjectImpl for DeviceProvider {}
+
+impl DeviceProviderImpl for DeviceProvider {
+    fn metadata() -> Option<&'static gst::subclass::DeviceProviderMetadata> {
+        static METADATA: Lazy<gst::subclass::DeviceProviderMetadata> = Lazy::new(|| {
+            gst::subclass::DeviceProviderMetadata::new(
+                "NewTek NDI Device Provider",
+                "Source/Audio/Video/Network",
+                "NewTek NDI Device Provider",
+                "Ruben Gonzalez , Daniel Vilar , Sebastian Dröge ",
+            )
+        });
+
+        Some(&*METADATA)
+    }
+
+    fn probe(&self, _device_provider: &Self::Type) -> Vec<gst::Device> {
+        self.current_devices
+            .lock()
+            .unwrap()
+            .iter()
+            .map(|d| d.clone().upcast())
+            .collect()
+    }
+
+    fn start(&self, device_provider: &Self::Type) -> Result<(), gst::LoggableError> {
+        let mut thread_guard = self.thread.lock().unwrap();
+        if thread_guard.is_some() {
+            gst_log!(CAT, obj: device_provider, "Device provider already started");
+            return Ok(());
+        }
+
+        self.is_running.store(true, atomic::Ordering::SeqCst);
+
+        let device_provider_weak = device_provider.downgrade();
+        let mut first = true;
+        *thread_guard = Some(thread::spawn(move || {
+            let device_provider = match device_provider_weak.upgrade() {
+                None => return,
+                Some(device_provider) => device_provider,
+            };
+
+            let imp = DeviceProvider::from_instance(&device_provider);
+            {
+                let mut find_guard = imp.find.lock().unwrap();
+                if find_guard.is_some() {
+                    gst_log!(CAT, obj: &device_provider, "Already started");
+                    return;
+                }
+
+                let find = match ndi::FindInstance::builder().build() {
+                    None => {
+                        gst_error!(CAT, obj: &device_provider, "Failed to create Find instance");
instance"); + return; + } + Some(find) => find, + }; + *find_guard = Some(find); + } + + loop { + let device_provider = match device_provider_weak.upgrade() { + None => break, + Some(device_provider) => device_provider, + }; + + let imp = DeviceProvider::from_instance(&device_provider); + if !imp.is_running.load(atomic::Ordering::SeqCst) { + break; + } + + imp.poll(&device_provider, first); + first = false; + } + })); + + Ok(()) + } + + fn stop(&self, _device_provider: &Self::Type) { + if let Some(_thread) = self.thread.lock().unwrap().take() { + self.is_running.store(false, atomic::Ordering::SeqCst); + // Don't actually join because that might take a while + } + } +} + +impl DeviceProvider { + fn poll(&self, device_provider: &super::DeviceProvider, first: bool) { + let mut find_guard = self.find.lock().unwrap(); + let find = match *find_guard { + None => return, + Some(ref mut find) => find, + }; + + if !find.wait_for_sources(if first { 1000 } else { 5000 }) { + gst_trace!(CAT, obj: device_provider, "No new sources found"); + return; + } + + let sources = find.get_current_sources(); + let mut sources = sources.iter().map(|s| s.to_owned()).collect::>(); + + let mut current_devices_guard = self.current_devices.lock().unwrap(); + let mut expired_devices = vec![]; + let mut remaining_sources = vec![]; + + // First check for each device we previously knew if it's still available + for old_device in &*current_devices_guard { + let old_device_imp = Device::from_instance(old_device); + let old_source = old_device_imp.source.get().unwrap(); + + if !sources.contains(&*old_source) { + gst_log!( + CAT, + obj: device_provider, + "Source {:?} disappeared", + old_source + ); + expired_devices.push(old_device.clone()); + } else { + // Otherwise remember that we had it before already and don't have to announce it + // again. After the loop we're going to remove these all from the sources vec. 
+                remaining_sources.push(old_source.to_owned());
+            }
+        }
+
+        for remaining_source in remaining_sources {
+            sources.retain(|s| s != &remaining_source);
+        }
+
+        // Remove all expired devices from the list of cached devices
+        current_devices_guard.retain(|d| !expired_devices.contains(d));
+        // And also notify the device provider of them having disappeared
+        for old_device in expired_devices {
+            device_provider.device_remove(&old_device);
+        }
+
+        // Now go through all new devices and announce them
+        for source in sources {
+            gst_log!(CAT, obj: device_provider, "Source {:?} appeared", source);
+            let device = super::Device::new(&source);
+            device_provider.device_add(&device);
+            current_devices_guard.push(device);
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct Device {
+    source: OnceCell<ndi::Source<'static>>,
+}
+
+#[glib::object_subclass]
+impl ObjectSubclass for Device {
+    const NAME: &'static str = "NdiDevice";
+    type Type = super::Device;
+    type ParentType = gst::Device;
+
+    fn new() -> Self {
+        Self {
+            source: OnceCell::new(),
+        }
+    }
+}
+
+impl ObjectImpl for Device {}
+
+impl GstObjectImpl for Device {}
+
+impl DeviceImpl for Device {
+    fn create_element(
+        &self,
+        _device: &Self::Type,
+        name: Option<&str>,
+    ) -> Result<gst::Element, gst::LoggableError> {
+        let source_info = self.source.get().unwrap();
+        let element = glib::Object::with_type(
+            crate::ndisrc::NdiSrc::static_type(),
+            &[
+                ("name", &name),
+                ("ndi-name", &source_info.ndi_name()),
+                ("url-address", &source_info.url_address()),
+            ],
+        )
+        .unwrap()
+        .dynamic_cast::<gst::Element>()
+        .unwrap();
+
+        Ok(element)
+    }
+}
+
+impl super::Device {
+    fn new(source: &ndi::Source<'_>) -> super::Device {
+        let display_name = source.ndi_name();
+        let device_class = "Source/Audio/Video/Network";
+
+        let element_class =
+            glib::Class::<gst::Element>::from_type(crate::ndisrc::NdiSrc::static_type()).unwrap();
+        let templ = element_class.pad_template("src").unwrap();
+        let caps = templ.caps();
+
+        // Put the url-address into the extra properties
+        let extra_properties = gst::Structure::builder("properties")
+            .field("ndi-name", &source.ndi_name())
+            .field("url-address", &source.url_address())
+            .build();
+
+        let device = glib::Object::new::<super::Device>(&[
+            ("caps", &caps),
+            ("display-name", &display_name),
+            ("device-class", &device_class),
+            ("properties", &extra_properties),
+        ])
+        .unwrap();
+        let device_impl = Device::from_instance(&device);
+
+        device_impl.source.set(source.to_owned()).unwrap();
+
+        device
+    }
+}
diff --git a/net/ndi/src/device_provider/mod.rs b/net/ndi/src/device_provider/mod.rs
new file mode 100644
index 00000000..cda26ece
--- /dev/null
+++ b/net/ndi/src/device_provider/mod.rs
@@ -0,0 +1,26 @@
+use glib::prelude::*;
+
+mod imp;
+
+glib::wrapper! {
+    pub struct DeviceProvider(ObjectSubclass<imp::DeviceProvider>) @extends gst::DeviceProvider, gst::Object;
+}
+
+unsafe impl Send for DeviceProvider {}
+unsafe impl Sync for DeviceProvider {}
+
+glib::wrapper! {
+    pub struct Device(ObjectSubclass<imp::Device>) @extends gst::Device, gst::Object;
+}
+
+unsafe impl Send for Device {}
+unsafe impl Sync for Device {}
+
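+// Usage sketch (an assumption, not part of the original sources): once the
+// plugin is loaded, applications can discover NDI sources through the generic
+// GStreamer device-monitor API, equivalent to running
+// `gst-device-monitor-1.0 Source/Network:application/x-ndi`:
+//
+//     let monitor = gst::DeviceMonitor::new();
+//     monitor.add_filter(
+//         Some("Source/Network"),
+//         Some(&gst::Caps::new_simple("application/x-ndi", &[])),
+//     );
+//     monitor.start().unwrap();
+//     for device in monitor.devices() {
+//         println!("found: {}", device.display_name());
+//     }
+//     monitor.stop();
+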
+pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
+    gst::DeviceProvider::register(
+        Some(plugin),
+        "ndideviceprovider",
+        gst::Rank::Primary,
+        DeviceProvider::static_type(),
+    )
+}
diff --git a/net/ndi/src/lib.rs b/net/ndi/src/lib.rs
new file mode 100644
index 00000000..dfdabf29
--- /dev/null
+++ b/net/ndi/src/lib.rs
@@ -0,0 +1,159 @@
+mod device_provider;
+pub mod ndi;
+#[cfg(feature = "sink")]
+mod ndisink;
+#[cfg(feature = "sink")]
+mod ndisinkcombiner;
+#[cfg(feature = "sink")]
+pub mod ndisinkmeta;
+mod ndisrc;
+mod ndisrcdemux;
+pub mod ndisrcmeta;
+pub mod ndisys;
+pub mod receiver;
+
+use crate::ndi::*;
+use crate::ndisys::*;
+use crate::receiver::*;
+
+use std::time;
+
+use once_cell::sync::Lazy;
+
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
+#[repr(u32)]
+#[enum_type(name = "GstNdiTimestampMode")]
+pub enum TimestampMode {
+    #[enum_value(name = "Receive Time / Timecode", nick = "receive-time-vs-timecode")]
+    ReceiveTimeTimecode = 0,
+    #[enum_value(name = "Receive Time / Timestamp", nick = "receive-time-vs-timestamp")]
+    ReceiveTimeTimestamp = 1,
+    #[enum_value(name = "NDI Timecode", nick = "timecode")]
+    Timecode = 2,
+    #[enum_value(name = "NDI Timestamp", nick = "timestamp")]
+    Timestamp = 3,
+    #[enum_value(name = "Receive Time", nick = "receive-time")]
+    ReceiveTime = 4,
+}
+
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
+#[repr(u32)]
+#[enum_type(name = "GstNdiRecvColorFormat")]
+pub enum RecvColorFormat {
+    #[enum_value(name = "BGRX or BGRA", nick = "bgrx-bgra")]
+    BgrxBgra = 0,
+    #[enum_value(name = "UYVY or BGRA", nick = "uyvy-bgra")]
+    UyvyBgra = 1,
+    #[enum_value(name = "RGBX or RGBA", nick = "rgbx-rgba")]
+    RgbxRgba = 2,
+    #[enum_value(name = "UYVY or RGBA", nick = "uyvy-rgba")]
+    UyvyRgba = 3,
+    #[enum_value(name = "Fastest", nick = "fastest")]
+    Fastest = 4,
+    #[enum_value(name = "Best", nick = "best")]
+    Best = 5,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v1", nick = "compressed-v1")]
+    CompressedV1 = 6,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v2", nick = "compressed-v2")]
+    CompressedV2 = 7,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v3", nick = "compressed-v3")]
+    CompressedV3 = 8,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v3 with audio", nick = "compressed-v3-with-audio")]
+    CompressedV3WithAudio = 9,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v4", nick = "compressed-v4")]
+    CompressedV4 = 10,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v4 with audio", nick = "compressed-v4-with-audio")]
+    CompressedV4WithAudio = 11,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v5", nick = "compressed-v5")]
+    CompressedV5 = 12,
+    #[cfg(feature = "advanced-sdk")]
+    #[enum_value(name = "Compressed v5 with audio", nick = "compressed-v5-with-audio")]
+    CompressedV5WithAudio = 13,
+}
+
+impl From<RecvColorFormat> for NDIlib_recv_color_format_e {
+    fn from(v: RecvColorFormat) -> Self {
+        match v {
+            RecvColorFormat::BgrxBgra => NDIlib_recv_color_format_BGRX_BGRA,
+            RecvColorFormat::UyvyBgra => NDIlib_recv_color_format_UYVY_BGRA,
+            RecvColorFormat::RgbxRgba => NDIlib_recv_color_format_RGBX_RGBA,
+            RecvColorFormat::UyvyRgba => NDIlib_recv_color_format_UYVY_RGBA,
+            RecvColorFormat::Fastest => NDIlib_recv_color_format_fastest,
+            RecvColorFormat::Best => NDIlib_recv_color_format_best,
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV1 => NDIlib_recv_color_format_ex_compressed,
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV2 => NDIlib_recv_color_format_ex_compressed_v2,
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV3 => NDIlib_recv_color_format_ex_compressed_v3,
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV3WithAudio => {
+                NDIlib_recv_color_format_ex_compressed_v3_with_audio
+            }
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV4 => NDIlib_recv_color_format_ex_compressed_v4,
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV4WithAudio => {
+                NDIlib_recv_color_format_ex_compressed_v4_with_audio
+            }
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV5 => NDIlib_recv_color_format_ex_compressed_v5,
+            #[cfg(feature = "advanced-sdk")]
+            RecvColorFormat::CompressedV5WithAudio => {
+                NDIlib_recv_color_format_ex_compressed_v5_with_audio
+            }
+        }
+    }
+}
+
+fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
+    if !ndi::initialize() {
+        return Err(glib::bool_error!("Cannot initialize NDI"));
+    }
+
+    device_provider::register(plugin)?;
+
+    ndisrc::register(plugin)?;
+    ndisrcdemux::register(plugin)?;
+
+    #[cfg(feature = "sink")]
+    {
+        ndisinkcombiner::register(plugin)?;
+        ndisink::register(plugin)?;
+    }
+    Ok(())
+}
+
+static DEFAULT_RECEIVER_NDI_NAME: Lazy<String> = Lazy::new(|| {
+    format!(
+        "GStreamer NDI Source {}-{}",
+        env!("CARGO_PKG_VERSION"),
+        env!("COMMIT_ID")
+    )
+});
+
+#[cfg(feature = "reference-timestamps")]
+static TIMECODE_CAPS: Lazy<gst::Caps> =
+    Lazy::new(|| gst::Caps::new_simple("timestamp/x-ndi-timecode", &[]));
+#[cfg(feature = "reference-timestamps")]
+static TIMESTAMP_CAPS: Lazy<gst::Caps> =
+    Lazy::new(|| gst::Caps::new_simple("timestamp/x-ndi-timestamp", &[]));
+
+gst::plugin_define!(
+    ndi,
+    env!("CARGO_PKG_DESCRIPTION"),
+    plugin_init,
+    concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
+    "LGPL",
+    env!("CARGO_PKG_NAME"),
+    env!("CARGO_PKG_NAME"),
+    env!("CARGO_PKG_REPOSITORY"),
+    env!("BUILD_REL_DATE")
+);
diff --git a/net/ndi/src/ndi.rs b/net/ndi/src/ndi.rs
new file mode 100644
index 00000000..24507b88
--- /dev/null
+++ b/net/ndi/src/ndi.rs
@@ -0,0 +1,1184 @@
+use crate::ndisys;
+use crate::ndisys::*;
+use std::ffi;
+use std::mem;
+use std::ptr;
+
+use byte_slice_cast::*;
+
+pub fn initialize() -> bool {
+    unsafe { NDIlib_initialize() }
+}
+
+#[derive(Debug)]
+pub struct FindBuilder<'a> {
+    show_local_sources: bool,
+    groups: Option<&'a str>,
+    extra_ips: Option<&'a str>,
+}
+
+impl<'a> Default for FindBuilder<'a> {
+    fn default() -> Self {
+        Self {
+            show_local_sources: true,
+            groups: None,
+            extra_ips: None,
+        }
+    }
+}
+
+impl<'a> FindBuilder<'a> {
+    pub fn show_local_sources(self, show_local_sources: bool) -> Self {
+        Self {
+            show_local_sources,
+            ..self
+        }
+    }
+
+    pub fn groups(self, groups: &'a str) -> Self {
+        Self {
+            groups: Some(groups),
+            ..self
+        }
+    }
+
+    pub fn extra_ips(self, extra_ips: &'a str) -> Self {
+        Self {
+            extra_ips: Some(extra_ips),
+            ..self
+        }
+    }
+
+    pub fn build(self) -> Option<FindInstance> {
+        let groups = self.groups.map(|s| ffi::CString::new(s).unwrap());
+        let extra_ips = self.extra_ips.map(|s| ffi::CString::new(s).unwrap());
+
+        unsafe {
+            let ptr = NDIlib_find_create_v2(&NDIlib_find_create_t {
+                show_local_sources: self.show_local_sources,
+                p_groups: groups.as_ref().map(|s| s.as_ptr()).unwrap_or(ptr::null()),
+                p_extra_ips: extra_ips
+                    .as_ref()
+                    .map(|s| s.as_ptr())
+                    .unwrap_or(ptr::null()),
+            });
+            if ptr.is_null() {
+                None
+            } else {
+                Some(FindInstance(ptr::NonNull::new_unchecked(ptr)))
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct FindInstance(ptr::NonNull<::std::os::raw::c_void>);
+unsafe impl Send for FindInstance {}
+
+impl FindInstance {
+    pub fn builder<'a>() -> FindBuilder<'a> {
+        FindBuilder::default()
+    }
+
+    pub fn wait_for_sources(&mut self, timeout_in_ms: u32) -> bool {
+        unsafe { NDIlib_find_wait_for_sources(self.0.as_ptr(), timeout_in_ms) }
+    }
+
+    pub fn get_current_sources(&mut self) -> Vec<Source> {
+        unsafe {
+            let mut no_sources = mem::MaybeUninit::uninit();
+            let sources_ptr =
+                NDIlib_find_get_current_sources(self.0.as_ptr(), no_sources.as_mut_ptr());
+            let no_sources = no_sources.assume_init();
+
+            if sources_ptr.is_null() || no_sources == 0 {
+                return vec![];
+            }
+
+            let mut sources = vec![];
+            for i in 0..no_sources {
+                sources.push(Source::Borrowed(
+                    ptr::NonNull::new_unchecked(sources_ptr.add(i as usize) as *mut _),
+                    self,
+                ));
+            }
+
+            sources
+        }
+    }
+}
+
+impl Drop for FindInstance {
+    fn drop(&mut self) {
+        unsafe {
+            NDIlib_find_destroy(self.0.as_mut());
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum Source<'a> {
+    Borrowed(ptr::NonNull<NDIlib_source_t>, &'a FindInstance),
+    Owned(NDIlib_source_t, ffi::CString, ffi::CString),
+}
+
+unsafe impl<'a> Send for Source<'a> {}
+unsafe impl<'a> Sync for Source<'a> {}
+
+impl<'a> Source<'a> {
+    pub fn ndi_name(&self) -> &str {
+        unsafe {
+            let ptr = match *self {
+                Source::Borrowed(ptr, _) => &*ptr.as_ptr(),
+                Source::Owned(ref source, _, _) => source,
+            };
+
+            assert!(!ptr.p_ndi_name.is_null());
+            ffi::CStr::from_ptr(ptr.p_ndi_name).to_str().unwrap()
+        }
+    }
+
+    pub fn url_address(&self) -> &str {
+        unsafe {
+            let ptr = match *self {
+                Source::Borrowed(ptr, _) => &*ptr.as_ptr(),
+                Source::Owned(ref source, _, _) => source,
+            };
+
+            assert!(!ptr.p_url_address.is_null());
+            ffi::CStr::from_ptr(ptr.p_url_address).to_str().unwrap()
+        }
+    }
+
+    pub fn to_owned<'b>(&self) -> Source<'b> {
+        unsafe {
+            let (ndi_name, url_address) = match *self {
+                Source::Borrowed(ptr, _) => (ptr.as_ref().p_ndi_name, ptr.as_ref().p_url_address),
+                Source::Owned(_, ref ndi_name, ref url_address) => {
+                    (ndi_name.as_ptr(), url_address.as_ptr())
+                }
+            };
+
+            let ndi_name = ffi::CString::new(ffi::CStr::from_ptr(ndi_name).to_bytes()).unwrap();
+            let url_address =
+                ffi::CString::new(ffi::CStr::from_ptr(url_address).to_bytes()).unwrap();
+
+            Source::Owned(
+                NDIlib_source_t {
+                    p_ndi_name: ndi_name.as_ptr(),
+                    p_url_address: url_address.as_ptr(),
+                },
+                ndi_name,
+                url_address,
+            )
+        }
+    }
+}
+
+impl<'a> PartialEq for Source<'a> {
+    fn eq(&self, other: &Source<'a>) -> bool {
+        self.ndi_name() == other.ndi_name() && self.url_address() == other.url_address()
+    }
+}
+
+#[derive(Debug)]
+pub struct RecvBuilder<'a> {
+    ndi_name: Option<&'a str>,
+    url_address: Option<&'a str>,
+    allow_video_fields: bool,
+    bandwidth: NDIlib_recv_bandwidth_e,
+    color_format: NDIlib_recv_color_format_e,
+    ndi_recv_name: &'a str,
+}
+
+impl<'a> RecvBuilder<'a> {
+    pub fn allow_video_fields(self, allow_video_fields: bool) -> Self {
+        Self {
+            allow_video_fields,
+            ..self
+        }
+    }
+
+    pub fn bandwidth(self, bandwidth: NDIlib_recv_bandwidth_e) -> Self {
+        Self { bandwidth, ..self }
+    }
+
+    pub fn color_format(self, color_format: NDIlib_recv_color_format_e) -> Self {
+        Self {
+            color_format,
+            ..self
+        }
+    }
+
+    pub fn build(self) -> Option<RecvInstance> {
+        unsafe {
+            let ndi_recv_name = ffi::CString::new(self.ndi_recv_name).unwrap();
+            let ndi_name = self
+                .ndi_name
+                .as_ref()
+                .map(|s| ffi::CString::new(*s).unwrap());
+            let url_address = self
+                .url_address
+                .as_ref()
+                .map(|s| ffi::CString::new(*s).unwrap());
+            let ptr = NDIlib_recv_create_v3(&NDIlib_recv_create_v3_t {
+                source_to_connect_to: NDIlib_source_t {
+                    p_ndi_name: ndi_name
+                        .as_ref()
+                        .map(|s| s.as_ptr())
+                        .unwrap_or_else(|| ptr::null_mut()),
+                    p_url_address: url_address
+                        .as_ref()
+                        .map(|s| s.as_ptr())
+                        .unwrap_or_else(|| ptr::null_mut()),
+                },
+                allow_video_fields: self.allow_video_fields,
+                bandwidth: self.bandwidth,
+                color_format: self.color_format,
+                p_ndi_recv_name: ndi_recv_name.as_ptr(),
+            });
+
+            if ptr.is_null() {
+                None
+            } else {
+                Some(RecvInstance(ptr::NonNull::new_unchecked(ptr)))
+            }
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct RecvInstance(ptr::NonNull<::std::os::raw::c_void>);
+
+unsafe impl Send for RecvInstance {}
+
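+// A usage sketch for the receive wrapper below (an assumed flow; the source
+// name is a placeholder): build a receiver for a discovered source, then poll
+// it with a timeout until an error is returned:
+//
+//     let recv = RecvInstance::builder(Some("MACHINE (Source)"), None, "Example receiver")
+//         .allow_video_fields(true)
+//         .build()
+//         .expect("Failed to create receive instance");
+//     loop {
+//         match recv.capture(500) {
+//             Ok(Some(Frame::Video(_))) | Ok(Some(Frame::Audio(_))) => { /* handle the data */ }
+//             Ok(Some(Frame::Metadata(_))) | Ok(None) => continue, // metadata or timeout
+//             Err(_) => break, // NDIlib_frame_type_error
+//         }
+//     }
+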
+impl RecvInstance {
+    pub fn builder<'a>(
+        ndi_name: Option<&'a str>,
+        url_address: Option<&'a str>,
+        ndi_recv_name: &'a str,
+    ) -> RecvBuilder<'a> {
+        RecvBuilder {
+            ndi_name,
+            url_address,
+            allow_video_fields: true,
+            bandwidth: NDIlib_recv_bandwidth_highest,
+            color_format: NDIlib_recv_color_format_UYVY_BGRA,
+            ndi_recv_name,
+        }
+    }
+
+    pub fn set_tally(&self, tally: &Tally) -> bool {
+        unsafe { NDIlib_recv_set_tally(self.0.as_ptr(), &tally.0) }
+    }
+
+    pub fn send_metadata(&self, metadata: &MetadataFrame) -> bool {
+        unsafe { NDIlib_recv_send_metadata(self.0.as_ptr(), metadata.as_ptr()) }
+    }
+
+    pub fn get_queue(&self) -> Queue {
+        unsafe {
+            let mut queue = mem::MaybeUninit::uninit();
+            NDIlib_recv_get_queue(self.0.as_ptr(), queue.as_mut_ptr());
+            Queue(queue.assume_init())
+        }
+    }
+
+    pub fn capture(&self, timeout_in_ms: u32) -> Result<Option<Frame>, ()> {
+        unsafe {
+            let ptr = self.0.as_ptr();
+
+            let mut video_frame = mem::zeroed();
+            let mut audio_frame = mem::zeroed();
+            let mut metadata_frame = mem::zeroed();
+
+            let res = NDIlib_recv_capture_v3(
+                ptr,
+                &mut video_frame,
+                &mut audio_frame,
+                &mut metadata_frame,
+                timeout_in_ms,
+            );
+
+            match res {
+                NDIlib_frame_type_e::NDIlib_frame_type_audio => Ok(Some(Frame::Audio(
+                    AudioFrame::BorrowedRecv(audio_frame, self),
+                ))),
+                NDIlib_frame_type_e::NDIlib_frame_type_video => Ok(Some(Frame::Video(
+                    VideoFrame::BorrowedRecv(video_frame, self),
+                ))),
+                NDIlib_frame_type_e::NDIlib_frame_type_metadata => Ok(Some(Frame::Metadata(
+                    MetadataFrame::Borrowed(metadata_frame, self),
+                ))),
+                NDIlib_frame_type_e::NDIlib_frame_type_error => Err(()),
+                _ => Ok(None),
+            }
+        }
+    }
+}
+
+impl Drop for RecvInstance {
+    fn drop(&mut self) {
+        unsafe { NDIlib_recv_destroy(self.0.as_ptr() as *mut _) }
+    }
+}
+
+#[derive(Debug)]
+pub struct SendBuilder<'a> {
+    ndi_name: &'a str,
+    clock_audio: bool,
+    clock_video: bool,
+}
+
+impl<'a> SendBuilder<'a> {
+    pub fn clock_audio(self) -> Self {
+        Self {
+            clock_audio: true,
+            ..self
+        }
+    }
+
+    pub fn clock_video(self) -> Self {
+        Self {
+            clock_video: true,
+            ..self
+        }
+    }
+
+    pub fn build(self) -> Option<SendInstance> {
+        unsafe {
+            let ndi_name = ffi::CString::new(self.ndi_name).unwrap();
+            let ptr = NDIlib_send_create(&NDIlib_send_create_t {
+                p_ndi_name: ndi_name.as_ptr(),
+                clock_video: self.clock_video,
+                clock_audio: self.clock_audio,
+                p_groups: ptr::null(),
+            });
+
+            if ptr.is_null() {
+                None
+            } else {
+                Some(SendInstance(ptr::NonNull::new_unchecked(ptr)))
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct SendInstance(ptr::NonNull<::std::os::raw::c_void>);
+
+unsafe impl Send for SendInstance {}
+
+impl SendInstance {
+    pub fn builder(ndi_name: &str) -> SendBuilder {
+        SendBuilder {
+            ndi_name,
+            clock_video: false,
+            clock_audio: false,
+        }
+    }
+
+    pub fn send_video(&mut self, frame: &VideoFrame) {
+        unsafe {
+            NDIlib_send_send_video_v2(self.0.as_ptr(), frame.as_ptr());
+        }
+    }
+
+    pub fn send_audio(&mut self, frame: &AudioFrame) {
+        unsafe {
+            NDIlib_send_send_audio_v3(self.0.as_ptr(), frame.as_ptr());
+        }
+    }
+}
+
+impl Drop for SendInstance {
+    fn drop(&mut self) {
+        unsafe { NDIlib_send_destroy(self.0.as_ptr() as *mut _) }
+    }
+}
+
+#[derive(Debug)]
+pub struct Tally(NDIlib_tally_t);
+unsafe impl Send for Tally {}
+
+impl Default for Tally {
+    fn default() -> Self {
+        Self(NDIlib_tally_t {
+            on_program: true,
+            on_preview: false,
+        })
+    }
+}
+
+impl Tally {
+    pub fn new(on_program: bool, on_preview: bool) -> Self {
+        Self(NDIlib_tally_t {
+            on_program,
+            on_preview,
+        })
+    }
+
+    pub fn on_program(&self) -> bool {
+        self.0.on_program
+    }
+
+    pub fn on_preview(&self) -> bool {
+        self.0.on_preview
+    }
+}
+
+#[derive(Debug)]
+pub enum Frame<'a> {
+    Video(VideoFrame<'a>),
+    Audio(AudioFrame<'a>),
+    Metadata(MetadataFrame<'a>),
+}
+
+#[derive(Debug)]
+pub enum VideoFrame<'a> {
+    //Owned(NDIlib_video_frame_v2_t, Option<ffi::CString>, Option<Vec<u8>>),
+    BorrowedRecv(NDIlib_video_frame_v2_t, &'a RecvInstance),
+    BorrowedGst(
+        NDIlib_video_frame_v2_t,
+        &'a gst_video::VideoFrameRef<&'a gst::BufferRef>,
+    ),
+}
+
+impl<'a> VideoFrame<'a> {
+    pub fn xres(&self) -> i32 {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.xres
+            }
+        }
+    }
+
+    pub fn yres(&self) -> i32 {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.yres
+            }
+        }
+    }
+
+    pub fn fourcc(&self) -> NDIlib_FourCC_video_type_e {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.FourCC
+            }
+        }
+    }
+
+    pub fn frame_rate(&self) -> (i32, i32) {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                (frame.frame_rate_N, frame.frame_rate_D)
+            }
+        }
+    }
+
+    pub fn picture_aspect_ratio(&self) -> f32 {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.picture_aspect_ratio
+            }
+        }
+    }
+
+    pub fn frame_format_type(&self) -> NDIlib_frame_format_type_e {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.frame_format_type
+            }
+        }
+    }
+
+    pub fn timecode(&self) -> i64 {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.timecode
+            }
+        }
+    }
+
+    pub fn data(&self) -> Option<&[u8]> {
+        let fourcc = self.fourcc();
+
+        if [
+            NDIlib_FourCC_video_type_UYVY,
+            NDIlib_FourCC_video_type_UYVA,
+            NDIlib_FourCC_video_type_P216,
+            NDIlib_FourCC_video_type_PA16,
+            NDIlib_FourCC_video_type_YV12,
+            NDIlib_FourCC_video_type_I420,
+            NDIlib_FourCC_video_type_NV12,
+            NDIlib_FourCC_video_type_BGRA,
+            NDIlib_FourCC_video_type_BGRX,
+            NDIlib_FourCC_video_type_RGBA,
+            NDIlib_FourCC_video_type_RGBX,
+        ]
+        .contains(&fourcc)
+        {
+            // FIXME: Unclear if this is correct. Needs to be validated against an actual
+            // interlaced stream
+            let frame_size = if self.frame_format_type()
+                == NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0
+                || self.frame_format_type()
+                    == NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_1
+            {
+                self.yres() * self.line_stride_or_data_size_in_bytes() / 2
+            } else {
+                self.yres() * self.line_stride_or_data_size_in_bytes()
+            };
+
+            return unsafe {
+                use std::slice;
+                match self {
+                    VideoFrame::BorrowedRecv(ref frame, _)
+                    | VideoFrame::BorrowedGst(ref frame, _) => Some(slice::from_raw_parts(
+                        frame.p_data as *const u8,
+                        frame_size as usize,
+                    )),
+                }
+            };
+        }
+
+        #[cfg(feature = "advanced-sdk")]
+        if [
+            NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth,
+            NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth,
+            NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth,
+            NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth,
+            NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth,
+            NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth,
+        ]
+        .contains(&fourcc)
+        {
+            return unsafe {
+                use std::slice;
+                match self {
+                    VideoFrame::BorrowedRecv(ref frame, _)
+                    | VideoFrame::BorrowedGst(ref frame, _) => Some(slice::from_raw_parts(
+                        frame.p_data as *const u8,
+                        frame.line_stride_or_data_size_in_bytes as usize,
+                    )),
+                }
+            };
+        }
+
+        None
+    }
+
+    #[cfg(feature = "advanced-sdk")]
+    pub fn compressed_packet(&self) -> Option<CompressedPacket> {
+        use byteorder::{LittleEndian, ReadBytesExt};
+        use std::io::Cursor;
+        use std::slice;
+
+        unsafe {
+            let fourcc = self.fourcc();
+
+            if ![
+                NDIlib_FourCC_video_type_ex_H264_highest_bandwidth,
+                NDIlib_FourCC_video_type_ex_H264_lowest_bandwidth,
+                NDIlib_FourCC_video_type_ex_HEVC_highest_bandwidth,
+                NDIlib_FourCC_video_type_ex_HEVC_lowest_bandwidth,
+                NDIlib_FourCC_video_type_ex_H264_alpha_highest_bandwidth,
+                NDIlib_FourCC_video_type_ex_H264_alpha_lowest_bandwidth,
+                NDIlib_FourCC_video_type_ex_HEVC_alpha_highest_bandwidth,
+                NDIlib_FourCC_video_type_ex_HEVC_alpha_lowest_bandwidth,
+            ]
+            .contains(&fourcc)
+            {
+                return None;
+            }
+
+            let data = match self {
+                VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                    slice::from_raw_parts(
+                        frame.p_data as *const u8,
+                        frame.line_stride_or_data_size_in_bytes as usize,
+                    )
+                }
+            };
+
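+            // Header layout assumed by the reads below (documented from this
+            // parsing code, not from the SDK headers): a little-endian, 44-byte
+            // prefix -- u32 version, u32 fourcc, i64 pts, i64 dts, u64 reserved,
+            // u32 flags, u32 data_size, u32 extra_data_size -- where
+            // NDIlib_compressed_packet_version_0 (44) doubles as the header
+            // size, followed by data_size bytes of bitstream and
+            // extra_data_size bytes of codec configuration data.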
+            let mut cursor = Cursor::new(data);
+            let version = cursor.read_u32::<LittleEndian>().ok()?;
+            if version != ndisys::NDIlib_compressed_packet_version_0 {
+                return None;
+            }
+
+            let fourcc = cursor.read_u32::<LittleEndian>().ok()?;
+            let pts = cursor.read_i64::<LittleEndian>().ok()?;
+            let dts = cursor.read_i64::<LittleEndian>().ok()?;
+            let _reserved = cursor.read_u64::<LittleEndian>().ok()?;
+            let flags = cursor.read_u32::<LittleEndian>().ok()?;
+            let data_size = cursor.read_u32::<LittleEndian>().ok()?;
+            let extra_data_size = cursor.read_u32::<LittleEndian>().ok()?;
+
+            let expected_size = (ndisys::NDIlib_compressed_packet_version_0 as usize)
+                .checked_add(data_size as usize)?
+                .checked_add(extra_data_size as usize)?;
+            if data.len() < expected_size {
+                return None;
+            }
+
+            Some(CompressedPacket {
+                fourcc,
+                pts,
+                dts,
+                key_frame: flags & ndisys::NDIlib_compressed_packet_flags_keyframe != 0,
+                data: &data[ndisys::NDIlib_compressed_packet_version_0 as usize..]
+                    [..data_size as usize],
+                extra_data: if extra_data_size > 0 {
+                    Some(
+                        &data[ndisys::NDIlib_compressed_packet_version_0 as usize
+                            + data_size as usize..][..extra_data_size as usize],
+                    )
+                } else {
+                    None
+                },
+            })
+        }
+    }
+
+    pub fn line_stride_or_data_size_in_bytes(&self) -> i32 {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                let stride = frame.line_stride_or_data_size_in_bytes;
+
+                if stride != 0 {
+                    return stride;
+                }
+
+                let xres = frame.xres;
+
+                match frame.FourCC {
+                    ndisys::NDIlib_FourCC_video_type_UYVY
+                    | ndisys::NDIlib_FourCC_video_type_UYVA
+                    | ndisys::NDIlib_FourCC_video_type_YV12
+                    | ndisys::NDIlib_FourCC_video_type_NV12
+                    | ndisys::NDIlib_FourCC_video_type_I420
+                    | ndisys::NDIlib_FourCC_video_type_BGRA
+                    | ndisys::NDIlib_FourCC_video_type_BGRX
+                    | ndisys::NDIlib_FourCC_video_type_RGBA
+                    | ndisys::NDIlib_FourCC_video_type_RGBX => xres,
+                    ndisys::NDIlib_FourCC_video_type_P216
+                    | ndisys::NDIlib_FourCC_video_type_PA16 => 2 * xres,
+                    _ => 0,
+                }
+            }
+        }
+    }
+
+    pub fn metadata(&self) -> Option<&str> {
+        unsafe {
+            match self {
+                VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                    if frame.p_metadata.is_null() {
+                        None
+                    } else {
+                        Some(ffi::CStr::from_ptr(frame.p_metadata).to_str().unwrap())
+                    }
+                }
+            }
+        }
+    }
+
+    pub fn timestamp(&self) -> i64 {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
+                frame.timestamp
+            }
+        }
+    }
+
+    pub fn as_ptr(&self) -> *const NDIlib_video_frame_v2_t {
+        match self {
+            VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => frame,
+        }
+    }
+
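+    // Contiguity assumption behind the checks in try_from_video_frame (a note,
+    // not upstream documentation): NDI takes a single data pointer per frame,
+    // so each secondary plane must start exactly at
+    // plane(i) + plane_height(i) * stride(i); GStreamer frames whose planes
+    // live in separate allocations are rejected with Err(()).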
+ [..data_size as usize], + extra_data: if extra_data_size > 0 { + Some( + &data[ndisys::NDIlib_compressed_packet_version_0 as usize + + data_size as usize..][..extra_data_size as usize], + ) + } else { + None + }, + }) + } + } + + pub fn line_stride_or_data_size_in_bytes(&self) -> i32 { + match self { + VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => { + let stride = frame.line_stride_or_data_size_in_bytes; + + if stride != 0 { + return stride; + } + + let xres = frame.xres; + + match frame.FourCC { + ndisys::NDIlib_FourCC_video_type_UYVY + | ndisys::NDIlib_FourCC_video_type_UYVA + | ndisys::NDIlib_FourCC_video_type_YV12 + | ndisys::NDIlib_FourCC_video_type_NV12 + | ndisys::NDIlib_FourCC_video_type_I420 + | ndisys::NDIlib_FourCC_video_type_BGRA + | ndisys::NDIlib_FourCC_video_type_BGRX + | ndisys::NDIlib_FourCC_video_type_RGBA + | ndisys::NDIlib_FourCC_video_type_RGBX => xres, + ndisys::NDIlib_FourCC_video_type_P216 + | ndisys::NDIlib_FourCC_video_type_PA16 => 2 * xres, + _ => 0, + } + } + } + } + + pub fn metadata(&self) -> Option<&str> { + unsafe { + match self { + VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => { + if frame.p_metadata.is_null() { + None + } else { + Some(ffi::CStr::from_ptr(frame.p_metadata).to_str().unwrap()) + } + } + } + } + } + + pub fn timestamp(&self) -> i64 { + match self { + VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => { + frame.timestamp + } + } + } + + pub fn as_ptr(&self) -> *const NDIlib_video_frame_v2_t { + match self { + VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => frame, + } + } + + pub fn try_from_video_frame( + frame: &'a gst_video::VideoFrameRef<&'a gst::BufferRef>, + timecode: i64, + ) -> Result { + // Planar formats must be in contiguous memory + let format = match frame.format() { + gst_video::VideoFormat::Uyvy => ndisys::NDIlib_FourCC_video_type_UYVY, + gst_video::VideoFormat::I420 => { + if (frame.plane_data(1).unwrap().as_ptr() as usize) + .checked_sub(frame.plane_data(0).unwrap().as_ptr() as usize) + != Some(frame.height() as usize * frame.plane_stride()[0] as usize) + { + return Err(()); + } + + if (frame.plane_data(2).unwrap().as_ptr() as usize) + .checked_sub(frame.plane_data(1).unwrap().as_ptr() as usize) + != Some((frame.height() as usize + 1) / 2 * frame.plane_stride()[1] as usize) + { + return Err(()); + } + + ndisys::NDIlib_FourCC_video_type_I420 + } + gst_video::VideoFormat::Nv12 => { + if (frame.plane_data(1).unwrap().as_ptr() as usize) + .checked_sub(frame.plane_data(0).unwrap().as_ptr() as usize) + != Some(frame.height() as usize * frame.plane_stride()[0] as usize) + { + return Err(()); + } + + ndisys::NDIlib_FourCC_video_type_NV12 + } + gst_video::VideoFormat::Nv21 => { + if (frame.plane_data(1).unwrap().as_ptr() as usize) + .checked_sub(frame.plane_data(0).unwrap().as_ptr() as usize) + != Some(frame.height() as usize * frame.plane_stride()[0] as usize) + { + return Err(()); + } + + ndisys::NDIlib_FourCC_video_type_NV12 + } + gst_video::VideoFormat::Yv12 => { + if (frame.plane_data(1).unwrap().as_ptr() as usize) + .checked_sub(frame.plane_data(0).unwrap().as_ptr() as usize) + != Some(frame.height() as usize * frame.plane_stride()[0] as usize) + { + return Err(()); + } + + if (frame.plane_data(2).unwrap().as_ptr() as usize) + .checked_sub(frame.plane_data(1).unwrap().as_ptr() as usize) + != Some((frame.height() as usize + 1) / 2 * frame.plane_stride()[1] as usize) + { + return Err(()); + } + + 
ndisys::NDIlib_FourCC_video_type_YV12 + } + gst_video::VideoFormat::Bgra => ndisys::NDIlib_FourCC_video_type_BGRA, + gst_video::VideoFormat::Bgrx => ndisys::NDIlib_FourCC_video_type_BGRX, + gst_video::VideoFormat::Rgba => ndisys::NDIlib_FourCC_video_type_RGBA, + gst_video::VideoFormat::Rgbx => ndisys::NDIlib_FourCC_video_type_RGBX, + _ => return Err(()), + }; + + let frame_format_type = match frame.info().interlace_mode() { + gst_video::VideoInterlaceMode::Progressive => { + NDIlib_frame_format_type_e::NDIlib_frame_format_type_progressive + } + gst_video::VideoInterlaceMode::Interleaved => { + NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved + } + // FIXME: Is this correct? + #[cfg(feature = "interlaced-fields")] + gst_video::VideoInterlaceMode::Alternate + if frame.flags().contains(gst_video::VideoFrameFlags::TFF) => + { + NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0 + } + #[cfg(feature = "interlaced-fields")] + gst_video::VideoInterlaceMode::Alternate + if !frame.flags().contains(gst_video::VideoFrameFlags::TFF) => + { + NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_1 + } + _ => return Err(()), + }; + + let picture_aspect_ratio = + frame.info().par() * gst::Fraction::new(frame.width() as i32, frame.height() as i32); + let picture_aspect_ratio = + picture_aspect_ratio.numer() as f32 / picture_aspect_ratio.denom() as f32; + + let ndi_frame = NDIlib_video_frame_v2_t { + xres: frame.width() as i32, + yres: frame.height() as i32, + FourCC: format, + frame_rate_N: frame.info().fps().numer(), + frame_rate_D: frame.info().fps().denom(), + picture_aspect_ratio, + frame_format_type, + timecode, + p_data: frame.plane_data(0).unwrap().as_ptr() as *const ::std::os::raw::c_char, + line_stride_or_data_size_in_bytes: frame.plane_stride()[0], + p_metadata: ptr::null(), + timestamp: 0, + }; + + Ok(VideoFrame::BorrowedGst(ndi_frame, frame)) + } +} + +impl<'a> Drop for VideoFrame<'a> { + #[allow(irrefutable_let_patterns)] + fn drop(&mut self) { + if let VideoFrame::BorrowedRecv(ref mut frame, recv) = *self { + unsafe { + NDIlib_recv_free_video_v2(recv.0.as_ptr() as *mut _, frame); + } + } + } +} + +#[derive(Debug)] +pub enum AudioFrame<'a> { + Owned( + NDIlib_audio_frame_v3_t, + Option, + Option>, + ), + BorrowedRecv(NDIlib_audio_frame_v3_t, &'a RecvInstance), +} + +impl<'a> AudioFrame<'a> { + pub fn sample_rate(&self) -> i32 { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.sample_rate + } + } + } + + pub fn no_channels(&self) -> i32 { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.no_channels + } + } + } + + pub fn no_samples(&self) -> i32 { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.no_samples + } + } + } + + pub fn timecode(&self) -> i64 { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.timecode + } + } + } + + pub fn fourcc(&self) -> NDIlib_FourCC_audio_type_e { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.FourCC + } + } + } + + pub fn data(&self) -> Option<&[u8]> { + unsafe { + use std::slice; + + let fourcc = self.fourcc(); + + if [NDIlib_FourCC_audio_type_FLTp].contains(&fourcc) { + return match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + Some(slice::from_raw_parts( + frame.p_data as *const u8, + (frame.no_channels 
* frame.channel_stride_or_data_size_in_bytes) + as usize, + )) + } + }; + } + + #[cfg(feature = "advanced-sdk")] + if [NDIlib_FourCC_audio_type_Opus].contains(&fourcc) { + return match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + Some(slice::from_raw_parts( + frame.p_data as *const u8, + frame.channel_stride_or_data_size_in_bytes as usize, + )) + } + }; + } + + None + } + } + + #[cfg(feature = "advanced-sdk")] + pub fn compressed_packet(&self) -> Option { + use byteorder::{LittleEndian, ReadBytesExt}; + use std::io::Cursor; + use std::slice; + + unsafe { + let fourcc = self.fourcc(); + + if ![NDIlib_FourCC_audio_type_AAC].contains(&fourcc) { + return None; + } + + let data = match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + slice::from_raw_parts( + frame.p_data as *const u8, + frame.channel_stride_or_data_size_in_bytes as usize, + ) + } + }; + + let mut cursor = Cursor::new(data); + let version = cursor.read_u32::().ok()?; + if version != ndisys::NDIlib_compressed_packet_version_0 { + return None; + } + + let fourcc = cursor.read_u32::().ok()?; + let pts = cursor.read_i64::().ok()?; + let dts = cursor.read_i64::().ok()?; + let _reserved = cursor.read_u64::().ok()?; + let flags = cursor.read_u32::().ok()?; + let data_size = cursor.read_u32::().ok()?; + let extra_data_size = cursor.read_u32::().ok()?; + + let expected_size = (ndisys::NDIlib_compressed_packet_version_0 as usize) + .checked_add(data_size as usize)? + .checked_add(extra_data_size as usize)?; + if data.len() < expected_size { + return None; + } + + Some(CompressedPacket { + fourcc, + pts, + dts, + key_frame: flags & ndisys::NDIlib_compressed_packet_flags_keyframe != 0, + data: &data[ndisys::NDIlib_compressed_packet_version_0 as usize..] 
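+ // Same packet layout as on the video side: skip the fixed header, then take data_size bytes of + // payload; extra_data, if present, follows directly after the payload and typically carries + // codec configuration.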
+ [..data_size as usize], + extra_data: if extra_data_size > 0 { + Some( + &data[ndisys::NDIlib_compressed_packet_version_0 as usize + + data_size as usize..][..extra_data_size as usize], + ) + } else { + None + }, + }) + } + } + + pub fn channel_stride_or_data_size_in_bytes(&self) -> i32 { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.channel_stride_or_data_size_in_bytes + } + } + } + + pub fn metadata(&self) -> Option<&str> { + unsafe { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + if frame.p_metadata.is_null() { + None + } else { + Some(ffi::CStr::from_ptr(frame.p_metadata).to_str().unwrap()) + } + } + } + } + } + + pub fn timestamp(&self) -> i64 { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => { + frame.timestamp + } + } + } + + pub fn as_ptr(&self) -> *const NDIlib_audio_frame_v3_t { + match self { + AudioFrame::BorrowedRecv(ref frame, _) | AudioFrame::Owned(ref frame, _, _) => frame, + } + } + + pub fn try_from_buffer( + info: &gst_audio::AudioInfo, + buffer: &gst::BufferRef, + timecode: i64, + ) -> Result { + if info.format() != gst_audio::AUDIO_FORMAT_F32 { + return Err(()); + } + + let map = buffer.map_readable().map_err(|_| ())?; + let src_data = map.as_slice_of::().map_err(|_| ())?; + + let no_samples = src_data.len() as i32 / info.channels() as i32; + let channel_stride_or_data_size_in_bytes = no_samples * mem::size_of::() as i32; + let mut dest_data = + Vec::::with_capacity(no_samples as usize * info.channels() as usize); + + assert_eq!(dest_data.capacity(), src_data.len()); + + unsafe { + let dest_ptr = dest_data.as_mut_ptr(); + + for (i, samples) in src_data.chunks_exact(info.channels() as usize).enumerate() { + for (c, sample) in samples.iter().enumerate() { + ptr::write(dest_ptr.add(c * no_samples as usize + i), *sample); + } + } + + dest_data.set_len(no_samples as usize * info.channels() as usize); + } + + let dest = NDIlib_audio_frame_v3_t { + sample_rate: info.rate() as i32, + no_channels: info.channels() as i32, + no_samples, + timecode, + FourCC: NDIlib_FourCC_audio_type_FLTp, + p_data: dest_data.as_mut_ptr(), + channel_stride_or_data_size_in_bytes, + p_metadata: ptr::null(), + timestamp: 0, + }; + + Ok(AudioFrame::Owned(dest, None, Some(dest_data))) + } +} + +impl<'a> Drop for AudioFrame<'a> { + #[allow(irrefutable_let_patterns)] + fn drop(&mut self) { + if let AudioFrame::BorrowedRecv(ref mut frame, recv) = *self { + unsafe { + NDIlib_recv_free_audio_v3(recv.0.as_ptr() as *mut _, frame); + } + } + } +} + +#[cfg(feature = "advanced-sdk")] +pub struct CompressedPacket<'a> { + pub fourcc: ndisys::NDIlib_compressed_FourCC_type_e, + pub pts: i64, + pub dts: i64, + pub key_frame: bool, + pub data: &'a [u8], + pub extra_data: Option<&'a [u8]>, +} + +#[derive(Debug)] +pub enum MetadataFrame<'a> { + Owned(NDIlib_metadata_frame_t, Option), + Borrowed(NDIlib_metadata_frame_t, &'a RecvInstance), +} + +impl<'a> MetadataFrame<'a> { + pub fn new(timecode: i64, data: Option<&str>) -> Self { + let data = data.map(|s| ffi::CString::new(s).unwrap()); + + MetadataFrame::Owned( + NDIlib_metadata_frame_t { + length: data + .as_ref() + .map(|s| s.to_str().unwrap().len()) + .unwrap_or(0) as i32, + timecode, + p_data: data + .as_ref() + .map(|s| s.as_ptr() as *mut _) + .unwrap_or(ptr::null_mut()), + }, + data, + ) + } + + pub fn timecode(&self) -> i64 { + match self { + MetadataFrame::Owned(ref frame, _) => frame.timecode, + 
MetadataFrame::Borrowed(ref frame, _) => frame.timecode, + } + } + + pub fn metadata(&self) -> Option<&str> { + unsafe { + match self { + MetadataFrame::Owned(_, ref metadata) => { + metadata.as_ref().map(|s| s.to_str().unwrap()) + } + MetadataFrame::Borrowed(ref frame, _) => { + if frame.p_data.is_null() || frame.length == 0 { + None + } else if frame.length != 0 { + use std::slice; + + Some( + ffi::CStr::from_bytes_with_nul_unchecked(slice::from_raw_parts( + frame.p_data as *const u8, + frame.length as usize, + )) + .to_str() + .unwrap(), + ) + } else { + Some(ffi::CStr::from_ptr(frame.p_data).to_str().unwrap()) + } + } + } + } + } + + pub fn as_ptr(&self) -> *const NDIlib_metadata_frame_t { + match self { + MetadataFrame::Owned(ref frame, _) => frame, + MetadataFrame::Borrowed(ref frame, _) => frame, + } + } +} + +impl<'a> Default for MetadataFrame<'a> { + fn default() -> Self { + MetadataFrame::Owned( + NDIlib_metadata_frame_t { + length: 0, + timecode: 0, //NDIlib_send_timecode_synthesize, + p_data: ptr::null(), + }, + None, + ) + } +} + +impl<'a> Drop for MetadataFrame<'a> { + fn drop(&mut self) { + if let MetadataFrame::Borrowed(ref mut frame, recv) = *self { + unsafe { + NDIlib_recv_free_metadata(recv.0.as_ptr() as *mut _, frame); + } + } + } +} + +#[derive(Debug, Clone)] +pub struct Queue(NDIlib_recv_queue_t); + +impl Queue { + pub fn audio_frames(&self) -> i32 { + self.0.audio_frames + } + pub fn video_frames(&self) -> i32 { + self.0.video_frames + } + pub fn metadata_frames(&self) -> i32 { + self.0.metadata_frames + } +} diff --git a/net/ndi/src/ndisink/imp.rs b/net/ndi/src/ndisink/imp.rs new file mode 100644 index 00000000..86c31032 --- /dev/null +++ b/net/ndi/src/ndisink/imp.rs @@ -0,0 +1,363 @@ +use glib::subclass::prelude::*; +use gst::prelude::*; +use gst::subclass::prelude::*; +use gst::{gst_debug, gst_error, gst_info, gst_trace}; +use gst_base::prelude::*; +use gst_base::subclass::prelude::*; + +use std::sync::Mutex; + +use once_cell::sync::Lazy; + +use crate::ndi::SendInstance; + +static DEFAULT_SENDER_NDI_NAME: Lazy = Lazy::new(|| { + format!( + "GStreamer NDI Sink {}-{}", + env!("CARGO_PKG_VERSION"), + env!("COMMIT_ID") + ) +}); + +#[derive(Debug)] +struct Settings { + ndi_name: String, +} + +impl Default for Settings { + fn default() -> Self { + Settings { + ndi_name: DEFAULT_SENDER_NDI_NAME.clone(), + } + } +} + +struct State { + send: SendInstance, + video_info: Option, + audio_info: Option, +} + +pub struct NdiSink { + settings: Mutex, + state: Mutex>, +} + +static CAT: Lazy = Lazy::new(|| { + gst::DebugCategory::new("ndisink", gst::DebugColorFlags::empty(), Some("NDI Sink")) +}); + +#[glib::object_subclass] +impl ObjectSubclass for NdiSink { + const NAME: &'static str = "NdiSink"; + type Type = super::NdiSink; + type ParentType = gst_base::BaseSink; + + fn new() -> Self { + Self { + settings: Mutex::new(Default::default()), + state: Mutex::new(Default::default()), + } + } +} + +impl ObjectImpl for NdiSink { + fn properties() -> &'static [glib::ParamSpec] { + static PROPERTIES: Lazy> = Lazy::new(|| { + vec![glib::ParamSpecString::new( + "ndi-name", + "NDI Name", + "NDI Name to use", + Some(DEFAULT_SENDER_NDI_NAME.as_ref()), + glib::ParamFlags::READWRITE, + )] + }); + + PROPERTIES.as_ref() + } + + fn set_property( + &self, + _obj: &Self::Type, + _id: usize, + value: &glib::Value, + pspec: &glib::ParamSpec, + ) { + match pspec.name() { + "ndi-name" => { + let mut settings = self.settings.lock().unwrap(); + settings.ndi_name = value + .get::() + .unwrap_or_else(|_| 
DEFAULT_SENDER_NDI_NAME.clone()); + } + _ => unimplemented!(), + }; + } + + fn property(&self, _obj: &Self::Type, _id: usize, pspec: &glib::ParamSpec) -> glib::Value { + match pspec.name() { + "ndi-name" => { + let settings = self.settings.lock().unwrap(); + settings.ndi_name.to_value() + } + _ => unimplemented!(), + } + } +} + +impl GstObjectImpl for NdiSink {} + +impl ElementImpl for NdiSink { + fn metadata() -> Option<&'static gst::subclass::ElementMetadata> { + static ELEMENT_METADATA: Lazy = Lazy::new(|| { + gst::subclass::ElementMetadata::new( + "NDI Sink", + "Sink/Audio/Video", + "Render as an NDI stream", + "Sebastian Dröge ", + ) + }); + + Some(&*ELEMENT_METADATA) + } + + fn pad_templates() -> &'static [gst::PadTemplate] { + static PAD_TEMPLATES: Lazy> = Lazy::new(|| { + let caps = gst::Caps::builder_full() + .structure( + gst::Structure::builder("video/x-raw") + .field( + "format", + &gst::List::new(&[ + &gst_video::VideoFormat::Uyvy.to_str(), + &gst_video::VideoFormat::I420.to_str(), + &gst_video::VideoFormat::Nv12.to_str(), + &gst_video::VideoFormat::Nv21.to_str(), + &gst_video::VideoFormat::Yv12.to_str(), + &gst_video::VideoFormat::Bgra.to_str(), + &gst_video::VideoFormat::Bgrx.to_str(), + &gst_video::VideoFormat::Rgba.to_str(), + &gst_video::VideoFormat::Rgbx.to_str(), + ]), + ) + .field("width", &gst::IntRange::::new(1, std::i32::MAX)) + .field("height", &gst::IntRange::::new(1, std::i32::MAX)) + .field( + "framerate", + &gst::FractionRange::new( + gst::Fraction::new(0, 1), + gst::Fraction::new(std::i32::MAX, 1), + ), + ) + .build(), + ) + .structure( + gst::Structure::builder("audio/x-raw") + .field("format", &gst_audio::AUDIO_FORMAT_F32.to_str()) + .field("rate", &gst::IntRange::::new(1, i32::MAX)) + .field("channels", &gst::IntRange::::new(1, i32::MAX)) + .field("layout", &"interleaved") + .build(), + ) + .build(); + + let sink_pad_template = gst::PadTemplate::new( + "sink", + gst::PadDirection::Sink, + gst::PadPresence::Always, + &caps, + ) + .unwrap(); + vec![sink_pad_template] + }); + + PAD_TEMPLATES.as_ref() + } +} + +impl BaseSinkImpl for NdiSink { + fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { + let mut state_storage = self.state.lock().unwrap(); + let settings = self.settings.lock().unwrap(); + + let send = SendInstance::builder(&settings.ndi_name) + .build() + .ok_or_else(|| { + gst::error_msg!( + gst::ResourceError::OpenWrite, + ["Could not create send instance"] + ) + })?; + + let state = State { + send, + video_info: None, + audio_info: None, + }; + *state_storage = Some(state); + gst_info!(CAT, obj: element, "Started"); + + Ok(()) + } + + fn stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { + let mut state_storage = self.state.lock().unwrap(); + + *state_storage = None; + gst_info!(CAT, obj: element, "Stopped"); + + Ok(()) + } + + fn unlock(&self, _element: &Self::Type) -> Result<(), gst::ErrorMessage> { + Ok(()) + } + + fn unlock_stop(&self, _element: &Self::Type) -> Result<(), gst::ErrorMessage> { + Ok(()) + } + + fn set_caps(&self, element: &Self::Type, caps: &gst::Caps) -> Result<(), gst::LoggableError> { + gst_debug!(CAT, obj: element, "Setting caps {}", caps); + + let mut state_storage = self.state.lock().unwrap(); + let state = match &mut *state_storage { + None => return Err(gst::loggable_error!(CAT, "Sink not started yet")), + Some(ref mut state) => state, + }; + + let s = caps.structure(0).unwrap(); + if s.name() == "video/x-raw" { + let info = gst_video::VideoInfo::from_caps(caps) + .map_err(|_| 
gst::loggable_error!(CAT, "Couldn't parse caps {}", caps))?; + + state.video_info = Some(info); + state.audio_info = None; + } else { + let info = gst_audio::AudioInfo::from_caps(caps) + .map_err(|_| gst::loggable_error!(CAT, "Couldn't parse caps {}", caps))?; + + state.audio_info = Some(info); + state.video_info = None; + } + + Ok(()) + } + + fn render( + &self, + element: &Self::Type, + buffer: &gst::Buffer, + ) -> Result { + let mut state_storage = self.state.lock().unwrap(); + let state = match &mut *state_storage { + None => return Err(gst::FlowError::Error), + Some(ref mut state) => state, + }; + + if let Some(ref info) = state.video_info { + if let Some(audio_meta) = buffer.meta::() { + for (buffer, info, timecode) in audio_meta.buffers() { + let frame = crate::ndi::AudioFrame::try_from_buffer(info, buffer, *timecode) + .map_err(|_| { + gst_error!(CAT, obj: element, "Unsupported audio frame"); + gst::FlowError::NotNegotiated + })?; + + gst_trace!( + CAT, + obj: element, + "Sending audio buffer {:?} with timecode {} and format {:?}", + buffer, + if *timecode < 0 { + gst::ClockTime::NONE.display() + } else { + Some(gst::ClockTime::from_nseconds(*timecode as u64 * 100)).display() + }, + info, + ); + state.send.send_audio(&frame); + } + } + + // Skip empty/gap buffers from ndisinkcombiner + if buffer.size() != 0 { + let timecode = element + .segment() + .downcast::() + .ok() + .and_then(|segment| { + segment + .to_running_time(buffer.pts()) + .zip(element.base_time()) + }) + .and_then(|(running_time, base_time)| running_time.checked_add(base_time)) + .map(|time| (time.nseconds() / 100) as i64) + .unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize); + + let frame = gst_video::VideoFrameRef::from_buffer_ref_readable(buffer, info) + .map_err(|_| { + gst_error!(CAT, obj: element, "Failed to map buffer"); + gst::FlowError::Error + })?; + + let frame = crate::ndi::VideoFrame::try_from_video_frame(&frame, timecode) + .map_err(|_| { + gst_error!(CAT, obj: element, "Unsupported video frame"); + gst::FlowError::NotNegotiated + })?; + + gst_trace!( + CAT, + obj: element, + "Sending video buffer {:?} with timecode {} and format {:?}", + buffer, + if timecode < 0 { + gst::ClockTime::NONE.display() + } else { + Some(gst::ClockTime::from_nseconds(timecode as u64 * 100)).display() + }, + info + ); + state.send.send_video(&frame); + } + } else if let Some(ref info) = state.audio_info { + let timecode = element + .segment() + .downcast::() + .ok() + .and_then(|segment| { + segment + .to_running_time(buffer.pts()) + .zip(element.base_time()) + }) + .and_then(|(running_time, base_time)| running_time.checked_add(base_time)) + .map(|time| (time.nseconds() / 100) as i64) + .unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize); + + let frame = + crate::ndi::AudioFrame::try_from_buffer(info, buffer, timecode).map_err(|_| { + gst_error!(CAT, obj: element, "Unsupported audio frame"); + gst::FlowError::NotNegotiated + })?; + + gst_trace!( + CAT, + obj: element, + "Sending audio buffer {:?} with timecode {} and format {:?}", + buffer, + if timecode < 0 { + gst::ClockTime::NONE.display() + } else { + Some(gst::ClockTime::from_nseconds(timecode as u64 * 100)).display() + }, + info, + ); + state.send.send_audio(&frame); + } else { + return Err(gst::FlowError::Error); + } + + Ok(gst::FlowSuccess::Ok) + } +} diff --git a/net/ndi/src/ndisink/mod.rs b/net/ndi/src/ndisink/mod.rs new file mode 100644 index 00000000..8d6d955a --- /dev/null +++ b/net/ndi/src/ndisink/mod.rs @@ -0,0 +1,19 @@ +use glib::prelude::*; + 
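+// Illustrative usage once the plugin is loaded (example pipeline only, not part of this patch): +//   gst-launch-1.0 videotestsrc ! ndisink ndi-name="GStreamer NDI Test"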
+mod imp; + +glib::wrapper! { + pub struct NdiSink(ObjectSubclass) @extends gst_base::BaseSink, gst::Element, gst::Object; +} + +unsafe impl Send for NdiSink {} +unsafe impl Sync for NdiSink {} + +pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> { + gst::Element::register( + Some(plugin), + "ndisink", + gst::Rank::None, + NdiSink::static_type(), + ) +} diff --git a/net/ndi/src/ndisinkcombiner/imp.rs b/net/ndi/src/ndisinkcombiner/imp.rs new file mode 100644 index 00000000..20fd8816 --- /dev/null +++ b/net/ndi/src/ndisinkcombiner/imp.rs @@ -0,0 +1,634 @@ +use glib::prelude::*; +use glib::subclass::prelude::*; +use gst::prelude::*; +use gst::subclass::prelude::*; +use gst::{gst_debug, gst_error, gst_trace, gst_warning}; +use gst_base::prelude::*; +use gst_base::subclass::prelude::*; + +use once_cell::sync::Lazy; + +use std::mem; +use std::sync::Mutex; + +static CAT: once_cell::sync::Lazy = once_cell::sync::Lazy::new(|| { + gst::DebugCategory::new( + "ndisinkcombiner", + gst::DebugColorFlags::empty(), + Some("NDI sink audio/video combiner"), + ) +}); + +struct State { + // Note that this applies to the currently pending buffer on the pad and *not* + // to the current_video_buffer below! + video_info: Option, + audio_info: Option, + current_video_buffer: Option<(gst::Buffer, gst::ClockTime)>, + current_audio_buffers: Vec<(gst::Buffer, gst_audio::AudioInfo, i64)>, +} + +pub struct NdiSinkCombiner { + video_pad: gst_base::AggregatorPad, + audio_pad: Mutex>, + state: Mutex>, +} + +#[glib::object_subclass] +impl ObjectSubclass for NdiSinkCombiner { + const NAME: &'static str = "NdiSinkCombiner"; + type Type = super::NdiSinkCombiner; + type ParentType = gst_base::Aggregator; + + fn with_class(klass: &Self::Class) -> Self { + let templ = klass.pad_template("video").unwrap(); + let video_pad = + gst::PadBuilder::::from_template(&templ, Some("video")) + .build(); + + Self { + video_pad, + audio_pad: Mutex::new(None), + state: Mutex::new(None), + } + } +} + +impl ObjectImpl for NdiSinkCombiner { + fn constructed(&self, obj: &Self::Type) { + obj.add_pad(&self.video_pad).unwrap(); + + self.parent_constructed(obj); + } +} + +impl GstObjectImpl for NdiSinkCombiner {} + +impl ElementImpl for NdiSinkCombiner { + fn metadata() -> Option<&'static gst::subclass::ElementMetadata> { + static ELEMENT_METADATA: Lazy = Lazy::new(|| { + gst::subclass::ElementMetadata::new( + "NDI Sink Combiner", + "Combiner/Audio/Video", + "NDI sink audio/video combiner", + "Sebastian Dröge ", + ) + }); + + Some(&*ELEMENT_METADATA) + } + + fn pad_templates() -> &'static [gst::PadTemplate] { + static PAD_TEMPLATES: Lazy> = Lazy::new(|| { + let caps = gst::Caps::builder("video/x-raw") + .field( + "format", + &gst::List::new(&[ + &gst_video::VideoFormat::Uyvy.to_str(), + &gst_video::VideoFormat::I420.to_str(), + &gst_video::VideoFormat::Nv12.to_str(), + &gst_video::VideoFormat::Nv21.to_str(), + &gst_video::VideoFormat::Yv12.to_str(), + &gst_video::VideoFormat::Bgra.to_str(), + &gst_video::VideoFormat::Bgrx.to_str(), + &gst_video::VideoFormat::Rgba.to_str(), + &gst_video::VideoFormat::Rgbx.to_str(), + ]), + ) + .field("width", &gst::IntRange::::new(1, i32::MAX)) + .field("height", &gst::IntRange::::new(1, i32::MAX)) + .field( + "framerate", + &gst::FractionRange::new( + gst::Fraction::new(1, i32::MAX), + gst::Fraction::new(i32::MAX, 1), + ), + ) + .build(); + let src_pad_template = gst::PadTemplate::with_gtype( + "src", + gst::PadDirection::Src, + gst::PadPresence::Always, + &caps, + 
gst_base::AggregatorPad::static_type(), + ) + .unwrap(); + + let video_sink_pad_template = gst::PadTemplate::with_gtype( + "video", + gst::PadDirection::Sink, + gst::PadPresence::Always, + &caps, + gst_base::AggregatorPad::static_type(), + ) + .unwrap(); + + let caps = gst::Caps::builder("audio/x-raw") + .field("format", &gst_audio::AUDIO_FORMAT_F32.to_str()) + .field("rate", &gst::IntRange::::new(1, i32::MAX)) + .field("channels", &gst::IntRange::::new(1, i32::MAX)) + .field("layout", &"interleaved") + .build(); + let audio_sink_pad_template = gst::PadTemplate::with_gtype( + "audio", + gst::PadDirection::Sink, + gst::PadPresence::Request, + &caps, + gst_base::AggregatorPad::static_type(), + ) + .unwrap(); + vec![ + src_pad_template, + video_sink_pad_template, + audio_sink_pad_template, + ] + }); + + PAD_TEMPLATES.as_ref() + } + + fn release_pad(&self, element: &Self::Type, pad: &gst::Pad) { + let mut audio_pad_storage = self.audio_pad.lock().unwrap(); + + if audio_pad_storage.as_ref().map(|p| p.upcast_ref()) == Some(pad) { + gst_debug!(CAT, obj: element, "Release audio pad"); + self.parent_release_pad(element, pad); + *audio_pad_storage = None; + } + } +} + +impl AggregatorImpl for NdiSinkCombiner { + fn create_new_pad( + &self, + agg: &Self::Type, + templ: &gst::PadTemplate, + _req_name: Option<&str>, + _caps: Option<&gst::Caps>, + ) -> Option { + let mut audio_pad_storage = self.audio_pad.lock().unwrap(); + + if audio_pad_storage.is_some() { + gst_error!(CAT, obj: agg, "Audio pad already requested"); + return None; + } + + let sink_templ = agg.pad_template("audio").unwrap(); + if templ != &sink_templ { + gst_error!(CAT, obj: agg, "Wrong pad template"); + return None; + } + + let pad = + gst::PadBuilder::::from_template(templ, Some("audio")).build(); + *audio_pad_storage = Some(pad.clone()); + + gst_debug!(CAT, obj: agg, "Requested audio pad"); + + Some(pad) + } + + fn start(&self, agg: &Self::Type) -> Result<(), gst::ErrorMessage> { + let mut state_storage = self.state.lock().unwrap(); + *state_storage = Some(State { + audio_info: None, + video_info: None, + current_video_buffer: None, + current_audio_buffers: Vec::new(), + }); + + gst_debug!(CAT, obj: agg, "Started"); + + Ok(()) + } + + fn stop(&self, agg: &Self::Type) -> Result<(), gst::ErrorMessage> { + // Drop our state now + let _ = self.state.lock().unwrap().take(); + + gst_debug!(CAT, obj: agg, "Stopped"); + + Ok(()) + } + + fn next_time(&self, _agg: &Self::Type) -> Option { + // FIXME: What to do here? 
We don't really know when the next buffer is expected + gst::ClockTime::NONE + } + + fn clip( + &self, + agg: &Self::Type, + agg_pad: &gst_base::AggregatorPad, + mut buffer: gst::Buffer, + ) -> Option { + let segment = match agg_pad.segment().downcast::() { + Ok(segment) => segment, + Err(_) => { + gst_error!(CAT, obj: agg, "Only TIME segments supported"); + return Some(buffer); + } + }; + + let pts = buffer.pts(); + if pts.is_none() { + gst_error!(CAT, obj: agg, "Only buffers with PTS supported"); + return Some(buffer); + } + + let duration = buffer.duration(); + + gst_trace!( + CAT, + obj: agg_pad, + "Clipping buffer {:?} with PTS {} and duration {}", + buffer, + pts.display(), + duration.display(), + ); + + let state_storage = self.state.lock().unwrap(); + let state = match &*state_storage { + Some(ref state) => state, + None => return None, + }; + + let duration = if duration.is_some() { + duration + } else if let Some(ref audio_info) = state.audio_info { + gst::ClockTime::SECOND.mul_div_floor( + buffer.size() as u64, + audio_info.rate() as u64 * audio_info.bpf() as u64, + ) + } else if let Some(ref video_info) = state.video_info { + if video_info.fps().numer() > 0 { + gst::ClockTime::SECOND.mul_div_floor( + video_info.fps().denom() as u64, + video_info.fps().numer() as u64, + ) + } else { + gst::ClockTime::NONE + } + } else { + unreachable!() + }; + + gst_debug!( + CAT, + obj: agg_pad, + "Clipping buffer {:?} with PTS {} and duration {}", + buffer, + pts.display(), + duration.display(), + ); + + if agg_pad == &self.video_pad { + let end_pts = pts + .zip(duration) + .and_then(|(pts, duration)| pts.checked_add(duration)); + + segment.clip(pts, end_pts).map(|(start, stop)| { + { + let buffer = buffer.make_mut(); + buffer.set_pts(start); + buffer.set_duration( + stop.zip(start) + .and_then(|(stop, start)| stop.checked_sub(start)), + ); + } + + buffer + }) + } else if let Some(ref audio_info) = state.audio_info { + gst_audio::audio_buffer_clip( + buffer, + segment.upcast_ref(), + audio_info.rate(), + audio_info.bpf(), + ) + } else { + // Can't really have audio buffers without caps + unreachable!(); + } + } + + fn aggregate( + &self, + agg: &Self::Type, + timeout: bool, + ) -> Result { + // FIXME: Can't really happen because we always return NONE from get_next_time() but that + // should be improved! 
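+ // Overall strategy: keep exactly one video buffer pending, accumulate all audio buffers whose + // running time ends no later than the end of that video buffer, then attach them as a meta and + // push the pending video buffer downstream.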
+ assert!(!timeout); + + // Because peek_buffer() can call into clip() and that would take the state lock again, + // first try getting buffers from both pads here + let video_buffer_and_segment = match self.video_pad.peek_buffer() { + Some(video_buffer) => { + let video_segment = self.video_pad.segment(); + let video_segment = match video_segment.downcast::() { + Ok(video_segment) => video_segment, + Err(video_segment) => { + gst_error!( + CAT, + obj: agg, + "Video segment of wrong format {:?}", + video_segment.format() + ); + return Err(gst::FlowError::Error); + } + }; + + Some((video_buffer, video_segment)) + } + None if !self.video_pad.is_eos() => { + gst_trace!(CAT, obj: agg, "Waiting for video buffer"); + return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA); + } + None => None, + }; + + let audio_buffer_segment_and_pad; + if let Some(audio_pad) = self.audio_pad.lock().unwrap().clone() { + audio_buffer_segment_and_pad = match audio_pad.peek_buffer() { + Some(audio_buffer) if audio_buffer.size() == 0 => { + // Skip empty/gap audio buffer + audio_pad.drop_buffer(); + gst_trace!(CAT, obj: agg, "Empty audio buffer, waiting for next"); + return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA); + } + Some(audio_buffer) => { + let audio_segment = audio_pad.segment(); + let audio_segment = match audio_segment.downcast::() { + Ok(audio_segment) => audio_segment, + Err(audio_segment) => { + gst_error!( + CAT, + obj: agg, + "Audio segment of wrong format {:?}", + audio_segment.format() + ); + return Err(gst::FlowError::Error); + } + }; + + Some((audio_buffer, audio_segment, audio_pad)) + } + None if !audio_pad.is_eos() => { + gst_trace!(CAT, obj: agg, "Waiting for audio buffer"); + return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA); + } + None => None, + }; + } else { + audio_buffer_segment_and_pad = None; + } + + let mut state_storage = self.state.lock().unwrap(); + let state = match &mut *state_storage { + Some(ref mut state) => state, + None => return Err(gst::FlowError::Flushing), + }; + + let (mut current_video_buffer, current_video_running_time_end, next_video_buffer) = + if let Some((video_buffer, video_segment)) = video_buffer_and_segment { + let video_running_time = video_segment.to_running_time(video_buffer.pts()).unwrap(); + + match state.current_video_buffer { + None => { + gst_trace!(CAT, obj: agg, "First video buffer, waiting for second"); + state.current_video_buffer = Some((video_buffer, video_running_time)); + drop(state_storage); + self.video_pad.drop_buffer(); + return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA); + } + Some((ref buffer, _)) => ( + buffer.clone(), + Some(video_running_time), + Some((video_buffer, video_running_time)), + ), + } + } else { + match (&state.current_video_buffer, &audio_buffer_segment_and_pad) { + (None, None) => { + gst_trace!( + CAT, + obj: agg, + "All pads are EOS and no buffers are queued, finishing" + ); + return Err(gst::FlowError::Eos); + } + (None, Some((ref audio_buffer, ref audio_segment, _))) => { + // Create an empty dummy buffer for attaching the audio. This is going to + // be dropped by the sink later. 
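+ // The dummy buffer still needs a PTS: map the audio running time back into the video segment + // so that downstream still receives a valid video timestamp.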
+ let audio_running_time = + audio_segment.to_running_time(audio_buffer.pts()).unwrap(); + + let video_segment = self.video_pad.segment(); + let video_segment = match video_segment.downcast::() { + Ok(video_segment) => video_segment, + Err(video_segment) => { + gst_error!( + CAT, + obj: agg, + "Video segment of wrong format {:?}", + video_segment.format() + ); + return Err(gst::FlowError::Error); + } + }; + let video_pts = + video_segment.position_from_running_time(audio_running_time); + if video_pts.is_none() { + gst_warning!(CAT, obj: agg, "Can't output more audio after video EOS"); + return Err(gst::FlowError::Eos); + } + + let mut buffer = gst::Buffer::new(); + { + let buffer = buffer.get_mut().unwrap(); + buffer.set_pts(video_pts); + } + + (buffer, gst::ClockTime::NONE, None) + } + (Some((ref buffer, _)), _) => (buffer.clone(), gst::ClockTime::NONE, None), + } + }; + + if let Some((audio_buffer, audio_segment, audio_pad)) = audio_buffer_segment_and_pad { + let audio_info = match state.audio_info { + Some(ref audio_info) => audio_info, + None => { + gst_error!(CAT, obj: agg, "Have no audio caps"); + return Err(gst::FlowError::NotNegotiated); + } + }; + + let audio_running_time = audio_segment.to_running_time(audio_buffer.pts()); + let duration = gst::ClockTime::SECOND.mul_div_floor( + audio_buffer.size() as u64 / audio_info.bpf() as u64, + audio_info.rate() as u64, + ); + let audio_running_time_end = audio_running_time + .zip(duration) + .and_then(|(running_time, duration)| running_time.checked_add(duration)); + + if audio_running_time_end + .zip(current_video_running_time_end) + .map(|(audio, video)| audio <= video) + .unwrap_or(true) + { + let timecode = agg + .base_time() + .zip(audio_running_time) + .map(|(base_time, audio_running_time)| { + ((base_time.nseconds() + audio_running_time.nseconds()) / 100) as i64 + }) + .unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize); + + gst_trace!( + CAT, + obj: agg, + "Including audio buffer {:?} with timecode {}: {} <= {}", + audio_buffer, + timecode, + audio_running_time_end.display(), + current_video_running_time_end.display(), + ); + state + .current_audio_buffers + .push((audio_buffer, audio_info.clone(), timecode)); + audio_pad.drop_buffer(); + + // If there is still video data, wait for the next audio buffer or EOS, + // otherwise just output the dummy video buffer directly. 
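+ // A running-time end of NONE means that no more video is expected at this point (video EOS or + // the dummy buffer from above), so there is no reason to wait for more audio.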
+ if current_video_running_time_end.is_some() { + return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA); + } + } + + // Otherwise finish this video buffer with all audio that has accumulated so + // far + } + + let audio_buffers = mem::take(&mut state.current_audio_buffers); + + if !audio_buffers.is_empty() { + let current_video_buffer = current_video_buffer.make_mut(); + crate::ndisinkmeta::NdiSinkAudioMeta::add(current_video_buffer, audio_buffers); + } + + if let Some((video_buffer, video_running_time)) = next_video_buffer { + state.current_video_buffer = Some((video_buffer, video_running_time)); + drop(state_storage); + self.video_pad.drop_buffer(); + } else { + state.current_video_buffer = None; + drop(state_storage); + } + + gst_trace!( + CAT, + obj: agg, + "Finishing video buffer {:?}", + current_video_buffer + ); + agg.finish_buffer(current_video_buffer) + } + + fn sink_event( + &self, + agg: &Self::Type, + pad: &gst_base::AggregatorPad, + event: gst::Event, + ) -> bool { + use gst::EventView; + + match event.view() { + EventView::Caps(caps) => { + let caps = caps.caps_owned(); + + let mut state_storage = self.state.lock().unwrap(); + let state = match &mut *state_storage { + Some(ref mut state) => state, + None => return false, + }; + + if pad == &self.video_pad { + let info = match gst_video::VideoInfo::from_caps(&caps) { + Ok(info) => info, + Err(_) => { + gst_error!(CAT, obj: pad, "Failed to parse caps {:?}", caps); + return false; + } + }; + + // 2 frames latency because we queue 1 frame and wait until audio + // up to the end of that frame has arrived. + let latency = if info.fps().numer() > 0 { + gst::ClockTime::SECOND + .mul_div_floor(2 * info.fps().denom() as u64, info.fps().numer() as u64) + .unwrap_or(80 * gst::ClockTime::MSECOND) + } else { + // let's assume 25fps and 2 frames latency + 80 * gst::ClockTime::MSECOND + }; + + state.video_info = Some(info); + + drop(state_storage); + + agg.set_latency(latency, gst::ClockTime::NONE); + + // The video caps are passed through as the audio is included only in a meta + agg.set_src_caps(&caps); + } else { + let info = match gst_audio::AudioInfo::from_caps(&caps) { + Ok(info) => info, + Err(_) => { + gst_error!(CAT, obj: pad, "Failed to parse caps {:?}", caps); + return false; + } + }; + + state.audio_info = Some(info); + } + } + // The video segment is passed through as-is and the video timestamps are preserved + EventView::Segment(segment) if pad == &self.video_pad => { + let segment = segment.segment(); + gst_debug!(CAT, obj: agg, "Updating segment {:?}", segment); + agg.update_segment(segment); + } + _ => (), + } + + self.parent_sink_event(agg, pad, event) + } + + fn sink_query( + &self, + agg: &Self::Type, + pad: &gst_base::AggregatorPad, + query: &mut gst::QueryRef, + ) -> bool { + use gst::QueryView; + + match query.view_mut() { + QueryView::Caps(_) if pad == &self.video_pad => { + // Directly forward caps queries + let srcpad = agg.static_pad("src").unwrap(); + return srcpad.peer_query(query); + } + _ => (), + } + + self.parent_sink_query(agg, pad, query) + } + + fn negotiate(&self, _agg: &Self::Type) -> bool { + // No negotiation needed as the video caps are just passed through + true + } +} diff --git a/net/ndi/src/ndisinkcombiner/mod.rs b/net/ndi/src/ndisinkcombiner/mod.rs new file mode 100644 index 00000000..b86c4cab --- /dev/null +++ b/net/ndi/src/ndisinkcombiner/mod.rs @@ -0,0 +1,19 @@ +use glib::prelude::*; + +mod imp; + +glib::wrapper! 
{ + pub struct NdiSinkCombiner(ObjectSubclass) @extends gst_base::Aggregator, gst::Element, gst::Object; +} + +unsafe impl Send for NdiSinkCombiner {} +unsafe impl Sync for NdiSinkCombiner {} + +pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> { + gst::Element::register( + Some(plugin), + "ndisinkcombiner", + gst::Rank::None, + NdiSinkCombiner::static_type(), + ) +} diff --git a/net/ndi/src/ndisinkmeta.rs b/net/ndi/src/ndisinkmeta.rs new file mode 100644 index 00000000..14aee926 --- /dev/null +++ b/net/ndi/src/ndisinkmeta.rs @@ -0,0 +1,142 @@ +use gst::prelude::*; +use std::fmt; +use std::mem; + +#[repr(transparent)] +pub struct NdiSinkAudioMeta(imp::NdiSinkAudioMeta); + +unsafe impl Send for NdiSinkAudioMeta {} +unsafe impl Sync for NdiSinkAudioMeta {} + +impl NdiSinkAudioMeta { + pub fn add( + buffer: &mut gst::BufferRef, + buffers: Vec<(gst::Buffer, gst_audio::AudioInfo, i64)>, + ) -> gst::MetaRefMut { + unsafe { + // Manually dropping because gst_buffer_add_meta() takes ownership of the + // content of the struct + let mut params = mem::ManuallyDrop::new(imp::NdiSinkAudioMetaParams { buffers }); + + let meta = gst::ffi::gst_buffer_add_meta( + buffer.as_mut_ptr(), + imp::ndi_sink_audio_meta_get_info(), + &mut *params as *mut imp::NdiSinkAudioMetaParams as glib::ffi::gpointer, + ) as *mut imp::NdiSinkAudioMeta; + + Self::from_mut_ptr(buffer, meta) + } + } + + pub fn buffers(&self) -> &[(gst::Buffer, gst_audio::AudioInfo, i64)] { + &self.0.buffers + } +} + +unsafe impl MetaAPI for NdiSinkAudioMeta { + type GstType = imp::NdiSinkAudioMeta; + + fn meta_api() -> glib::Type { + imp::ndi_sink_audio_meta_api_get_type() + } +} + +impl fmt::Debug for NdiSinkAudioMeta { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("NdiSinkAudioMeta") + .field("buffers", &self.buffers()) + .finish() + } +} + +mod imp { + use glib::translate::*; + use once_cell::sync::Lazy; + use std::mem; + use std::ptr; + + pub(super) struct NdiSinkAudioMetaParams { + pub buffers: Vec<(gst::Buffer, gst_audio::AudioInfo, i64)>, + } + + #[repr(C)] + pub struct NdiSinkAudioMeta { + parent: gst::ffi::GstMeta, + pub(super) buffers: Vec<(gst::Buffer, gst_audio::AudioInfo, i64)>, + } + + pub(super) fn ndi_sink_audio_meta_api_get_type() -> glib::Type { + static TYPE: Lazy = Lazy::new(|| unsafe { + let t = from_glib(gst::ffi::gst_meta_api_type_register( + b"GstNdiSinkAudioMetaAPI\0".as_ptr() as *const _, + [ptr::null::()].as_ptr() as *mut *const _, + )); + + assert_ne!(t, glib::Type::INVALID); + + t + }); + + *TYPE + } + + unsafe extern "C" fn ndi_sink_audio_meta_init( + meta: *mut gst::ffi::GstMeta, + params: glib::ffi::gpointer, + _buffer: *mut gst::ffi::GstBuffer, + ) -> glib::ffi::gboolean { + assert!(!params.is_null()); + + let meta = &mut *(meta as *mut NdiSinkAudioMeta); + let params = ptr::read(params as *const NdiSinkAudioMetaParams); + + ptr::write(&mut meta.buffers, params.buffers); + + true.into_glib() + } + + unsafe extern "C" fn ndi_sink_audio_meta_free( + meta: *mut gst::ffi::GstMeta, + _buffer: *mut gst::ffi::GstBuffer, + ) { + let meta = &mut *(meta as *mut NdiSinkAudioMeta); + + ptr::drop_in_place(&mut meta.buffers); + } + + unsafe extern "C" fn ndi_sink_audio_meta_transform( + dest: *mut gst::ffi::GstBuffer, + meta: *mut gst::ffi::GstMeta, + _buffer: *mut gst::ffi::GstBuffer, + _type_: glib::ffi::GQuark, + _data: glib::ffi::gpointer, + ) -> glib::ffi::gboolean { + let meta = &*(meta as *mut NdiSinkAudioMeta); + + 
super::NdiSinkAudioMeta::add(gst::BufferRef::from_mut_ptr(dest), meta.buffers.clone()); + + true.into_glib() + } + + pub(super) fn ndi_sink_audio_meta_get_info() -> *const gst::ffi::GstMetaInfo { + struct MetaInfo(ptr::NonNull); + unsafe impl Send for MetaInfo {} + unsafe impl Sync for MetaInfo {} + + static META_INFO: Lazy = Lazy::new(|| unsafe { + MetaInfo( + ptr::NonNull::new(gst::ffi::gst_meta_register( + ndi_sink_audio_meta_api_get_type().into_glib(), + b"GstNdiSinkAudioMeta\0".as_ptr() as *const _, + mem::size_of::(), + Some(ndi_sink_audio_meta_init), + Some(ndi_sink_audio_meta_free), + Some(ndi_sink_audio_meta_transform), + ) as *mut gst::ffi::GstMetaInfo) + .expect("Failed to register meta API"), + ) + }); + + META_INFO.0.as_ptr() + } +} diff --git a/net/ndi/src/ndisrc/imp.rs b/net/ndi/src/ndisrc/imp.rs new file mode 100644 index 00000000..8bed052a --- /dev/null +++ b/net/ndi/src/ndisrc/imp.rs @@ -0,0 +1,632 @@ +use gst::prelude::*; +use gst::subclass::prelude::*; +use gst::{gst_debug, gst_error}; +use gst_base::prelude::*; +use gst_base::subclass::base_src::CreateSuccess; +use gst_base::subclass::prelude::*; + +use std::sync::Mutex; +use std::{i32, u32}; + +use once_cell::sync::Lazy; + +use crate::ndisys; + +use crate::ndisrcmeta; +use crate::Buffer; +use crate::Receiver; +use crate::ReceiverControlHandle; +use crate::ReceiverItem; +use crate::RecvColorFormat; +use crate::TimestampMode; +use crate::DEFAULT_RECEIVER_NDI_NAME; + +static CAT: Lazy = Lazy::new(|| { + gst::DebugCategory::new( + "ndisrc", + gst::DebugColorFlags::empty(), + Some("NewTek NDI Source"), + ) +}); + +#[derive(Debug, Clone)] +struct Settings { + ndi_name: Option, + url_address: Option, + connect_timeout: u32, + timeout: u32, + max_queue_length: u32, + receiver_ndi_name: String, + bandwidth: ndisys::NDIlib_recv_bandwidth_e, + color_format: RecvColorFormat, + timestamp_mode: TimestampMode, +} + +impl Default for Settings { + fn default() -> Self { + Settings { + ndi_name: None, + url_address: None, + receiver_ndi_name: DEFAULT_RECEIVER_NDI_NAME.clone(), + connect_timeout: 10000, + timeout: 5000, + max_queue_length: 10, + bandwidth: ndisys::NDIlib_recv_bandwidth_highest, + color_format: RecvColorFormat::UyvyBgra, + timestamp_mode: TimestampMode::ReceiveTimeTimecode, + } + } +} + +struct State { + video_info: Option, + video_caps: Option, + audio_info: Option, + audio_caps: Option, + current_latency: Option, + receiver: Option, +} + +impl Default for State { + fn default() -> State { + State { + video_info: None, + video_caps: None, + audio_info: None, + audio_caps: None, + current_latency: gst::ClockTime::NONE, + receiver: None, + } + } +} + +pub struct NdiSrc { + settings: Mutex, + state: Mutex, + receiver_controller: Mutex>, +} + +#[glib::object_subclass] +impl ObjectSubclass for NdiSrc { + const NAME: &'static str = "NdiSrc"; + type Type = super::NdiSrc; + type ParentType = gst_base::BaseSrc; + + fn new() -> Self { + Self { + settings: Mutex::new(Default::default()), + state: Mutex::new(Default::default()), + receiver_controller: Mutex::new(None), + } + } +} + +impl ObjectImpl for NdiSrc { + fn properties() -> &'static [glib::ParamSpec] { + static PROPERTIES: Lazy> = Lazy::new(|| { + vec![ + glib::ParamSpecString::new( + "ndi-name", + "NDI Name", + "NDI stream name of the sender", + None, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecString::new( + "url-address", + "URL/Address", + "URL/address and port of the sender, e.g. 
127.0.0.1:5961", + None, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecString::new( + "receiver-ndi-name", + "Receiver NDI Name", + "NDI stream name of this receiver", + Some(&*DEFAULT_RECEIVER_NDI_NAME), + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecUInt::new( + "connect-timeout", + "Connect Timeout", + "Connection timeout in ms", + 0, + u32::MAX, + 10000, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecUInt::new( + "timeout", + "Timeout", + "Receive timeout in ms", + 0, + u32::MAX, + 5000, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecUInt::new( + "max-queue-length", + "Max Queue Length", + "Maximum receive queue length", + 0, + u32::MAX, + 10, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecInt::new( + "bandwidth", + "Bandwidth", + "Bandwidth, -10 metadata-only, 10 audio-only, 100 highest", + -10, + 100, + 100, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecEnum::new( + "color-format", + "Color Format", + "Receive color format", + RecvColorFormat::static_type(), + RecvColorFormat::UyvyBgra as u32 as i32, + glib::ParamFlags::READWRITE, + ), + glib::ParamSpecEnum::new( + "timestamp-mode", + "Timestamp Mode", + "Timestamp information to use for outgoing PTS", + TimestampMode::static_type(), + TimestampMode::ReceiveTimeTimecode as i32, + glib::ParamFlags::READWRITE, + ), + ] + }); + + PROPERTIES.as_ref() + } + + fn constructed(&self, obj: &Self::Type) { + self.parent_constructed(obj); + + // Initialize live-ness and notify the base class that + // we'd like to operate in Time format + obj.set_live(true); + obj.set_format(gst::Format::Time); + } + + fn set_property( + &self, + obj: &Self::Type, + _id: usize, + value: &glib::Value, + pspec: &glib::ParamSpec, + ) { + match pspec.name() { + "ndi-name" => { + let mut settings = self.settings.lock().unwrap(); + let ndi_name = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing ndi-name from {:?} to {:?}", + settings.ndi_name, + ndi_name, + ); + settings.ndi_name = ndi_name; + } + "url-address" => { + let mut settings = self.settings.lock().unwrap(); + let url_address = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing url-address from {:?} to {:?}", + settings.url_address, + url_address, + ); + settings.url_address = url_address; + } + "receiver-ndi-name" => { + let mut settings = self.settings.lock().unwrap(); + let receiver_ndi_name = value.get::>().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing receiver-ndi-name from {:?} to {:?}", + settings.receiver_ndi_name, + receiver_ndi_name, + ); + settings.receiver_ndi_name = + receiver_ndi_name.unwrap_or_else(|| DEFAULT_RECEIVER_NDI_NAME.clone()); + } + "connect-timeout" => { + let mut settings = self.settings.lock().unwrap(); + let connect_timeout = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing connect-timeout from {} to {}", + settings.connect_timeout, + connect_timeout, + ); + settings.connect_timeout = connect_timeout; + } + "timeout" => { + let mut settings = self.settings.lock().unwrap(); + let timeout = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing timeout from {} to {}", + settings.timeout, + timeout, + ); + settings.timeout = timeout; + } + "max-queue-length" => { + let mut settings = self.settings.lock().unwrap(); + let max_queue_length = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing max-queue-length from {} to {}", + settings.max_queue_length, + max_queue_length, + ); + settings.max_queue_length = max_queue_length; + } + "bandwidth" => { + let mut 
settings = self.settings.lock().unwrap(); + let bandwidth = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing bandwidth from {} to {}", + settings.bandwidth, + bandwidth, + ); + settings.bandwidth = bandwidth; + } + "color-format" => { + let mut settings = self.settings.lock().unwrap(); + let color_format = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing color format from {:?} to {:?}", + settings.color_format, + color_format, + ); + settings.color_format = color_format; + } + "timestamp-mode" => { + let mut settings = self.settings.lock().unwrap(); + let timestamp_mode = value.get().unwrap(); + gst_debug!( + CAT, + obj: obj, + "Changing timestamp mode from {:?} to {:?}", + settings.timestamp_mode, + timestamp_mode + ); + if settings.timestamp_mode != timestamp_mode { + let _ = obj.post_message(gst::message::Latency::builder().src(obj).build()); + } + settings.timestamp_mode = timestamp_mode; + } + _ => unimplemented!(), + } + } + + fn property(&self, _obj: &Self::Type, _id: usize, pspec: &glib::ParamSpec) -> glib::Value { + match pspec.name() { + "ndi-name" => { + let settings = self.settings.lock().unwrap(); + settings.ndi_name.to_value() + } + "url-address" => { + let settings = self.settings.lock().unwrap(); + settings.url_address.to_value() + } + "receiver-ndi-name" => { + let settings = self.settings.lock().unwrap(); + settings.receiver_ndi_name.to_value() + } + "connect-timeout" => { + let settings = self.settings.lock().unwrap(); + settings.connect_timeout.to_value() + } + "timeout" => { + let settings = self.settings.lock().unwrap(); + settings.timeout.to_value() + } + "max-queue-length" => { + let settings = self.settings.lock().unwrap(); + settings.max_queue_length.to_value() + } + "bandwidth" => { + let settings = self.settings.lock().unwrap(); + settings.bandwidth.to_value() + } + "color-format" => { + let settings = self.settings.lock().unwrap(); + settings.color_format.to_value() + } + "timestamp-mode" => { + let settings = self.settings.lock().unwrap(); + settings.timestamp_mode.to_value() + } + _ => unimplemented!(), + } + } +} + +impl GstObjectImpl for NdiSrc {} + +impl ElementImpl for NdiSrc { + fn metadata() -> Option<&'static gst::subclass::ElementMetadata> { + static ELEMENT_METADATA: Lazy = Lazy::new(|| { + gst::subclass::ElementMetadata::new( + "NewTek NDI Source", + "Source/Audio/Video/Network", + "NewTek NDI source", + "Ruben Gonzalez , Daniel Vilar , Sebastian Dröge ", + ) + }); + + Some(&*ELEMENT_METADATA) + } + + fn pad_templates() -> &'static [gst::PadTemplate] { + static PAD_TEMPLATES: Lazy> = Lazy::new(|| { + let src_pad_template = gst::PadTemplate::new( + "src", + gst::PadDirection::Src, + gst::PadPresence::Always, + &gst::Caps::builder("application/x-ndi").build(), + ) + .unwrap(); + + vec![src_pad_template] + }); + + PAD_TEMPLATES.as_ref() + } + + fn change_state( + &self, + element: &Self::Type, + transition: gst::StateChange, + ) -> Result { + match transition { + gst::StateChange::PausedToPlaying => { + if let Some(ref controller) = *self.receiver_controller.lock().unwrap() { + controller.set_playing(true); + } + } + gst::StateChange::PlayingToPaused => { + if let Some(ref controller) = *self.receiver_controller.lock().unwrap() { + controller.set_playing(false); + } + } + gst::StateChange::PausedToReady => { + if let Some(ref controller) = *self.receiver_controller.lock().unwrap() { + controller.shutdown(); + } + } + _ => (), + } + + self.parent_change_state(element, transition) + } +} + +impl BaseSrcImpl for NdiSrc 
{ + fn negotiate(&self, element: &Self::Type) -> Result<(), gst::LoggableError> { + element + .set_caps(&gst::Caps::builder("application/x-ndi").build()) + .map_err(|_| gst::loggable_error!(CAT, "Failed to negotiate caps",)) + } + + fn unlock(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { + gst_debug!(CAT, obj: element, "Unlocking",); + if let Some(ref controller) = *self.receiver_controller.lock().unwrap() { + controller.set_flushing(true); + } + Ok(()) + } + + fn unlock_stop(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { + gst_debug!(CAT, obj: element, "Stop unlocking",); + if let Some(ref controller) = *self.receiver_controller.lock().unwrap() { + controller.set_flushing(false); + } + Ok(()) + } + + fn start(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> { + *self.state.lock().unwrap() = Default::default(); + let settings = self.settings.lock().unwrap().clone(); + + if settings.ndi_name.is_none() && settings.url_address.is_none() { + return Err(gst::error_msg!( + gst::LibraryError::Settings, + ["No NDI name or URL/address given"] + )); + } + + let receiver = Receiver::connect( + element.upcast_ref(), + settings.ndi_name.as_deref(), + settings.url_address.as_deref(), + &settings.receiver_ndi_name, + settings.connect_timeout, + settings.bandwidth, + settings.color_format.into(), + settings.timestamp_mode, + settings.timeout, + settings.max_queue_length as usize, + ); + + match receiver { + None => Err(gst::error_msg!( + gst::ResourceError::NotFound, + ["Could not connect to this source"] + )), + Some(receiver) => { + *self.receiver_controller.lock().unwrap() = + Some(receiver.receiver_control_handle()); + let mut state = self.state.lock().unwrap(); + state.receiver = Some(receiver); + + Ok(()) + } + } + } + + fn stop(&self, _element: &Self::Type) -> Result<(), gst::ErrorMessage> { + if let Some(ref controller) = self.receiver_controller.lock().unwrap().take() { + controller.shutdown(); + } + *self.state.lock().unwrap() = State::default(); + Ok(()) + } + + fn query(&self, element: &Self::Type, query: &mut gst::QueryRef) -> bool { + use gst::QueryView; + + match query.view_mut() { + QueryView::Scheduling(ref mut q) => { + q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0); + q.add_scheduling_modes(&[gst::PadMode::Push]); + true + } + QueryView::Latency(ref mut q) => { + let state = self.state.lock().unwrap(); + let settings = self.settings.lock().unwrap(); + + if let Some(latency) = state.current_latency { + let min = if matches!( + settings.timestamp_mode, + TimestampMode::ReceiveTimeTimecode | TimestampMode::ReceiveTimeTimestamp + ) { + latency + } else { + gst::ClockTime::ZERO + }; + + let max = settings.max_queue_length as u64 * latency; + + gst_debug!( + CAT, + obj: element, + "Returning latency min {} max {}", + min, + max + ); + q.set(true, min, max); + true + } else { + false + } + } + _ => BaseSrcImplExt::parent_query(self, element, query), + } + } + + fn create( + &self, + element: &Self::Type, + _offset: u64, + _buffer: Option<&mut gst::BufferRef>, + _length: u32, + ) -> Result { + let recv = { + let mut state = self.state.lock().unwrap(); + match state.receiver.take() { + Some(recv) => recv, + None => { + gst_error!(CAT, obj: element, "Have no receiver"); + return Err(gst::FlowError::Error); + } + } + }; + + let res = recv.capture(); + + let mut state = self.state.lock().unwrap(); + state.receiver = Some(recv); + + match res { + ReceiverItem::Buffer(buffer) => { + let buffer = match buffer { + Buffer::Audio(mut buffer, info) => { + 
+                        if state.audio_info.as_ref() != Some(&info) {
+                            let caps = info.to_caps().map_err(|_| {
+                                gst::element_error!(
+                                    element,
+                                    gst::ResourceError::Settings,
+                                    ["Invalid audio info received: {:?}", info]
+                                );
+                                gst::FlowError::NotNegotiated
+                            })?;
+                            state.audio_info = Some(info);
+                            state.audio_caps = Some(caps);
+                        }
+
+                        {
+                            let buffer = buffer.get_mut().unwrap();
+                            ndisrcmeta::NdiSrcMeta::add(
+                                buffer,
+                                ndisrcmeta::StreamType::Audio,
+                                state.audio_caps.as_ref().unwrap(),
+                            );
+                        }
+
+                        buffer
+                    }
+                    Buffer::Video(mut buffer, info) => {
+                        let mut latency_changed = false;
+
+                        if state.video_info.as_ref() != Some(&info) {
+                            let caps = info.to_caps().map_err(|_| {
+                                gst::element_error!(
+                                    element,
+                                    gst::ResourceError::Settings,
+                                    ["Invalid video info received: {:?}", info]
+                                );
+                                gst::FlowError::NotNegotiated
+                            })?;
+                            state.video_info = Some(info);
+                            state.video_caps = Some(caps);
+                            latency_changed = state.current_latency != buffer.duration();
+                            state.current_latency = buffer.duration();
+                        }
+
+                        {
+                            let buffer = buffer.get_mut().unwrap();
+                            ndisrcmeta::NdiSrcMeta::add(
+                                buffer,
+                                ndisrcmeta::StreamType::Video,
+                                state.video_caps.as_ref().unwrap(),
+                            );
+                        }
+
+                        drop(state);
+                        if latency_changed {
+                            let _ = element.post_message(
+                                gst::message::Latency::builder().src(element).build(),
+                            );
+                        }
+
+                        buffer
+                    }
+                };
+
+                Ok(CreateSuccess::NewBuffer(buffer))
+            }
+            ReceiverItem::Timeout => Err(gst::FlowError::Eos),
+            ReceiverItem::Flushing => Err(gst::FlowError::Flushing),
+            ReceiverItem::Error(err) => Err(err),
+        }
+    }
+}
diff --git a/net/ndi/src/ndisrc/mod.rs b/net/ndi/src/ndisrc/mod.rs
new file mode 100644
index 00000000..e603d69d
--- /dev/null
+++ b/net/ndi/src/ndisrc/mod.rs
@@ -0,0 +1,19 @@
+use glib::prelude::*;
+
+mod imp;
+
+glib::wrapper! {
+    pub struct NdiSrc(ObjectSubclass<imp::NdiSrc>) @extends gst_base::BaseSrc, gst::Element, gst::Object;
+}
+
+unsafe impl Send for NdiSrc {}
+unsafe impl Sync for NdiSrc {}
+
+pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
+    gst::Element::register(
+        Some(plugin),
+        "ndisrc",
+        gst::Rank::None,
+        NdiSrc::static_type(),
+    )
+}
diff --git a/net/ndi/src/ndisrcdemux/imp.rs b/net/ndi/src/ndisrcdemux/imp.rs
new file mode 100644
index 00000000..a9589a75
--- /dev/null
+++ b/net/ndi/src/ndisrcdemux/imp.rs
@@ -0,0 +1,312 @@
+use gst::prelude::*;
+use gst::subclass::prelude::*;
+use gst::{gst_debug, gst_error, gst_log};
+
+use std::sync::Mutex;
+
+use once_cell::sync::Lazy;
+
+use crate::ndisrcmeta;
+
+static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
+    gst::DebugCategory::new(
+        "ndisrcdemux",
+        gst::DebugColorFlags::empty(),
+        Some("NewTek NDI Source Demuxer"),
+    )
+});
+
+#[derive(Default)]
+struct State {
+    combiner: gst_base::UniqueFlowCombiner,
+    video_pad: Option<gst::Pad>,
+    video_caps: Option<gst::Caps>,
+
+    audio_pad: Option<gst::Pad>,
+    audio_caps: Option<gst::Caps>,
+}
+
+pub struct NdiSrcDemux {
+    sinkpad: gst::Pad,
+    state: Mutex<State>,
+}
+
+#[glib::object_subclass]
+impl ObjectSubclass for NdiSrcDemux {
+    const NAME: &'static str = "NdiSrcDemux";
+    type Type = super::NdiSrcDemux;
+    type ParentType = gst::Element;
+
+    fn with_class(klass: &Self::Class) -> Self {
+        let templ = klass.pad_template("sink").unwrap();
+        let sinkpad = gst::Pad::builder_with_template(&templ, Some("sink"))
+            .flags(gst::PadFlags::FIXED_CAPS)
+            .chain_function(|pad, parent, buffer| {
+                NdiSrcDemux::catch_panic_pad_function(
+                    parent,
+                    || Err(gst::FlowError::Error),
+                    |self_, element| self_.sink_chain(pad, element, buffer),
+                )
+            })
+            .event_function(|pad, parent, event| {
+                NdiSrcDemux::catch_panic_pad_function(
+                    parent,
+                    ||
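+                    // catch_panic_pad_function() converts a panic inside the pad
+                    // function into an element error and returns this fallback value
+                    // instead of unwinding across the C boundary:
+                    // Err(FlowError::Error) for the chain function above, `false`
+                    // for the event function here.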
false, + |self_, element| self_.sink_event(pad, element, event), + ) + }) + .build(); + + Self { + sinkpad, + state: Mutex::new(State::default()), + } + } +} + +impl ObjectImpl for NdiSrcDemux { + fn constructed(&self, obj: &Self::Type) { + self.parent_constructed(obj); + + obj.add_pad(&self.sinkpad).unwrap(); + } +} + +impl GstObjectImpl for NdiSrcDemux {} + +impl ElementImpl for NdiSrcDemux { + fn metadata() -> Option<&'static gst::subclass::ElementMetadata> { + static ELEMENT_METADATA: Lazy = Lazy::new(|| { + gst::subclass::ElementMetadata::new( + "NewTek NDI Source Demuxer", + "Demuxer/Audio/Video", + "NewTek NDI source demuxer", + "Sebastian Dröge ", + ) + }); + + Some(&*ELEMENT_METADATA) + } + + fn pad_templates() -> &'static [gst::PadTemplate] { + static PAD_TEMPLATES: Lazy> = Lazy::new(|| { + let sink_pad_template = gst::PadTemplate::new( + "sink", + gst::PadDirection::Sink, + gst::PadPresence::Always, + &gst::Caps::builder("application/x-ndi").build(), + ) + .unwrap(); + + let audio_src_pad_template = gst::PadTemplate::new( + "audio", + gst::PadDirection::Src, + gst::PadPresence::Sometimes, + &gst::Caps::builder("audio/x-raw").build(), + ) + .unwrap(); + + let video_src_pad_template = gst::PadTemplate::new( + "video", + gst::PadDirection::Src, + gst::PadPresence::Sometimes, + &gst::Caps::builder("video/x-raw").build(), + ) + .unwrap(); + + vec![ + sink_pad_template, + audio_src_pad_template, + video_src_pad_template, + ] + }); + + PAD_TEMPLATES.as_ref() + } + + fn change_state( + &self, + element: &Self::Type, + transition: gst::StateChange, + ) -> Result { + let res = self.parent_change_state(element, transition)?; + + match transition { + gst::StateChange::PausedToReady => { + let mut state = self.state.lock().unwrap(); + for pad in [state.audio_pad.take(), state.video_pad.take()] + .iter() + .flatten() + { + element.remove_pad(pad).unwrap(); + } + *state = State::default(); + } + _ => (), + } + + Ok(res) + } +} + +impl NdiSrcDemux { + fn sink_chain( + &self, + pad: &gst::Pad, + element: &super::NdiSrcDemux, + mut buffer: gst::Buffer, + ) -> Result { + gst_log!(CAT, obj: pad, "Handling buffer {:?}", buffer); + + let meta = buffer.make_mut().meta_mut::().ok_or_else(|| { + gst_error!(CAT, obj: element, "Buffer without NDI source meta"); + gst::FlowError::Error + })?; + + let mut events = vec![]; + let srcpad; + let mut add_pad = false; + + let mut state = self.state.lock().unwrap(); + let caps = meta.caps(); + match meta.stream_type() { + ndisrcmeta::StreamType::Audio => { + if let Some(ref pad) = state.audio_pad { + srcpad = pad.clone(); + } else { + gst_debug!(CAT, obj: element, "Adding audio pad with caps {}", caps); + + let klass = element.element_class(); + let templ = klass.pad_template("audio").unwrap(); + let pad = gst::Pad::builder_with_template(&templ, Some("audio")) + .flags(gst::PadFlags::FIXED_CAPS) + .build(); + + let mut caps_event = Some(gst::event::Caps::new(&caps)); + + self.sinkpad.sticky_events_foreach(|ev| { + if ev.type_() < gst::EventType::Caps { + events.push(ev.clone()); + } else { + if let Some(ev) = caps_event.take() { + events.push(ev); + } + + if ev.type_() != gst::EventType::Caps { + events.push(ev.clone()); + } + } + + std::ops::ControlFlow::Continue(gst::EventForeachAction::Keep) + }); + + state.audio_caps = Some(caps.clone()); + state.audio_pad = Some(pad.clone()); + + let _ = pad.set_active(true); + for ev in events.drain(..) 
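+                    // Sticky events must arrive on the new pad in canonical order
+                    // (stream-start before caps before segment). The foreach above
+                    // copies everything ordered before CAPS unchanged, substitutes
+                    // our own caps event in the CAPS slot and keeps the rest, so the
+                    // loop below can replay them in one pass.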
{ + let _ = pad.store_sticky_event(&ev); + } + + state.combiner.add_pad(&pad); + + add_pad = true; + srcpad = pad; + } + + if state.audio_caps.as_ref() != Some(&caps) { + gst_debug!(CAT, obj: element, "Audio caps changed to {}", caps); + events.push(gst::event::Caps::new(&caps)); + state.audio_caps = Some(caps); + } + } + ndisrcmeta::StreamType::Video => { + if let Some(ref pad) = state.video_pad { + srcpad = pad.clone(); + } else { + gst_debug!(CAT, obj: element, "Adding video pad with caps {}", caps); + + let klass = element.element_class(); + let templ = klass.pad_template("video").unwrap(); + let pad = gst::Pad::builder_with_template(&templ, Some("video")) + .flags(gst::PadFlags::FIXED_CAPS) + .build(); + + let mut caps_event = Some(gst::event::Caps::new(&caps)); + + self.sinkpad.sticky_events_foreach(|ev| { + if ev.type_() < gst::EventType::Caps { + events.push(ev.clone()); + } else { + if let Some(ev) = caps_event.take() { + events.push(ev); + } + + if ev.type_() != gst::EventType::Caps { + events.push(ev.clone()); + } + } + + std::ops::ControlFlow::Continue(gst::EventForeachAction::Keep) + }); + + state.video_caps = Some(caps.clone()); + state.video_pad = Some(pad.clone()); + + let _ = pad.set_active(true); + for ev in events.drain(..) { + let _ = pad.store_sticky_event(&ev); + } + + state.combiner.add_pad(&pad); + + add_pad = true; + srcpad = pad; + } + + if state.video_caps.as_ref() != Some(&caps) { + gst_debug!(CAT, obj: element, "Video caps changed to {}", caps); + events.push(gst::event::Caps::new(&caps)); + state.video_caps = Some(caps); + } + } + } + drop(state); + meta.remove().unwrap(); + + if add_pad { + element.add_pad(&srcpad).unwrap(); + } + + for ev in events { + srcpad.push_event(ev); + } + + let res = srcpad.push(buffer); + + let mut state = self.state.lock().unwrap(); + state.combiner.update_pad_flow(&srcpad, res) + } + + fn sink_event(&self, + pad: &gst::Pad, + element: &super::NdiSrcDemux, + event: gst::Event + ) -> bool { + use gst::EventView; + + gst_log!(CAT, obj: pad, "Handling event {:?}", event); + if let EventView::Eos(_) = event.view() { + if element.num_src_pads() == 0 { + // error out on EOS if no src pad are available + gst::element_error!( + element, + gst::StreamError::Demux, + ["EOS without available srcpad(s)"] + ); + } + } + pad.event_default(Some(element), event) + } + +} diff --git a/net/ndi/src/ndisrcdemux/mod.rs b/net/ndi/src/ndisrcdemux/mod.rs new file mode 100644 index 00000000..12c78f72 --- /dev/null +++ b/net/ndi/src/ndisrcdemux/mod.rs @@ -0,0 +1,19 @@ +use glib::prelude::*; + +mod imp; + +glib::wrapper! 
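+// glib::wrapper! generates the public, refcounted NdiSrcDemux type around the
+// ObjectSubclass implementation in imp. Once the plugin is loaded, the element
+// can be created by name like any other, e.g. (sketch, gstreamer-rs 0.18 API):
+//
+//     let demux = gst::ElementFactory::make("ndisrcdemux", None)?;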
{ + pub struct NdiSrcDemux(ObjectSubclass) @extends gst::Element, gst::Object; +} + +unsafe impl Send for NdiSrcDemux {} +unsafe impl Sync for NdiSrcDemux {} + +pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> { + gst::Element::register( + Some(plugin), + "ndisrcdemux", + gst::Rank::Primary, + NdiSrcDemux::static_type(), + ) +} diff --git a/net/ndi/src/ndisrcmeta.rs b/net/ndi/src/ndisrcmeta.rs new file mode 100644 index 00000000..c3f04651 --- /dev/null +++ b/net/ndi/src/ndisrcmeta.rs @@ -0,0 +1,158 @@ +use gst::prelude::*; +use std::fmt; +use std::mem; + +#[repr(transparent)] +pub struct NdiSrcMeta(imp::NdiSrcMeta); + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum StreamType { + Audio, + Video, +} + +unsafe impl Send for NdiSrcMeta {} +unsafe impl Sync for NdiSrcMeta {} + +impl NdiSrcMeta { + pub fn add<'a>( + buffer: &'a mut gst::BufferRef, + stream_type: StreamType, + caps: &gst::Caps, + ) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> { + unsafe { + // Manually dropping because gst_buffer_add_meta() takes ownership of the + // content of the struct + let mut params = mem::ManuallyDrop::new(imp::NdiSrcMetaParams { + caps: caps.clone(), + stream_type, + }); + + let meta = gst::ffi::gst_buffer_add_meta( + buffer.as_mut_ptr(), + imp::ndi_src_meta_get_info(), + &mut *params as *mut imp::NdiSrcMetaParams as glib::ffi::gpointer, + ) as *mut imp::NdiSrcMeta; + + Self::from_mut_ptr(buffer, meta) + } + } + + pub fn stream_type(&self) -> StreamType { + self.0.stream_type + } + + pub fn caps(&self) -> gst::Caps { + self.0.caps.clone() + } +} + +unsafe impl MetaAPI for NdiSrcMeta { + type GstType = imp::NdiSrcMeta; + + fn meta_api() -> glib::Type { + imp::ndi_src_meta_api_get_type() + } +} + +impl fmt::Debug for NdiSrcMeta { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("NdiSrcMeta") + .field("stream_type", &self.stream_type()) + .field("caps", &self.caps()) + .finish() + } +} + +mod imp { + use super::StreamType; + use glib::translate::*; + use once_cell::sync::Lazy; + use std::mem; + use std::ptr; + + pub(super) struct NdiSrcMetaParams { + pub caps: gst::Caps, + pub stream_type: StreamType, + } + + #[repr(C)] + pub struct NdiSrcMeta { + parent: gst::ffi::GstMeta, + pub(super) caps: gst::Caps, + pub(super) stream_type: StreamType, + } + + pub(super) fn ndi_src_meta_api_get_type() -> glib::Type { + static TYPE: Lazy = Lazy::new(|| unsafe { + let t = from_glib(gst::ffi::gst_meta_api_type_register( + b"GstNdiSrcMetaAPI\0".as_ptr() as *const _, + [ptr::null::()].as_ptr() as *mut *const _, + )); + + assert_ne!(t, glib::Type::INVALID); + + t + }); + + *TYPE + } + + unsafe extern "C" fn ndi_src_meta_init( + meta: *mut gst::ffi::GstMeta, + params: glib::ffi::gpointer, + _buffer: *mut gst::ffi::GstBuffer, + ) -> glib::ffi::gboolean { + assert!(!params.is_null()); + + let meta = &mut *(meta as *mut NdiSrcMeta); + let params = ptr::read(params as *const NdiSrcMetaParams); + + ptr::write(&mut meta.stream_type, params.stream_type); + ptr::write(&mut meta.caps, params.caps); + + true.into_glib() + } + + unsafe extern "C" fn ndi_src_meta_free( + meta: *mut gst::ffi::GstMeta, + _buffer: *mut gst::ffi::GstBuffer, + ) { + let meta = &mut *(meta as *mut NdiSrcMeta); + + ptr::drop_in_place(&mut meta.stream_type); + ptr::drop_in_place(&mut meta.caps); + } + + unsafe extern "C" fn ndi_src_meta_transform( + _dest: *mut gst::ffi::GstBuffer, + _meta: *mut gst::ffi::GstMeta, + _buffer: *mut gst::ffi::GstBuffer, + _type_: glib::ffi::GQuark, + _data: 
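+        // Returning false here means the meta is dropped whenever a buffer is
+        // copied or transformed. NdiSrcMeta only needs to survive the direct
+        // ndisrc -> ndisrcdemux hop, so no transform support is required.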
glib::ffi::gpointer, + ) -> glib::ffi::gboolean { + false.into_glib() + } + + pub(super) fn ndi_src_meta_get_info() -> *const gst::ffi::GstMetaInfo { + struct MetaInfo(ptr::NonNull); + unsafe impl Send for MetaInfo {} + unsafe impl Sync for MetaInfo {} + + static META_INFO: Lazy = Lazy::new(|| unsafe { + MetaInfo( + ptr::NonNull::new(gst::ffi::gst_meta_register( + ndi_src_meta_api_get_type().into_glib(), + b"GstNdiSrcMeta\0".as_ptr() as *const _, + mem::size_of::(), + Some(ndi_src_meta_init), + Some(ndi_src_meta_free), + Some(ndi_src_meta_transform), + ) as *mut gst::ffi::GstMetaInfo) + .expect("Failed to register meta API"), + ) + }); + + META_INFO.0.as_ptr() + } +} diff --git a/net/ndi/src/ndisys.rs b/net/ndi/src/ndisys.rs new file mode 100644 index 00000000..d5695570 --- /dev/null +++ b/net/ndi/src/ndisys.rs @@ -0,0 +1,326 @@ +#![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)] + +#[cfg_attr( + all(target_arch = "x86_64", target_os = "windows"), + link(name = "Processing.NDI.Lib.x64") +)] +#[cfg_attr( + all(target_arch = "x86", target_os = "windows"), + link(name = "Processing.NDI.Lib.x86") +)] +#[cfg_attr( + not(any(target_os = "windows", target_os = "macos")), + link(name = "ndi") +)] +extern "C" { + pub fn NDIlib_initialize() -> bool; + pub fn NDIlib_destroy(); + pub fn NDIlib_find_create_v2( + p_create_settings: *const NDIlib_find_create_t, + ) -> NDIlib_find_instance_t; + pub fn NDIlib_find_destroy(p_instance: NDIlib_find_instance_t); + pub fn NDIlib_find_wait_for_sources( + p_instance: NDIlib_find_instance_t, + timeout_in_ms: u32, + ) -> bool; + pub fn NDIlib_find_get_current_sources( + p_instance: NDIlib_find_instance_t, + p_no_sources: *mut u32, + ) -> *const NDIlib_source_t; + pub fn NDIlib_recv_create_v3( + p_create_settings: *const NDIlib_recv_create_v3_t, + ) -> NDIlib_recv_instance_t; + pub fn NDIlib_recv_destroy(p_instance: NDIlib_recv_instance_t); + pub fn NDIlib_recv_set_tally( + p_instance: NDIlib_recv_instance_t, + p_tally: *const NDIlib_tally_t, + ) -> bool; + pub fn NDIlib_recv_send_metadata( + p_instance: NDIlib_recv_instance_t, + p_metadata: *const NDIlib_metadata_frame_t, + ) -> bool; + pub fn NDIlib_recv_capture_v3( + p_instance: NDIlib_recv_instance_t, + p_video_data: *mut NDIlib_video_frame_v2_t, + p_audio_data: *mut NDIlib_audio_frame_v3_t, + p_metadata: *mut NDIlib_metadata_frame_t, + timeout_in_ms: u32, + ) -> NDIlib_frame_type_e; + pub fn NDIlib_recv_free_video_v2( + p_instance: NDIlib_recv_instance_t, + p_video_data: *mut NDIlib_video_frame_v2_t, + ); + pub fn NDIlib_recv_free_audio_v3( + p_instance: NDIlib_recv_instance_t, + p_audio_data: *mut NDIlib_audio_frame_v3_t, + ); + pub fn NDIlib_recv_free_metadata( + p_instance: NDIlib_recv_instance_t, + p_metadata: *mut NDIlib_metadata_frame_t, + ); + pub fn NDIlib_recv_get_queue( + p_instance: NDIlib_recv_instance_t, + p_total: *mut NDIlib_recv_queue_t, + ); + pub fn NDIlib_send_create( + p_create_settings: *const NDIlib_send_create_t, + ) -> NDIlib_send_instance_t; + pub fn NDIlib_send_destroy(p_instance: NDIlib_send_instance_t); + pub fn NDIlib_send_send_video_v2( + p_instance: NDIlib_send_instance_t, + p_video_data: *const NDIlib_video_frame_v2_t, + ); + pub fn NDIlib_send_send_audio_v3( + p_instance: NDIlib_send_instance_t, + p_audio_data: *const NDIlib_audio_frame_v3_t, + ); +} + +pub type NDIlib_find_instance_t = *mut ::std::os::raw::c_void; + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_find_create_t { + pub show_local_sources: bool, + pub p_groups: *const 
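+    // All #[repr(C)] structs below mirror the corresponding NDI SDK
+    // (Processing.NDI.Lib) C structs one-to-one; field order and types must
+    // only ever change together with the SDK headers.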
::std::os::raw::c_char, + pub p_extra_ips: *const ::std::os::raw::c_char, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_source_t { + pub p_ndi_name: *const ::std::os::raw::c_char, + pub p_url_address: *const ::std::os::raw::c_char, +} + +#[repr(i32)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum NDIlib_frame_type_e { + NDIlib_frame_type_none = 0, + NDIlib_frame_type_video = 1, + NDIlib_frame_type_audio = 2, + NDIlib_frame_type_metadata = 3, + NDIlib_frame_type_error = 4, + NDIlib_frame_type_status_change = 100, +} + +pub type NDIlib_recv_bandwidth_e = i32; + +pub const NDIlib_recv_bandwidth_metadata_only: NDIlib_recv_bandwidth_e = -10; +pub const NDIlib_recv_bandwidth_audio_only: NDIlib_recv_bandwidth_e = 10; +pub const NDIlib_recv_bandwidth_lowest: NDIlib_recv_bandwidth_e = 0; +pub const NDIlib_recv_bandwidth_highest: NDIlib_recv_bandwidth_e = 100; + +pub type NDIlib_recv_color_format_e = u32; +pub const NDIlib_recv_color_format_BGRX_BGRA: NDIlib_recv_color_format_e = 0; +pub const NDIlib_recv_color_format_UYVY_BGRA: NDIlib_recv_color_format_e = 1; +pub const NDIlib_recv_color_format_RGBX_RGBA: NDIlib_recv_color_format_e = 2; +pub const NDIlib_recv_color_format_UYVY_RGBA: NDIlib_recv_color_format_e = 3; +pub const NDIlib_recv_color_format_fastest: NDIlib_recv_color_format_e = 100; +pub const NDIlib_recv_color_format_best: NDIlib_recv_color_format_e = 101; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed: NDIlib_recv_color_format_e = 300; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v2: NDIlib_recv_color_format_e = 301; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v3: NDIlib_recv_color_format_e = 302; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v3_with_audio: NDIlib_recv_color_format_e = 304; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v4: NDIlib_recv_color_format_e = 303; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v4_with_audio: NDIlib_recv_color_format_e = 305; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v5: NDIlib_recv_color_format_e = 307; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_recv_color_format_ex_compressed_v5_with_audio: NDIlib_recv_color_format_e = 308; + +const fn make_fourcc(fourcc: &[u8; 4]) -> u32 { + ((fourcc[0] as u32) << 0) + | ((fourcc[1] as u32) << 8) + | ((fourcc[2] as u32) << 16) + | ((fourcc[3] as u32) << 24) +} + +pub type NDIlib_FourCC_video_type_e = u32; +pub const NDIlib_FourCC_video_type_UYVY: NDIlib_FourCC_video_type_e = make_fourcc(b"UYVY"); +pub const NDIlib_FourCC_video_type_UYVA: NDIlib_FourCC_video_type_e = make_fourcc(b"UYVA"); +pub const NDIlib_FourCC_video_type_P216: NDIlib_FourCC_video_type_e = make_fourcc(b"P216"); +pub const NDIlib_FourCC_video_type_PA16: NDIlib_FourCC_video_type_e = make_fourcc(b"PA16"); +pub const NDIlib_FourCC_video_type_YV12: NDIlib_FourCC_video_type_e = make_fourcc(b"YV12"); +pub const NDIlib_FourCC_video_type_I420: NDIlib_FourCC_video_type_e = make_fourcc(b"I420"); +pub const NDIlib_FourCC_video_type_NV12: NDIlib_FourCC_video_type_e = make_fourcc(b"NV12"); +pub const NDIlib_FourCC_video_type_BGRA: NDIlib_FourCC_video_type_e = make_fourcc(b"BGRA"); +pub const NDIlib_FourCC_video_type_BGRX: NDIlib_FourCC_video_type_e = make_fourcc(b"BGRX"); +pub const NDIlib_FourCC_video_type_RGBA: NDIlib_FourCC_video_type_e = 
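+// make_fourcc() packs the four ASCII bytes little-endian, first byte in the
+// lowest bits, matching the NDI SDK's FourCC encoding. Worked example:
+//
+//     make_fourcc(b"UYVY") == 0x55 | (0x59 << 8) | (0x56 << 16) | (0x59 << 24)
+//                          == 0x59565955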
make_fourcc(b"RGBA"); +pub const NDIlib_FourCC_video_type_RGBX: NDIlib_FourCC_video_type_e = make_fourcc(b"RGBX"); + +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"SHQ0"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"SHQ2"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"SHQ7"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"shq0"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"shq2"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"shq7"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_H264_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"H264"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_H264_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"h264"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_HEVC_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"HEVC"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_HEVC_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"hevc"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_H264_alpha_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"A264"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_H264_alpha_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"a264"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_HEVC_alpha_highest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"AEVC"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_video_type_ex_HEVC_alpha_lowest_bandwidth: NDIlib_FourCC_video_type_e = + make_fourcc(b"aevc"); + +pub type NDIlib_FourCC_audio_type_e = u32; +pub const NDIlib_FourCC_audio_type_FLTp: NDIlib_FourCC_video_type_e = make_fourcc(b"FLTp"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_audio_type_AAC: NDIlib_FourCC_audio_type_e = 0x000000ff; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_FourCC_audio_type_Opus: NDIlib_FourCC_audio_type_e = make_fourcc(b"Opus"); + +#[cfg(feature = "advanced-sdk")] +pub type NDIlib_compressed_FourCC_type_e = u32; +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_compressed_FourCC_type_H264: NDIlib_compressed_FourCC_type_e = + make_fourcc(b"H264"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_compressed_FourCC_type_HEVC: NDIlib_compressed_FourCC_type_e = + make_fourcc(b"HEVC"); +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_compressed_FourCC_type_AAC: NDIlib_compressed_FourCC_type_e = 0x000000ff; + +#[repr(u32)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum NDIlib_frame_format_type_e { + NDIlib_frame_format_type_progressive = 1, + NDIlib_frame_format_type_interleaved = 0, + NDIlib_frame_format_type_field_0 = 2, + NDIlib_frame_format_type_field_1 = 3, +} + +pub const NDIlib_send_timecode_synthesize: i64 = ::std::i64::MAX; +pub const NDIlib_recv_timestamp_undefined: i64 = ::std::i64::MAX; + +#[repr(C)] +#[derive(Debug, 
Copy, Clone)] +pub struct NDIlib_recv_create_v3_t { + pub source_to_connect_to: NDIlib_source_t, + pub color_format: NDIlib_recv_color_format_e, + pub bandwidth: NDIlib_recv_bandwidth_e, + pub allow_video_fields: bool, + pub p_ndi_recv_name: *const ::std::os::raw::c_char, +} + +pub type NDIlib_recv_instance_t = *mut ::std::os::raw::c_void; + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_send_create_t { + pub p_ndi_name: *const ::std::os::raw::c_char, + pub p_groups: *const ::std::os::raw::c_char, + pub clock_video: bool, + pub clock_audio: bool, +} + +pub type NDIlib_send_instance_t = *mut ::std::os::raw::c_void; + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_tally_t { + pub on_program: bool, + pub on_preview: bool, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_recv_queue_t { + pub video_frames: i32, + pub audio_frames: i32, + pub metadata_frames: i32, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_metadata_frame_t { + pub length: ::std::os::raw::c_int, + pub timecode: i64, + pub p_data: *const ::std::os::raw::c_char, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_video_frame_v2_t { + pub xres: ::std::os::raw::c_int, + pub yres: ::std::os::raw::c_int, + pub FourCC: NDIlib_FourCC_video_type_e, + pub frame_rate_N: ::std::os::raw::c_int, + pub frame_rate_D: ::std::os::raw::c_int, + pub picture_aspect_ratio: ::std::os::raw::c_float, + pub frame_format_type: NDIlib_frame_format_type_e, + pub timecode: i64, + pub p_data: *const ::std::os::raw::c_char, + pub line_stride_or_data_size_in_bytes: ::std::os::raw::c_int, + pub p_metadata: *const ::std::os::raw::c_char, + pub timestamp: i64, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_audio_frame_v3_t { + pub sample_rate: ::std::os::raw::c_int, + pub no_channels: ::std::os::raw::c_int, + pub no_samples: ::std::os::raw::c_int, + pub timecode: i64, + pub FourCC: NDIlib_FourCC_audio_type_e, + pub p_data: *const ::std::os::raw::c_float, + pub channel_stride_or_data_size_in_bytes: ::std::os::raw::c_int, + pub p_metadata: *const ::std::os::raw::c_char, + pub timestamp: i64, +} + +#[cfg(feature = "advanced-sdk")] +#[repr(packed)] +#[derive(Debug, Copy, Clone)] +pub struct NDIlib_compressed_packet_t { + pub version: u32, + pub fourcc: NDIlib_compressed_FourCC_type_e, + pub pts: i64, + pub dts: i64, + pub reserved: u64, + pub flags: u32, + pub data_size: u32, + pub extra_data_size: u32, +} + +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_compressed_packet_flags_keyframe: u32 = 1; + +#[cfg(feature = "advanced-sdk")] +pub const NDIlib_compressed_packet_version_0: u32 = 44; diff --git a/net/ndi/src/receiver.rs b/net/ndi/src/receiver.rs new file mode 100644 index 00000000..15514f9c --- /dev/null +++ b/net/ndi/src/receiver.rs @@ -0,0 +1,1618 @@ +use glib::prelude::*; +use gst::prelude::*; +use gst::{gst_debug, gst_error, gst_log, gst_trace, gst_warning}; +use gst_video::prelude::*; + +use byte_slice_cast::*; + +use std::cmp; +use std::collections::VecDeque; +use std::sync::{Arc, Condvar, Mutex, Weak}; +use std::thread; + +use super::*; + +static CAT: Lazy = Lazy::new(|| { + gst::DebugCategory::new( + "ndireceiver", + gst::DebugColorFlags::empty(), + Some("NewTek NDI receiver"), + ) +}); + +#[derive(Clone)] +pub struct Receiver(Arc); + +#[derive(Debug, PartialEq, Eq)] +pub enum AudioInfo { + AudioInfo(gst_audio::AudioInfo), + #[cfg(feature = "advanced-sdk")] + OpusInfo { + sample_rate: i32, + no_channels: i32, + }, + #[cfg(feature = 
"advanced-sdk")] + AacInfo { + sample_rate: i32, + no_channels: i32, + codec_data: [u8; 2], + }, +} + +impl AudioInfo { + pub fn to_caps(&self) -> Result { + match self { + AudioInfo::AudioInfo(ref info) => info.to_caps(), + #[cfg(feature = "advanced-sdk")] + AudioInfo::OpusInfo { + sample_rate, + no_channels, + } => Ok(gst::Caps::builder("audio/x-opus") + .field("channels", *no_channels) + .field("rate", *sample_rate) + .field("channel-mapping-family", 0i32) + .build()), + #[cfg(feature = "advanced-sdk")] + AudioInfo::AacInfo { + sample_rate, + no_channels, + codec_data, + } => Ok(gst::Caps::builder("audio/mpeg") + .field("channels", *no_channels) + .field("rate", *sample_rate) + .field("mpegversion", 4i32) + .field("stream-format", "raw") + .field("codec_data", gst::Buffer::from_mut_slice(*codec_data)) + .build()), + } + } +} + +#[derive(Debug, PartialEq, Eq)] +pub enum VideoInfo { + VideoInfo(gst_video::VideoInfo), + #[cfg(feature = "advanced-sdk")] + SpeedHQInfo { + variant: String, + xres: i32, + yres: i32, + fps_n: i32, + fps_d: i32, + par_n: i32, + par_d: i32, + interlace_mode: gst_video::VideoInterlaceMode, + }, + #[cfg(feature = "advanced-sdk")] + H264Info { + xres: i32, + yres: i32, + fps_n: i32, + fps_d: i32, + par_n: i32, + par_d: i32, + interlace_mode: gst_video::VideoInterlaceMode, + }, + #[cfg(feature = "advanced-sdk")] + H265Info { + xres: i32, + yres: i32, + fps_n: i32, + fps_d: i32, + par_n: i32, + par_d: i32, + interlace_mode: gst_video::VideoInterlaceMode, + }, +} + +impl VideoInfo { + pub fn to_caps(&self) -> Result { + match self { + VideoInfo::VideoInfo(ref info) => info.to_caps(), + #[cfg(feature = "advanced-sdk")] + VideoInfo::SpeedHQInfo { + ref variant, + xres, + yres, + fps_n, + fps_d, + par_n, + par_d, + interlace_mode, + } => Ok(gst::Caps::builder("video/x-speedhq") + .field("width", *xres) + .field("height", *yres) + .field("framerate", gst::Fraction::new(*fps_n, *fps_d)) + .field("pixel-aspect-ratio", gst::Fraction::new(*par_n, *par_d)) + .field("interlace-mode", interlace_mode.to_str()) + .field("variant", variant) + .build()), + #[cfg(feature = "advanced-sdk")] + VideoInfo::H264Info { + xres, + yres, + fps_n, + fps_d, + par_n, + par_d, + interlace_mode, + .. + } => Ok(gst::Caps::builder("video/x-h264") + .field("width", *xres) + .field("height", *yres) + .field("framerate", gst::Fraction::new(*fps_n, *fps_d)) + .field("pixel-aspect-ratio", gst::Fraction::new(*par_n, *par_d)) + .field("interlace-mode", interlace_mode.to_str()) + .field("stream-format", "byte-stream") + .field("alignment", "au") + .build()), + #[cfg(feature = "advanced-sdk")] + VideoInfo::H265Info { + xres, + yres, + fps_n, + fps_d, + par_n, + par_d, + interlace_mode, + .. 
+            } => Ok(gst::Caps::builder("video/x-h265")
+                .field("width", *xres)
+                .field("height", *yres)
+                .field("framerate", gst::Fraction::new(*fps_n, *fps_d))
+                .field("pixel-aspect-ratio", gst::Fraction::new(*par_n, *par_d))
+                .field("interlace-mode", interlace_mode.to_str())
+                .field("stream-format", "byte-stream")
+                .field("alignment", "au")
+                .build()),
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum Buffer {
+    Audio(gst::Buffer, AudioInfo),
+    Video(gst::Buffer, VideoInfo),
+}
+
+#[derive(Debug)]
+pub enum ReceiverItem {
+    Buffer(Buffer),
+    Flushing,
+    Timeout,
+    Error(gst::FlowError),
+}
+
+pub struct ReceiverInner {
+    queue: ReceiverQueue,
+    max_queue_length: usize,
+
+    observations: Observations,
+
+    element: glib::WeakRef<gst_base::BaseSrc>,
+    timestamp_mode: TimestampMode,
+
+    timeout: u32,
+    connect_timeout: u32,
+
+    thread: Mutex<Option<thread::JoinHandle<()>>>,
+}
+
+#[derive(Clone)]
+struct ReceiverQueue(Arc<(Mutex<ReceiverQueueInner>, Condvar)>);
+
+struct ReceiverQueueInner {
+    // Set to true when the capture thread should be stopped
+    shutdown: bool,
+
+    // If we're flushing right now and all buffers should simply be discarded
+    // and capture() directly returns Flushing
+    flushing: bool,
+
+    // If we're playing right now or not: if not we simply discard everything captured
+    playing: bool,
+    // Queue containing our buffers. This holds at most max-queue-length buffers at a time.
+    //
+    // On timeout/error will contain a single item and then never be filled again
+    buffer_queue: VecDeque<Buffer>,
+
+    error: Option<gst::FlowError>,
+    timeout: bool,
+}
+
+const WINDOW_LENGTH: u64 = 512;
+const WINDOW_DURATION: u64 = 2_000_000_000;
+
+#[derive(Clone)]
+struct Observations(Arc<Mutex<ObservationsInner>>);
+
+struct ObservationsInner {
+    base_remote_time: Option<u64>,
+    base_local_time: Option<u64>,
+    deltas: VecDeque<i64>,
+    min_delta: i64,
+    skew: i64,
+    filling: bool,
+    window_size: usize,
+}
+
+impl Default for ObservationsInner {
+    fn default() -> ObservationsInner {
+        ObservationsInner {
+            base_local_time: None,
+            base_remote_time: None,
+            deltas: VecDeque::new(),
+            min_delta: 0,
+            skew: 0,
+            filling: true,
+            window_size: 0,
+        }
+    }
+}
+
+impl Observations {
+    fn new() -> Self {
+        Self(Arc::new(Mutex::new(ObservationsInner::default())))
+    }
+
+    // Based on the algorithm used in GStreamer's rtpjitterbuffer, which comes from
+    // Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation over Network Delays":
+    // http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
+    fn process(
+        &self,
+        element: &gst_base::BaseSrc,
+        time: (Option<gst::ClockTime>, gst::ClockTime),
+        duration: Option<gst::ClockTime>,
+    ) -> (gst::ClockTime, Option<gst::ClockTime>, bool) {
+        if time.0.is_none() {
+            return (time.1, duration, false);
+        }
+
+        let time = (time.0.unwrap(), time.1);
+        let remote_time = time.0.nseconds();
+        let local_time = time.1.nseconds();
+
+        gst_trace!(
+            CAT,
+            obj: element,
+            "Local time {}, remote time {}",
+            gst::ClockTime::from_nseconds(local_time),
+            gst::ClockTime::from_nseconds(remote_time),
+        );
+
+        let mut inner = self.0.lock().unwrap();
+
+        let (base_remote_time, base_local_time) =
+            match (inner.base_remote_time, inner.base_local_time) {
+                (Some(remote), Some(local)) => (remote, local),
+                _ => {
+                    gst_debug!(
+                        CAT,
+                        obj: element,
+                        "Initializing base time: local {}, remote {}",
+                        gst::ClockTime::from_nseconds(local_time),
+                        gst::ClockTime::from_nseconds(remote_time),
+                    );
+                    inner.base_remote_time = Some(remote_time);
+                    inner.base_local_time = Some(local_time);
+
+                    return (gst::ClockTime::from_nseconds(local_time), duration, true);
+                }
+            };
+
+        let remote_diff = remote_time.saturating_sub(base_remote_time);
+        let local_diff =
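+        // Skew estimation in a nutshell: for each frame, compare how far the
+        // local (receive) clock and the remote (sender) clock have advanced
+        // since the base point,
+        //
+        //     delta = (local_time - base_local_time) - (remote_time - base_remote_time)
+        //
+        // keep a windowed minimum of delta as the skew estimate, and output
+        //
+        //     out_time = base_local_time + remote_diff + skew
+        //
+        // so output timestamps advance at the sender's rate but stay anchored
+        // to the local clock. Implausible jumps (rate slope outside 0.8..1.2,
+        // or delta more than 1s away from the estimate) reset the window.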
local_time.saturating_sub(base_local_time); + let delta = (local_diff as i64) - (remote_diff as i64); + + gst_trace!( + CAT, + obj: element, + "Local diff {}, remote diff {}, delta {}", + gst::ClockTime::from_nseconds(local_diff), + gst::ClockTime::from_nseconds(remote_diff), + delta, + ); + + if remote_diff > 0 && local_diff > 0 { + let slope = (local_diff as f64) / (remote_diff as f64); + if !(0.8..1.2).contains(&slope) { + gst_warning!( + CAT, + obj: element, + "Too small/big slope {}, resetting", + slope + ); + + let discont = !inner.deltas.is_empty(); + *inner = ObservationsInner::default(); + + gst_debug!( + CAT, + obj: element, + "Initializing base time: local {}, remote {}", + gst::ClockTime::from_nseconds(local_time), + gst::ClockTime::from_nseconds(remote_time), + ); + inner.base_remote_time = Some(remote_time); + inner.base_local_time = Some(local_time); + + return (gst::ClockTime::from_nseconds(local_time), duration, discont); + } + } + + if (delta > inner.skew && delta - inner.skew > 1_000_000_000) + || (delta < inner.skew && inner.skew - delta > 1_000_000_000) + { + gst_warning!( + CAT, + obj: element, + "Delta {} too far from skew {}, resetting", + delta, + inner.skew + ); + + let discont = !inner.deltas.is_empty(); + *inner = ObservationsInner::default(); + + gst_debug!( + CAT, + obj: element, + "Initializing base time: local {}, remote {}", + gst::ClockTime::from_nseconds(local_time), + gst::ClockTime::from_nseconds(remote_time), + ); + inner.base_remote_time = Some(remote_time); + inner.base_local_time = Some(local_time); + + return (gst::ClockTime::from_nseconds(local_time), duration, discont); + } + + if inner.filling { + if inner.deltas.is_empty() || delta < inner.min_delta { + inner.min_delta = delta; + } + inner.deltas.push_back(delta); + + if remote_diff > WINDOW_DURATION || inner.deltas.len() as u64 == WINDOW_LENGTH { + inner.window_size = inner.deltas.len(); + inner.skew = inner.min_delta; + inner.filling = false; + } else { + let perc_time = remote_diff.mul_div_floor(100, WINDOW_DURATION).unwrap() as i64; + let perc_window = (inner.deltas.len() as u64) + .mul_div_floor(100, WINDOW_LENGTH) + .unwrap() as i64; + let perc = cmp::max(perc_time, perc_window); + + inner.skew = (perc * inner.min_delta + ((10_000 - perc) * inner.skew)) / 10_000; + } + } else { + let old = inner.deltas.pop_front().unwrap(); + inner.deltas.push_back(delta); + + if delta <= inner.min_delta { + inner.min_delta = delta; + } else if old == inner.min_delta { + inner.min_delta = inner.deltas.iter().copied().min().unwrap(); + } + + inner.skew = (inner.min_delta + (124 * inner.skew)) / 125; + } + + let out_time = base_local_time + remote_diff; + let out_time = if inner.skew < 0 { + out_time.saturating_sub((-inner.skew) as u64) + } else { + out_time + (inner.skew as u64) + }; + + gst_trace!( + CAT, + obj: element, + "Skew {}, min delta {}", + inner.skew, + inner.min_delta + ); + gst_trace!( + CAT, + obj: element, + "Outputting {}", + gst::ClockTime::from_nseconds(out_time) + ); + + (gst::ClockTime::from_nseconds(out_time), duration, false) + } +} + +#[derive(Clone)] +pub struct ReceiverControlHandle { + queue: ReceiverQueue, +} + +impl ReceiverControlHandle { + pub fn set_flushing(&self, flushing: bool) { + let mut queue = (self.queue.0).0.lock().unwrap(); + queue.flushing = flushing; + (self.queue.0).1.notify_all(); + } + + pub fn set_playing(&self, playing: bool) { + let mut queue = (self.queue.0).0.lock().unwrap(); + queue.playing = playing; + } + + pub fn shutdown(&self) { + let mut queue = 
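+        // The control handle is a cheap clone that only touches the shared
+        // queue state: ndisrc's unlock()/stop() can request flushing or
+        // shutdown from any thread while capture() blocks on the condvar.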
(self.queue.0).0.lock().unwrap(); + queue.shutdown = true; + (self.queue.0).1.notify_all(); + } +} + +impl Drop for ReceiverInner { + fn drop(&mut self) { + // Will shut down the receiver thread on the next iteration + let mut queue = (self.queue.0).0.lock().unwrap(); + queue.shutdown = true; + drop(queue); + + let element = self.element.upgrade(); + + if let Some(ref element) = element { + gst_debug!(CAT, obj: element, "Closed NDI connection"); + } + } +} + +impl Receiver { + fn new( + recv: RecvInstance, + timestamp_mode: TimestampMode, + timeout: u32, + connect_timeout: u32, + max_queue_length: usize, + element: &gst_base::BaseSrc, + ) -> Self { + let receiver = Receiver(Arc::new(ReceiverInner { + queue: ReceiverQueue(Arc::new(( + Mutex::new(ReceiverQueueInner { + shutdown: false, + playing: false, + flushing: false, + buffer_queue: VecDeque::with_capacity(max_queue_length), + error: None, + timeout: false, + }), + Condvar::new(), + ))), + max_queue_length, + observations: Observations::new(), + element: element.downgrade(), + timestamp_mode, + timeout, + connect_timeout, + thread: Mutex::new(None), + })); + + let weak = Arc::downgrade(&receiver.0); + let thread = thread::spawn(move || { + use std::panic; + + let weak_clone = weak.clone(); + match panic::catch_unwind(panic::AssertUnwindSafe(move || { + Self::receive_thread(&weak_clone, recv) + })) { + Ok(_) => (), + Err(_) => { + if let Some(receiver) = weak.upgrade().map(Receiver) { + if let Some(element) = receiver.0.element.upgrade() { + gst::element_error!( + element, + gst::LibraryError::Failed, + ["Panic while connecting to NDI source"] + ); + } + + let mut queue = (receiver.0.queue.0).0.lock().unwrap(); + queue.error = Some(gst::FlowError::Error); + (receiver.0.queue.0).1.notify_one(); + } + } + } + }); + + *receiver.0.thread.lock().unwrap() = Some(thread); + + receiver + } + + pub fn receiver_control_handle(&self) -> ReceiverControlHandle { + ReceiverControlHandle { + queue: self.0.queue.clone(), + } + } + + pub fn set_flushing(&self, flushing: bool) { + let mut queue = (self.0.queue.0).0.lock().unwrap(); + queue.flushing = flushing; + (self.0.queue.0).1.notify_all(); + } + + pub fn set_playing(&self, playing: bool) { + let mut queue = (self.0.queue.0).0.lock().unwrap(); + queue.playing = playing; + } + + pub fn shutdown(&self) { + let mut queue = (self.0.queue.0).0.lock().unwrap(); + queue.shutdown = true; + (self.0.queue.0).1.notify_all(); + } + + pub fn capture(&self) -> ReceiverItem { + let mut queue = (self.0.queue.0).0.lock().unwrap(); + loop { + if let Some(err) = queue.error { + return ReceiverItem::Error(err); + } else if queue.buffer_queue.is_empty() && queue.timeout { + return ReceiverItem::Timeout; + } else if queue.flushing || queue.shutdown { + return ReceiverItem::Flushing; + } else if let Some(buffer) = queue.buffer_queue.pop_front() { + return ReceiverItem::Buffer(buffer); + } + + queue = (self.0.queue.0).1.wait(queue).unwrap(); + } + } + + pub fn connect( + element: &gst_base::BaseSrc, + ndi_name: Option<&str>, + url_address: Option<&str>, + receiver_ndi_name: &str, + connect_timeout: u32, + bandwidth: NDIlib_recv_bandwidth_e, + color_format: NDIlib_recv_color_format_e, + timestamp_mode: TimestampMode, + timeout: u32, + max_queue_length: usize, + ) -> Option { + gst_debug!(CAT, obj: element, "Starting NDI connection..."); + + assert!(ndi_name.is_some() || url_address.is_some()); + + gst_debug!( + CAT, + obj: element, + "Connecting to NDI source with NDI name '{:?}' and URL/Address {:?}", + ndi_name, + 
url_address, + ); + + // FIXME: Ideally we would use NDIlib_recv_color_format_fastest here but that seems to be + // broken with interlaced content currently + let recv = RecvInstance::builder(ndi_name, url_address, receiver_ndi_name) + .bandwidth(bandwidth) + .color_format(color_format) + .allow_video_fields(true) + .build(); + let recv = match recv { + None => { + gst::element_error!( + element, + gst::CoreError::Negotiation, + ["Failed to connect to source"] + ); + return None; + } + Some(recv) => recv, + }; + + recv.set_tally(&Tally::default()); + + let enable_hw_accel = MetadataFrame::new(0, Some("")); + recv.send_metadata(&enable_hw_accel); + + // This will set info.audio/video accordingly + let receiver = Receiver::new( + recv, + timestamp_mode, + timeout, + connect_timeout, + max_queue_length, + element, + ); + + Some(receiver) + } + + fn receive_thread(receiver: &Weak, recv: RecvInstance) { + let mut first_video_frame = true; + let mut first_audio_frame = true; + let mut first_frame = true; + let mut timer = time::Instant::now(); + + // Capture until error or shutdown + loop { + let receiver = match receiver.upgrade().map(Receiver) { + None => break, + Some(receiver) => receiver, + }; + + let element = match receiver.0.element.upgrade() { + None => return, + Some(element) => element, + }; + + let flushing = { + let queue = (receiver.0.queue.0).0.lock().unwrap(); + if queue.shutdown { + gst_debug!(CAT, obj: &element, "Shutting down"); + break; + } + + // If an error happened in the meantime, just go out of here + if queue.error.is_some() { + gst_error!(CAT, obj: &element, "Error while waiting for connection"); + return; + } + + queue.flushing + }; + + let timeout = if first_frame { + receiver.0.connect_timeout + } else { + receiver.0.timeout + }; + + let res = match recv.capture(50) { + _ if flushing => { + gst_debug!(CAT, obj: &element, "Flushing"); + Err(gst::FlowError::Flushing) + } + Err(_) => { + gst::element_error!( + element, + gst::ResourceError::Read, + ["Error receiving frame"] + ); + Err(gst::FlowError::Error) + } + Ok(None) if timeout > 0 && timer.elapsed().as_millis() >= timeout as u128 => { + gst_debug!(CAT, obj: &element, "Timed out -- assuming EOS",); + Err(gst::FlowError::Eos) + } + Ok(None) => { + gst_debug!(CAT, obj: &element, "No frame received yet, retry"); + continue; + } + Ok(Some(Frame::Video(frame))) => { + first_frame = false; + let mut buffer = receiver.create_video_buffer_and_info(&element, frame); + if first_video_frame { + if let Ok(Buffer::Video(ref mut buffer, _)) = buffer { + buffer + .get_mut() + .unwrap() + .set_flags(gst::BufferFlags::DISCONT); + first_video_frame = false; + } + } + buffer + } + Ok(Some(Frame::Audio(frame))) => { + first_frame = false; + let mut buffer = receiver.create_audio_buffer_and_info(&element, frame); + if first_audio_frame { + if let Ok(Buffer::Audio(ref mut buffer, _)) = buffer { + buffer + .get_mut() + .unwrap() + .set_flags(gst::BufferFlags::DISCONT); + first_audio_frame = false; + } + } + buffer + } + Ok(Some(Frame::Metadata(frame))) => { + if let Some(metadata) = frame.metadata() { + gst_debug!( + CAT, + obj: &element, + "Received metadata at timecode {}: {}", + gst::ClockTime::from_nseconds(frame.timecode() as u64 * 100), + metadata, + ); + } + + continue; + } + }; + + match res { + Ok(item) => { + let mut queue = (receiver.0.queue.0).0.lock().unwrap(); + while queue.buffer_queue.len() > receiver.0.max_queue_length { + gst_warning!( + CAT, + obj: &element, + "Dropping old buffer -- queue has {} items", + 
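+                        // Leaky behaviour: once the queue exceeds max_queue_length
+                        // the oldest buffer is dropped, so a slow downstream gets
+                        // fresh frames instead of ever-growing latency.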
queue.buffer_queue.len() + ); + queue.buffer_queue.pop_front(); + } + queue.buffer_queue.push_back(item); + (receiver.0.queue.0).1.notify_one(); + timer = time::Instant::now(); + } + Err(gst::FlowError::Eos) => { + gst_debug!(CAT, obj: &element, "Signalling EOS"); + let mut queue = (receiver.0.queue.0).0.lock().unwrap(); + queue.timeout = true; + (receiver.0.queue.0).1.notify_one(); + break; + } + Err(gst::FlowError::Flushing) => { + // Flushing, nothing to be done here except for emptying our queue + let mut queue = (receiver.0.queue.0).0.lock().unwrap(); + queue.buffer_queue.clear(); + (receiver.0.queue.0).1.notify_one(); + timer = time::Instant::now(); + } + Err(err) => { + gst_error!(CAT, obj: &element, "Signalling error"); + let mut queue = (receiver.0.queue.0).0.lock().unwrap(); + if queue.error.is_none() { + queue.error = Some(err); + } + (receiver.0.queue.0).1.notify_one(); + break; + } + } + } + } + + fn calculate_timestamp( + &self, + element: &gst_base::BaseSrc, + timestamp: i64, + timecode: i64, + duration: Option, + ) -> Option<(gst::ClockTime, Option, bool)> { + let receive_time = element.current_running_time()?; + + let real_time_now = gst::ClockTime::from_nseconds(glib::real_time() as u64 * 1000); + let timestamp = if timestamp == ndisys::NDIlib_recv_timestamp_undefined { + gst::ClockTime::NONE + } else { + Some(gst::ClockTime::from_nseconds(timestamp as u64 * 100)) + }; + let timecode = gst::ClockTime::from_nseconds(timecode as u64 * 100); + + gst_log!( + CAT, + obj: element, + "Received frame with timecode {}, timestamp {}, duration {}, receive time {}, local time now {}", + timecode, + timestamp.display(), + duration.display(), + receive_time.display(), + real_time_now, + ); + + let (pts, duration, discont) = match self.0.timestamp_mode { + TimestampMode::ReceiveTimeTimecode => { + self.0 + .observations + .process(element, (Some(timecode), receive_time), duration) + } + TimestampMode::ReceiveTimeTimestamp => { + self.0 + .observations + .process(element, (timestamp, receive_time), duration) + } + TimestampMode::Timecode => (timecode, duration, false), + TimestampMode::Timestamp if timestamp.is_none() => (receive_time, duration, false), + TimestampMode::Timestamp => { + // Timestamps are relative to the UNIX epoch + let timestamp = timestamp?; + if real_time_now > timestamp { + let diff = real_time_now - timestamp; + if diff > receive_time { + (gst::ClockTime::ZERO, duration, false) + } else { + (receive_time - diff, duration, false) + } + } else { + let diff = timestamp - real_time_now; + (receive_time + diff, duration, false) + } + } + TimestampMode::ReceiveTime => (receive_time, duration, false), + }; + + gst_log!( + CAT, + obj: element, + "Calculated PTS {}, duration {}", + pts.display(), + duration.display(), + ); + + Some((pts, duration, discont)) + } + + fn create_video_buffer_and_info( + &self, + element: &gst_base::BaseSrc, + video_frame: VideoFrame, + ) -> Result { + gst_debug!(CAT, obj: element, "Received video frame {:?}", video_frame); + + let (pts, duration, discont) = self + .calculate_video_timestamp(element, &video_frame) + .ok_or_else(|| { + gst_debug!(CAT, obj: element, "Flushing, dropping buffer"); + gst::FlowError::Flushing + })?; + + let info = self.create_video_info(element, &video_frame)?; + + let mut buffer = self.create_video_buffer(element, pts, duration, &info, &video_frame)?; + if discont { + buffer + .get_mut() + .unwrap() + .set_flags(gst::BufferFlags::RESYNC); + } + + gst_log!(CAT, obj: element, "Produced video buffer {:?}", buffer); + + 
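+        // RESYNC marks the first buffer after a timestamp-observation reset so
+        // that downstream elements treat the jump as a resynchronization point
+        // rather than clock drift.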
+        Ok(Buffer::Video(buffer, info))
+    }
+
+    fn calculate_video_timestamp(
+        &self,
+        element: &gst_base::BaseSrc,
+        video_frame: &VideoFrame,
+    ) -> Option<(gst::ClockTime, Option<gst::ClockTime>, bool)> {
+        let duration = gst::ClockTime::SECOND.mul_div_floor(
+            video_frame.frame_rate().1 as u64,
+            video_frame.frame_rate().0 as u64,
+        );
+
+        self.calculate_timestamp(
+            element,
+            video_frame.timestamp(),
+            video_frame.timecode(),
+            duration,
+        )
+    }
+
+    fn create_video_info(
+        &self,
+        element: &gst_base::BaseSrc,
+        video_frame: &VideoFrame,
+    ) -> Result<VideoInfo, gst::FlowError> {
+        let fourcc = video_frame.fourcc();
+
+        let par = gst::Fraction::approximate_f32(video_frame.picture_aspect_ratio())
+            .unwrap_or_else(|| gst::Fraction::new(1, 1))
+            * gst::Fraction::new(video_frame.yres(), video_frame.xres());
+        let interlace_mode = match video_frame.frame_format_type() {
+            ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_progressive => {
+                gst_video::VideoInterlaceMode::Progressive
+            }
+            ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved => {
+                gst_video::VideoInterlaceMode::Interleaved
+            }
+            #[cfg(feature = "interlaced-fields")]
+            _ => gst_video::VideoInterlaceMode::Alternate,
+            #[cfg(not(feature = "interlaced-fields"))]
+            _ => {
+                gst::element_error!(
+                    element,
+                    gst::StreamError::Format,
+                    ["Separate field interlacing not supported"]
+                );
+                return Err(gst::FlowError::NotNegotiated);
+            }
+        };
+
+        if [
+            ndisys::NDIlib_FourCC_video_type_UYVY,
+            ndisys::NDIlib_FourCC_video_type_UYVA,
+            ndisys::NDIlib_FourCC_video_type_YV12,
+            ndisys::NDIlib_FourCC_video_type_NV12,
+            ndisys::NDIlib_FourCC_video_type_I420,
+            ndisys::NDIlib_FourCC_video_type_BGRA,
+            ndisys::NDIlib_FourCC_video_type_BGRX,
+            ndisys::NDIlib_FourCC_video_type_RGBA,
+            ndisys::NDIlib_FourCC_video_type_RGBX,
+        ]
+        .contains(&fourcc)
+        {
+            // YV12 and I420 are swapped in the NDI SDK compared to GStreamer
+            let format = match video_frame.fourcc() {
+                ndisys::NDIlib_FourCC_video_type_UYVY => gst_video::VideoFormat::Uyvy,
+                // FIXME: This drops the alpha plane!
+ ndisys::NDIlib_FourCC_video_type_UYVA => gst_video::VideoFormat::Uyvy, + ndisys::NDIlib_FourCC_video_type_YV12 => gst_video::VideoFormat::I420, + ndisys::NDIlib_FourCC_video_type_NV12 => gst_video::VideoFormat::Nv12, + ndisys::NDIlib_FourCC_video_type_I420 => gst_video::VideoFormat::Yv12, + ndisys::NDIlib_FourCC_video_type_BGRA => gst_video::VideoFormat::Bgra, + ndisys::NDIlib_FourCC_video_type_BGRX => gst_video::VideoFormat::Bgrx, + ndisys::NDIlib_FourCC_video_type_RGBA => gst_video::VideoFormat::Rgba, + ndisys::NDIlib_FourCC_video_type_RGBX => gst_video::VideoFormat::Rgbx, + _ => { + gst::element_error!( + element, + gst::StreamError::Format, + ["Unsupported video fourcc {:08x}", video_frame.fourcc()] + ); + + return Err(gst::FlowError::NotNegotiated); + } // TODO: NDIlib_FourCC_video_type_P216 and NDIlib_FourCC_video_type_PA16 not + // supported by GStreamer + }; + + #[cfg(feature = "interlaced-fields")] + { + let mut builder = gst_video::VideoInfo::builder( + format, + video_frame.xres() as u32, + video_frame.yres() as u32, + ) + .fps(gst::Fraction::from(video_frame.frame_rate())) + .par(par) + .interlace_mode(interlace_mode); + + if video_frame.frame_format_type() + == ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved + { + builder = builder.field_order(gst_video::VideoFieldOrder::TopFieldFirst); + } + + return Ok(VideoInfo::VideoInfo(builder.build().map_err(|_| { + gst::element_error!( + element, + gst::StreamError::Format, + ["Invalid video format configuration"] + ); + + gst::FlowError::NotNegotiated + })?)); + } + + #[cfg(not(feature = "interlaced-fields"))] + { + let mut builder = gst_video::VideoInfo::builder( + format, + video_frame.xres() as u32, + video_frame.yres() as u32, + ) + .fps(gst::Fraction::from(video_frame.frame_rate())) + .par(par) + .interlace_mode(interlace_mode); + + if video_frame.frame_format_type() + == ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved + { + builder = builder.field_order(gst_video::VideoFieldOrder::TopFieldFirst); + } + + return Ok(VideoInfo::VideoInfo(builder.build().map_err(|_| { + gst::element_error!( + element, + gst::StreamError::Format, + ["Invalid video format configuration"] + ); + + gst::FlowError::NotNegotiated + })?)); + } + } + + #[cfg(feature = "advanced-sdk")] + if [ + ndisys::NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth, + ] + .contains(&fourcc) + { + let variant = match fourcc { + ndisys::NDIlib_FourCC_video_type_ex_SHQ0_highest_bandwidth + | ndisys::NDIlib_FourCC_video_type_ex_SHQ0_lowest_bandwidth => String::from("SHQ0"), + ndisys::NDIlib_FourCC_video_type_ex_SHQ2_highest_bandwidth + | ndisys::NDIlib_FourCC_video_type_ex_SHQ2_lowest_bandwidth => String::from("SHQ2"), + ndisys::NDIlib_FourCC_video_type_ex_SHQ7_highest_bandwidth + | ndisys::NDIlib_FourCC_video_type_ex_SHQ7_lowest_bandwidth => String::from("SHQ7"), + _ => { + gst::element_error!( + element, + gst::StreamError::Format, + [ + "Unsupported SpeedHQ video fourcc {:08x}", + video_frame.fourcc() + ] + ); + + return Err(gst::FlowError::NotNegotiated); + } + }; + + return Ok(VideoInfo::SpeedHQInfo { + variant, + xres: video_frame.xres(), + yres: video_frame.yres(), + fps_n: video_frame.frame_rate().0, + fps_d: 
video_frame.frame_rate().1, + par_n: par.numer(), + par_d: par.denom(), + interlace_mode, + }); + } + + #[cfg(feature = "advanced-sdk")] + if [ + ndisys::NDIlib_FourCC_video_type_ex_H264_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_H264_lowest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_H264_alpha_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_H264_alpha_lowest_bandwidth, + ] + .contains(&fourcc) + { + let compressed_packet = video_frame.compressed_packet().ok_or_else(|| { + gst_error!( + CAT, + obj: element, + "Video packet doesn't have compressed packet start" + ); + gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]); + + gst::FlowError::Error + })?; + + if compressed_packet.fourcc != NDIlib_compressed_FourCC_type_H264 { + gst_error!(CAT, obj: element, "Non-H264 video packet"); + gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]); + + return Err(gst::FlowError::Error); + } + + return Ok(VideoInfo::H264Info { + xres: video_frame.xres(), + yres: video_frame.yres(), + fps_n: video_frame.frame_rate().0, + fps_d: video_frame.frame_rate().1, + par_n: par.numer(), + par_d: par.denom(), + interlace_mode, + }); + } + + #[cfg(feature = "advanced-sdk")] + if [ + ndisys::NDIlib_FourCC_video_type_ex_HEVC_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_HEVC_lowest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_HEVC_alpha_highest_bandwidth, + ndisys::NDIlib_FourCC_video_type_ex_HEVC_alpha_lowest_bandwidth, + ] + .contains(&fourcc) + { + let compressed_packet = video_frame.compressed_packet().ok_or_else(|| { + gst_error!( + CAT, + obj: element, + "Video packet doesn't have compressed packet start" + ); + gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]); + + gst::FlowError::Error + })?; + + if compressed_packet.fourcc != NDIlib_compressed_FourCC_type_HEVC { + gst_error!(CAT, obj: element, "Non-H265 video packet"); + gst::element_error!(element, gst::StreamError::Format, ["Invalid video packet"]); + + return Err(gst::FlowError::Error); + } + + return Ok(VideoInfo::H265Info { + xres: video_frame.xres(), + yres: video_frame.yres(), + fps_n: video_frame.frame_rate().0, + fps_d: video_frame.frame_rate().1, + par_n: par.numer(), + par_d: par.denom(), + interlace_mode, + }); + } + + gst::element_error!( + element, + gst::StreamError::Format, + ["Unsupported video fourcc {:08x}", video_frame.fourcc()] + ); + Err(gst::FlowError::NotNegotiated) + } + + fn create_video_buffer( + &self, + element: &gst_base::BaseSrc, + pts: gst::ClockTime, + duration: Option, + info: &VideoInfo, + video_frame: &VideoFrame, + ) -> Result { + let mut buffer = self.copy_video_frame(element, info, video_frame)?; + { + let buffer = buffer.get_mut().unwrap(); + buffer.set_pts(pts); + buffer.set_duration(duration); + + #[cfg(feature = "reference-timestamps")] + { + gst::ReferenceTimestampMeta::add( + buffer, + &*TIMECODE_CAPS, + gst::ClockTime::from_nseconds(video_frame.timecode() as u64 * 100), + gst::ClockTime::NONE, + ); + if video_frame.timestamp() != ndisys::NDIlib_recv_timestamp_undefined { + gst::ReferenceTimestampMeta::add( + buffer, + &*TIMESTAMP_CAPS, + gst::ClockTime::from_nseconds(video_frame.timestamp() as u64 * 100), + gst::ClockTime::NONE, + ); + } + } + + #[cfg(feature = "interlaced-fields")] + { + match video_frame.frame_format_type() { + ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved => { + buffer.set_video_flags( + gst_video::VideoBufferFlags::INTERLACED + | 
+                    ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved => {
+                        buffer.set_video_flags(
+                            gst_video::VideoBufferFlags::INTERLACED
+                                | gst_video::VideoBufferFlags::TFF,
+                        );
+                    }
+                    ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0 => {
+                        buffer.set_video_flags(
+                            gst_video::VideoBufferFlags::INTERLACED
+                                | gst_video::VideoBufferFlags::TOP_FIELD,
+                        );
+                    }
+                    ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_1 => {
+                        buffer.set_video_flags(
+                            gst_video::VideoBufferFlags::INTERLACED
+                                | gst_video::VideoBufferFlags::BOTTOM_FIELD,
+                        );
+                    }
+                    _ => (),
+                };
+            }
+
+            #[cfg(not(feature = "interlaced-fields"))]
+            {
+                if video_frame.frame_format_type()
+                    == ndisys::NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved
+                {
+                    buffer.set_video_flags(
+                        gst_video::VideoBufferFlags::INTERLACED | gst_video::VideoBufferFlags::TFF,
+                    );
+                }
+            }
+        }
+
+        Ok(buffer)
+    }
+
+    fn copy_video_frame(
+        &self,
+        #[allow(unused_variables)] element: &gst_base::BaseSrc,
+        info: &VideoInfo,
+        video_frame: &VideoFrame,
+    ) -> Result<gst::Buffer, gst::FlowError> {
+        match info {
+            VideoInfo::VideoInfo(ref info) => {
+                let src = video_frame.data().ok_or(gst::FlowError::Error)?;
+
+                let buffer = gst::Buffer::with_size(info.size()).unwrap();
+                let mut vframe = gst_video::VideoFrame::from_buffer_writable(buffer, info).unwrap();
+
+                match info.format() {
+                    gst_video::VideoFormat::Uyvy
+                    | gst_video::VideoFormat::Bgra
+                    | gst_video::VideoFormat::Bgrx
+                    | gst_video::VideoFormat::Rgba
+                    | gst_video::VideoFormat::Rgbx => {
+                        let line_bytes = if info.format() == gst_video::VideoFormat::Uyvy {
+                            2 * vframe.width() as usize
+                        } else {
+                            4 * vframe.width() as usize
+                        };
+                        let dest_stride = vframe.plane_stride()[0] as usize;
+                        let dest = vframe.plane_data_mut(0).unwrap();
+                        let src_stride = video_frame.line_stride_or_data_size_in_bytes() as usize;
+
+                        // Copy row by row; source and destination strides can
+                        // differ, so only the line_bytes of actual pixel data
+                        // are copied per row.
+                        for (dest, src) in dest
+                            .chunks_exact_mut(dest_stride)
+                            .zip(src.chunks_exact(src_stride))
+                        {
+                            dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
+                        }
+                    }
+                    gst_video::VideoFormat::Nv12 => {
+                        // First plane: luma, one byte per pixel.
+                        {
+                            let line_bytes = vframe.width() as usize;
+                            let dest_stride = vframe.plane_stride()[0] as usize;
+                            let dest = vframe.plane_data_mut(0).unwrap();
+                            let src_stride =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize;
+
+                            for (dest, src) in dest
+                                .chunks_exact_mut(dest_stride)
+                                .zip(src.chunks_exact(src_stride))
+                            {
+                                dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
+                            }
+                        }
+
+                        // Second plane: interleaved UV at half the rows,
+                        // directly following the luma plane.
+                        {
+                            let line_bytes = vframe.width() as usize;
+                            let dest_stride = vframe.plane_stride()[1] as usize;
+                            let dest = vframe.plane_data_mut(1).unwrap();
+                            let src_stride =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize;
+                            let src = &src[(video_frame.yres() as usize * src_stride)..];
+
+                            for (dest, src) in dest
+                                .chunks_exact_mut(dest_stride)
+                                .zip(src.chunks_exact(src_stride))
+                            {
+                                dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
+                            }
+                        }
+                    }
+                    gst_video::VideoFormat::Yv12 | gst_video::VideoFormat::I420 => {
+                        // First plane: luma, one byte per pixel.
+                        {
+                            let line_bytes = vframe.width() as usize;
+                            let dest_stride = vframe.plane_stride()[0] as usize;
+                            let dest = vframe.plane_data_mut(0).unwrap();
+                            let src_stride =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize;
+
+                            for (dest, src) in dest
+                                .chunks_exact_mut(dest_stride)
+                                .zip(src.chunks_exact(src_stride))
+                            {
+                                dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
+                            }
+                        }
+
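+                        // The NDI frame packs the planes back to back: yres
+                        // rows of luma at src_stride bytes each, then two
+                        // chroma planes at half the rows and half the stride.
+                        // E.g. for 1920x1080 with src_stride == 1920: Y is
+                        // 1080 x 1920 B, followed by two 540 x 960 B chroma
+                        // planes (U/V order depending on I420 vs. YV12).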
+                        // Second plane
+                        {
+                            let line_bytes = (vframe.width() as usize + 1) / 2;
+                            let dest_stride = vframe.plane_stride()[1] as usize;
+                            let dest = vframe.plane_data_mut(1).unwrap();
+                            let src_stride =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize;
+                            let src_stride1 =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize / 2;
+                            let src = &src[(video_frame.yres() as usize * src_stride)..];
+
+                            for (dest, src) in dest
+                                .chunks_exact_mut(dest_stride)
+                                .zip(src.chunks_exact(src_stride1))
+                            {
+                                dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
+                            }
+                        }
+
+                        // Third plane
+                        {
+                            let line_bytes = (vframe.width() as usize + 1) / 2;
+                            let dest_stride = vframe.plane_stride()[2] as usize;
+                            let dest = vframe.plane_data_mut(2).unwrap();
+                            let src_stride =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize;
+                            let src_stride1 =
+                                video_frame.line_stride_or_data_size_in_bytes() as usize / 2;
+                            let src = &src[(video_frame.yres() as usize * src_stride
+                                + (video_frame.yres() as usize + 1) / 2 * src_stride1)..];
+
+                            for (dest, src) in dest
+                                .chunks_exact_mut(dest_stride)
+                                .zip(src.chunks_exact(src_stride1))
+                            {
+                                dest[..line_bytes].copy_from_slice(&src[..line_bytes]);
+                            }
+                        }
+                    }
+                    _ => unreachable!(),
+                }
+
+                Ok(vframe.into_buffer())
+            }
+            #[cfg(feature = "advanced-sdk")]
+            VideoInfo::SpeedHQInfo { .. } => {
+                let data = video_frame.data().ok_or_else(|| {
+                    gst_error!(CAT, obj: element, "Video packet has no data");
+                    gst::element_error!(
+                        element,
+                        gst::StreamError::Format,
+                        ["Invalid video packet"]
+                    );
+
+                    gst::FlowError::Error
+                })?;
+
+                Ok(gst::Buffer::from_mut_slice(Vec::from(data)))
+            }
+            #[cfg(feature = "advanced-sdk")]
+            VideoInfo::H264Info { .. } | VideoInfo::H265Info { .. } => {
+                let compressed_packet = video_frame.compressed_packet().ok_or_else(|| {
+                    gst_error!(
+                        CAT,
+                        obj: element,
+                        "Video packet doesn't have a compressed packet start"
+                    );
+                    gst::element_error!(
+                        element,
+                        gst::StreamError::Format,
+                        ["Invalid video packet"]
+                    );
+
+                    gst::FlowError::Error
+                })?;
+
+                // Prepend the codec configuration (e.g. SPS/PPS) when the SDK
+                // provides it, and mark non-keyframes as delta units.
+                let mut buffer = Vec::new();
+                if let Some(extra_data) = compressed_packet.extra_data {
+                    buffer.extend_from_slice(extra_data);
+                }
+                buffer.extend_from_slice(compressed_packet.data);
+                let mut buffer = gst::Buffer::from_mut_slice(buffer);
+                if !compressed_packet.key_frame {
+                    let buffer = buffer.get_mut().unwrap();
+                    buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
+                }
+
+                Ok(buffer)
+            }
+        }
+    }
+
+    fn create_audio_buffer_and_info(
+        &self,
+        element: &gst_base::BaseSrc,
+        audio_frame: AudioFrame,
+    ) -> Result<Buffer, gst::FlowError> {
+        gst_debug!(CAT, obj: element, "Received audio frame {:?}", audio_frame);
+
+        let (pts, duration, discont) = self
+            .calculate_audio_timestamp(element, &audio_frame)
+            .ok_or_else(|| {
+                gst_debug!(CAT, obj: element, "Flushing, dropping buffer");
+                gst::FlowError::Flushing
+            })?;
+
+        let info = self.create_audio_info(element, &audio_frame)?;
+
+        let mut buffer = self.create_audio_buffer(element, pts, duration, &info, &audio_frame)?;
+        if discont {
+            buffer
+                .get_mut()
+                .unwrap()
+                .set_flags(gst::BufferFlags::RESYNC);
+        }
+
+        gst_log!(CAT, obj: element, "Produced audio buffer {:?}", buffer);
+
+        Ok(Buffer::Audio(buffer, info))
+    }
+
+    fn calculate_audio_timestamp(
+        &self,
+        element: &gst_base::BaseSrc,
+        audio_frame: &AudioFrame,
+    ) -> Option<(gst::ClockTime, Option<gst::ClockTime>, bool)> {
+        // duration = no_samples / sample_rate seconds, rounded down.
+        let duration = gst::ClockTime::SECOND.mul_div_floor(
+            audio_frame.no_samples() as u64,
+            audio_frame.sample_rate() as u64,
+        );
+
+        self.calculate_timestamp(
+            element,
+            audio_frame.timestamp(),
+            audio_frame.timecode(),
+            duration,
+        )
+    }
+
+    fn create_audio_info(
+        &self,
+        element: &gst_base::BaseSrc,
+        audio_frame: &AudioFrame,
+    ) -> Result<AudioInfo, gst::FlowError> {
+        let fourcc = audio_frame.fourcc();
+
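+        // Uncompressed NDI audio arrives as planar 32-bit float ("FLTp"), one
+        // contiguous block per channel. The AudioInfo built here describes
+        // interleaved F32; the actual interleaving happens in
+        // create_audio_buffer() below.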
+        if [NDIlib_FourCC_audio_type_FLTp].contains(&fourcc) {
+            let builder = gst_audio::AudioInfo::builder(
+                gst_audio::AUDIO_FORMAT_F32,
+                audio_frame.sample_rate() as u32,
+                audio_frame.no_channels() as u32,
+            );
+
+            let info = builder.build().map_err(|_| {
+                gst::element_error!(
+                    element,
+                    gst::StreamError::Format,
+                    ["Invalid audio format configuration"]
+                );
+
+                gst::FlowError::NotNegotiated
+            })?;
+
+            return Ok(AudioInfo::AudioInfo(info));
+        }
+
+        #[cfg(feature = "advanced-sdk")]
+        if [NDIlib_FourCC_audio_type_AAC].contains(&fourcc) {
+            use std::convert::TryInto;
+
+            let compressed_packet = audio_frame.compressed_packet().ok_or_else(|| {
+                gst_error!(
+                    CAT,
+                    obj: element,
+                    "Audio packet doesn't have a compressed packet start"
+                );
+                gst::element_error!(element, gst::StreamError::Format, ["Invalid audio packet"]);
+
+                gst::FlowError::Error
+            })?;
+
+            if compressed_packet.fourcc != NDIlib_compressed_FourCC_type_AAC {
+                gst_error!(CAT, obj: element, "Non-AAC audio packet");
+                gst::element_error!(element, gst::StreamError::Format, ["Invalid audio packet"]);
+
+                return Err(gst::FlowError::Error);
+            }
+
+            return Ok(AudioInfo::AacInfo {
+                sample_rate: audio_frame.sample_rate(),
+                no_channels: audio_frame.no_channels(),
+                codec_data: compressed_packet
+                    .extra_data
+                    .ok_or(gst::FlowError::NotNegotiated)?
+                    .try_into()
+                    .map_err(|_| gst::FlowError::NotNegotiated)?,
+            });
+        }
+
+        #[cfg(feature = "advanced-sdk")]
+        if [NDIlib_FourCC_audio_type_Opus].contains(&fourcc) {
+            // This branch was previously empty, which made Opus streams fall
+            // through to the "unsupported" error below even though
+            // create_audio_buffer() handles AudioInfo::OpusInfo. The field
+            // names below are assumed to mirror AacInfo.
+            return Ok(AudioInfo::OpusInfo {
+                sample_rate: audio_frame.sample_rate(),
+                no_channels: audio_frame.no_channels(),
+            });
+        }
+
+        gst::element_error!(
+            element,
+            gst::StreamError::Format,
+            ["Unsupported audio fourcc {:08x}", audio_frame.fourcc()]
+        );
+        Err(gst::FlowError::NotNegotiated)
+    }
+
+    fn create_audio_buffer(
+        &self,
+        #[allow(unused_variables)] element: &gst_base::BaseSrc,
+        pts: gst::ClockTime,
+        duration: Option<gst::ClockTime>,
+        info: &AudioInfo,
+        audio_frame: &AudioFrame,
+    ) -> Result<gst::Buffer, gst::FlowError> {
+        match info {
+            AudioInfo::AudioInfo(ref info) => {
+                let src = audio_frame.data().ok_or(gst::FlowError::Error)?;
+                let buff_size = (audio_frame.no_samples() as u32 * info.bpf()) as usize;
+
+                let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
+                {
+                    let buffer = buffer.get_mut().unwrap();
+
+                    buffer.set_pts(pts);
+                    buffer.set_duration(duration);
+
+                    #[cfg(feature = "reference-timestamps")]
+                    {
+                        gst::ReferenceTimestampMeta::add(
+                            buffer,
+                            &*TIMECODE_CAPS,
+                            gst::ClockTime::from_nseconds(audio_frame.timecode() as u64 * 100),
+                            gst::ClockTime::NONE,
+                        );
+                        if audio_frame.timestamp() != ndisys::NDIlib_recv_timestamp_undefined {
+                            gst::ReferenceTimestampMeta::add(
+                                buffer,
+                                &*TIMESTAMP_CAPS,
+                                gst::ClockTime::from_nseconds(audio_frame.timestamp() as u64 * 100),
+                                gst::ClockTime::NONE,
+                            );
+                        }
+                    }
+
+                    let mut dest = buffer.map_writable().unwrap();
+                    let dest = dest
+                        .as_mut_slice_of::<f32>()
+                        .map_err(|_| gst::FlowError::NotNegotiated)?;
+                    assert!(
+                        dest.len()
+                            == audio_frame.no_samples() as usize
+                                * audio_frame.no_channels() as usize
+                    );
+
+                    // Interleave the planar source: e.g. with 2 channels and 3
+                    // samples, [L0 L1 L2][R0 R1 R2] becomes [L0 R0 L1 R1 L2 R2].
+                    for (channel, samples) in src
+                        .chunks_exact(audio_frame.channel_stride_or_data_size_in_bytes() as usize)
+                        .enumerate()
+                    {
+                        let samples = samples
+                            .as_slice_of::<f32>()
+                            .map_err(|_| gst::FlowError::NotNegotiated)?;
+
+                        for (i, sample) in samples[..audio_frame.no_samples() as usize]
+                            .iter()
+                            .enumerate()
+                        {
+                            dest[i * (audio_frame.no_channels() as usize) + channel] = *sample;
+                        }
+                    }
+                }
+
+                Ok(buffer)
+            }
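+            // Compressed audio is passed through without unpacking: Opus
+            // packets go downstream as-is; for AAC only the raw frame data is
+            // copied, since the codec configuration travels separately as
+            // codec_data (see create_audio_info() above).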
+            #[cfg(feature = "advanced-sdk")]
+            AudioInfo::OpusInfo { .. } => {
+                let data = audio_frame.data().ok_or_else(|| {
+                    gst_error!(CAT, obj: element, "Audio packet has no data");
+                    gst::element_error!(
+                        element,
+                        gst::StreamError::Format,
+                        ["Invalid audio packet"]
+                    );
+
+                    gst::FlowError::Error
+                })?;
+
+                Ok(gst::Buffer::from_mut_slice(Vec::from(data)))
+            }
+            #[cfg(feature = "advanced-sdk")]
+            AudioInfo::AacInfo { .. } => {
+                let compressed_packet = audio_frame.compressed_packet().ok_or_else(|| {
+                    gst_error!(
+                        CAT,
+                        obj: element,
+                        "Audio packet doesn't have a compressed packet start"
+                    );
+                    gst::element_error!(
+                        element,
+                        gst::StreamError::Format,
+                        ["Invalid audio packet"]
+                    );
+
+                    gst::FlowError::Error
+                })?;
+
+                Ok(gst::Buffer::from_mut_slice(Vec::from(
+                    compressed_packet.data,
+                )))
+            }
+        }
+    }
+}
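+
+// Illustrative receive pipeline for this plugin. The element and property
+// names below are assumptions for the sketch and are not confirmed by this
+// file:
+//
+//     gst-launch-1.0 ndisrc ndi-name="NDI Source" ! videoconvert ! autovideosink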