gstreamer-video: Update manual code to 2018 edition
commit d31badf9ac (parent 4bd7f7af1a)
25 changed files with 1266 additions and 1447 deletions
gstreamer-video/Cargo.toml

@@ -11,19 +11,16 @@ homepage = "https://gstreamer.freedesktop.org"
 documentation = "https://gstreamer.pages.freedesktop.org/gstreamer-rs/gstreamer_video/"
 keywords = ["gstreamer", "multimedia", "audio", "video", "gnome"]
 build = "build.rs"
+edition = "2018"

 [dependencies]
 bitflags = "1.0"
 libc = "0.2"
 cfg-if = "1.0"
-glib-sys = { git = "https://github.com/gtk-rs/gtk-rs" }
-gobject-sys = { git = "https://github.com/gtk-rs/gtk-rs" }
-gstreamer-sys = { path = "../gstreamer/sys", features = ["v1_8"] }
-gstreamer-base-sys = { path = "../gstreamer-base/sys", features = ["v1_8"] }
-gstreamer-video-sys = { path = "../gstreamer-video/sys", features = ["v1_8"] }
+ffi = { package = "gstreamer-video-sys", path = "../gstreamer-video/sys", features = ["v1_8"] }
 glib = { git = "https://github.com/gtk-rs/gtk-rs" }
-gstreamer = { path = "../gstreamer" }
-gstreamer-base = { path = "../gstreamer-base" }
+gst = { package = "gstreamer", path = "../gstreamer" }
+gst-base = { package = "gstreamer-base", path = "../gstreamer-base" }
 once_cell = "1.0"
 futures-channel = "0.3"
 futures-util = "0.3"

@@ -36,14 +33,14 @@ itertools = "0.9"

 [features]
 default = []
-v1_10 = ["gstreamer/v1_10", "gstreamer-base/v1_10", "gstreamer-video-sys/v1_10"]
-v1_12 = ["gstreamer/v1_12", "gstreamer-base/v1_12", "gstreamer-video-sys/v1_12", "v1_10"]
-v1_14 = ["gstreamer/v1_14", "gstreamer-base/v1_14", "gstreamer-video-sys/v1_14", "v1_12"]
-v1_16 = ["gstreamer/v1_16", "gstreamer-base/v1_16", "gstreamer-video-sys/v1_16", "v1_14"]
-v1_18 = ["gstreamer/v1_18", "gstreamer-base/v1_18", "gstreamer-video-sys/v1_18", "v1_16"]
+v1_10 = ["gst/v1_10", "gst-base/v1_10", "ffi/v1_10"]
+v1_12 = ["gst/v1_12", "gst-base/v1_12", "ffi/v1_12", "v1_10"]
+v1_14 = ["gst/v1_14", "gst-base/v1_14", "ffi/v1_14", "v1_12"]
+v1_16 = ["gst/v1_16", "gst-base/v1_16", "ffi/v1_16", "v1_14"]
+v1_18 = ["gst/v1_18", "gst-base/v1_18", "ffi/v1_18", "v1_16"]
 embed-lgpl-docs = ["gstreamer-rs-lgpl-docs"]
 purge-lgpl-docs = ["gstreamer-rs-lgpl-docs"]
-dox = ["v1_18", "gstreamer-video-sys/dox", "glib/dox", "gstreamer/dox", "gstreamer-base/dox"]
+dox = ["v1_18", "ffi/dox", "glib/dox", "gst/dox", "gst-base/dox"]

 [package.metadata.docs.rs]
 features = ["dox", "embed-lgpl-docs"]
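The renamed dependencies above rely on Cargo's `package` key: the crate keeps its published name but is imported under a shorter local one (`ffi`, `gst`, `gst-base`), which is what lets the 2018-edition sources drop their `extern crate gstreamer_video_sys as gst_video_sys;`-style aliases. A minimal sketch of the idea, assuming the same Cargo.toml entries as in this diff:

// Assumed Cargo.toml entries (mirroring the ones added above):
//
//   ffi = { package = "gstreamer-video-sys", path = "../gstreamer-video/sys", features = ["v1_8"] }
//   gst = { package = "gstreamer", path = "../gstreamer" }
//
// Because the rename happens in Cargo.toml, any module in the crate can use the
// short names directly; no `extern crate` declaration or alias is needed.
fn ensure_initialized() {
    // `gst` is the renamed `gstreamer` dependency.
    gst::init().expect("failed to initialize GStreamer");

    // `ffi` is the renamed `gstreamer-video-sys` dependency; raw FFI items are
    // reached through it, e.g. the C-level format enum.
    let _raw_format: ffi::GstVideoFormat = ffi::GST_VIDEO_FORMAT_ENCODED;
}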
gstreamer-video/src/caps_features.rs

@@ -7,7 +7,6 @@
 // except according to those terms.

 use gst::CapsFeatures;
-use gst_video_sys;
 use std::ffi::CStr;

 use once_cell::sync::Lazy;

@@ -15,7 +14,7 @@ use once_cell::sync::Lazy;
 #[cfg(any(feature = "v1_16", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
 pub static CAPS_FEATURE_FORMAT_INTERLACED: Lazy<&'static str> = Lazy::new(|| unsafe {
-    CStr::from_ptr(gst_video_sys::GST_CAPS_FEATURE_FORMAT_INTERLACED)
+    CStr::from_ptr(ffi::GST_CAPS_FEATURE_FORMAT_INTERLACED)
         .to_str()
         .unwrap()
 });

@@ -26,7 +25,7 @@ pub static CAPS_FEATURES_FORMAT_INTERLACED: Lazy<CapsFeatures> =

 pub static CAPS_FEATURE_META_GST_VIDEO_AFFINE_TRANSFORMATION_META: Lazy<&'static str> =
     Lazy::new(|| unsafe {
-        CStr::from_ptr(gst_video_sys::GST_CAPS_FEATURE_META_GST_VIDEO_AFFINE_TRANSFORMATION_META)
+        CStr::from_ptr(ffi::GST_CAPS_FEATURE_META_GST_VIDEO_AFFINE_TRANSFORMATION_META)
             .to_str()
             .unwrap()
     });

@@ -35,7 +34,7 @@ pub static CAPS_FEATURES_META_GST_VIDEO_AFFINE_TRANSFORMATION_META: Lazy<CapsFea

 pub static CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META: Lazy<&'static str> =
     Lazy::new(|| unsafe {
-        CStr::from_ptr(gst_video_sys::GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META)
+        CStr::from_ptr(ffi::GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META)
             .to_str()
             .unwrap()
     });

@@ -43,7 +42,7 @@ pub static CAPS_FEATURES_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META: Lazy<CapsFeature
     Lazy::new(|| CapsFeatures::new(&[*CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META]));

 pub static CAPS_FEATURE_META_GST_VIDEO_META: Lazy<&'static str> = Lazy::new(|| unsafe {
-    CStr::from_ptr(gst_video_sys::GST_CAPS_FEATURE_META_GST_VIDEO_META)
+    CStr::from_ptr(ffi::GST_CAPS_FEATURE_META_GST_VIDEO_META)
         .to_str()
         .unwrap()
 });

@@ -52,7 +51,7 @@ pub static CAPS_FEATURES_META_GST_VIDEO_META: Lazy<CapsFeatures> =

 pub static CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION: Lazy<&'static str> =
     Lazy::new(|| unsafe {
-        CStr::from_ptr(gst_video_sys::GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)
+        CStr::from_ptr(ffi::GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)
             .to_str()
             .unwrap()
     });
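Each of the statics above wraps a NUL-terminated C string constant from the -sys crate in a `once_cell::sync::Lazy`, so the `CStr` to `&str` conversion runs once, on first access, and is cached for the rest of the process. A self-contained sketch of the same pattern, with a local byte string standing in for the FFI constant:

use std::ffi::CStr;

use once_cell::sync::Lazy;

// Stand-in for a NUL-terminated C string constant exported by the -sys crate,
// e.g. ffi::GST_CAPS_FEATURE_FORMAT_INTERLACED.
static RAW_FEATURE: &[u8] = b"format:Interlaced\0";

// The CStr -> &str conversion happens on the first dereference and the result
// is cached as a &'static str.
static CAPS_FEATURE: Lazy<&'static str> = Lazy::new(|| {
    CStr::from_bytes_with_nul(RAW_FEATURE)
        .unwrap()
        .to_str()
        .unwrap()
});

fn main() {
    assert_eq!(*CAPS_FEATURE, "format:Interlaced");
}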
gstreamer-video/src/functions.rs

@@ -6,14 +6,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use glib_sys;
-use gst_sys;
-use gst_video_sys;
-
-use glib;
 use glib::translate::{from_glib, from_glib_full, ToGlib, ToGlibPtr};
 use glib::ToSendValue;
-use gst;

 use std::i32;
 use std::mem;

@@ -27,7 +21,7 @@ pub fn convert_sample(
     skip_assert_initialized!();
     unsafe {
         let mut error = ptr::null_mut();
-        let ret = gst_video_sys::gst_video_convert_sample(
+        let ret = ffi::gst_video_convert_sample(
             sample.to_glib_none().0,
             caps.to_glib_none().0,
             timeout.to_glib(),

@@ -78,9 +72,9 @@ unsafe fn convert_sample_async_unsafe<F>(
     F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
 {
     unsafe extern "C" fn convert_sample_async_trampoline<F>(
-        sample: *mut gst_sys::GstSample,
-        error: *mut glib_sys::GError,
-        user_data: glib_sys::gpointer,
+        sample: *mut gst::ffi::GstSample,
+        error: *mut glib::ffi::GError,
+        user_data: glib::ffi::gpointer,
     ) where
         F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
     {

@@ -93,7 +87,7 @@ unsafe fn convert_sample_async_unsafe<F>(
             callback(Err(from_glib_full(error)))
         }
     }
-    unsafe extern "C" fn convert_sample_async_free<F>(user_data: glib_sys::gpointer)
+    unsafe extern "C" fn convert_sample_async_free<F>(user_data: glib::ffi::gpointer)
     where
         F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
     {

@@ -102,12 +96,12 @@ unsafe fn convert_sample_async_unsafe<F>(

     let user_data: Box<Option<F>> = Box::new(Some(func));

-    gst_video_sys::gst_video_convert_sample_async(
+    ffi::gst_video_convert_sample_async(
         sample.to_glib_none().0,
         caps.to_glib_none().0,
         timeout.to_glib(),
         Some(convert_sample_async_trampoline::<F>),
-        Box::into_raw(user_data) as glib_sys::gpointer,
+        Box::into_raw(user_data) as glib::ffi::gpointer,
         Some(convert_sample_async_free::<F>),
     );
 }

@@ -155,7 +149,7 @@ pub fn calculate_display_ratio(
     let mut dar_n = mem::MaybeUninit::uninit();
     let mut dar_d = mem::MaybeUninit::uninit();

-    let res: bool = from_glib(gst_video_sys::gst_video_calculate_display_ratio(
+    let res: bool = from_glib(ffi::gst_video_calculate_display_ratio(
         dar_n.as_mut_ptr(),
         dar_d.as_mut_ptr(),
         video_width,

@@ -182,7 +176,7 @@ pub fn guess_framerate(duration: gst::ClockTime) -> Option<gst::Fraction> {
     unsafe {
         let mut dest_n = mem::MaybeUninit::uninit();
         let mut dest_d = mem::MaybeUninit::uninit();
-        let res: bool = from_glib(gst_video_sys::gst_video_guess_framerate(
+        let res: bool = from_glib(ffi::gst_video_guess_framerate(
             duration.to_glib(),
             dest_n.as_mut_ptr(),
             dest_d.as_mut_ptr(),

@@ -198,14 +192,16 @@ pub fn guess_framerate(duration: gst::ClockTime) -> Option<gst::Fraction> {
     }
 }

-pub fn video_make_raw_caps(formats: &[::VideoFormat]) -> gst::caps::Builder<gst::caps::NoFeature> {
+pub fn video_make_raw_caps(
+    formats: &[crate::VideoFormat],
+) -> gst::caps::Builder<gst::caps::NoFeature> {
     assert_initialized_main_thread!();

     let formats: Vec<glib::SendValue> = formats
         .iter()
         .map(|f| match f {
-            ::VideoFormat::Encoded => panic!("Invalid encoded format"),
-            ::VideoFormat::Unknown => panic!("Invalid unknown format"),
+            crate::VideoFormat::Encoded => panic!("Invalid encoded format"),
+            crate::VideoFormat::Unknown => panic!("Invalid unknown format"),
             _ => f.to_string().to_send_value(),
         })
         .collect();

@@ -223,8 +219,6 @@ pub fn video_make_raw_caps(formats: &[::VideoFormat]) -> gst::caps::Builder<gst:
 #[cfg(test)]
 mod tests {
     use super::*;
-    use glib;
-    use gst;
     use std::sync::{Arc, Mutex};

     #[test]

@@ -245,7 +239,7 @@ mod tests {
                 p[3] = 255;
             }
         }
-        let in_caps = ::VideoInfo::builder(::VideoFormat::Rgba, 320, 240)
+        let in_caps = crate::VideoInfo::builder(crate::VideoFormat::Rgba, 320, 240)
             .build()
             .unwrap()
             .to_caps()

@@ -255,7 +249,7 @@ mod tests {
             .caps(&in_caps)
             .build();

-        let out_caps = ::VideoInfo::builder(::VideoFormat::Abgr, 320, 240)
+        let out_caps = crate::VideoInfo::builder(crate::VideoFormat::Abgr, 320, 240)
             .build()
             .unwrap()
             .to_caps()

@@ -290,26 +284,26 @@ mod tests {
     fn video_caps() {
         gst::init().unwrap();

-        let caps = video_make_raw_caps(&[::VideoFormat::Nv12, ::VideoFormat::Nv16]).build();
+        let caps =
+            video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]).build();
         assert_eq!(caps.to_string(), "video/x-raw, format=(string){ NV12, NV16 }, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]");

         #[cfg(feature = "v1_18")]
         {
             /* video_make_raw_caps() is a re-implementation so ensure it returns the same caps as the C API */
             let c_caps = unsafe {
-                let formats: Vec<gst_video_sys::GstVideoFormat> =
-                    [::VideoFormat::Nv12, ::VideoFormat::Nv16]
+                let formats: Vec<ffi::GstVideoFormat> =
+                    [crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]
                         .iter()
                         .map(|f| f.to_glib())
                         .collect();
-                let caps =
-                    gst_video_sys::gst_video_make_raw_caps(formats.as_ptr(), formats.len() as u32);
+                let caps = ffi::gst_video_make_raw_caps(formats.as_ptr(), formats.len() as u32);
                 from_glib_full(caps)
             };
             assert_eq!(caps, c_caps);
         }

-        let caps = video_make_raw_caps(&[::VideoFormat::Nv12, ::VideoFormat::Nv16])
+        let caps = video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16])
             .field("width", &800)
             .field("height", &600)
             .field("framerate", &gst::Fraction::new(30, 1))

@@ -321,13 +315,13 @@ mod tests {
     #[should_panic(expected = "Invalid encoded format")]
     fn video_caps_encoded() {
         gst::init().unwrap();
-        video_make_raw_caps(&[::VideoFormat::Encoded]);
+        video_make_raw_caps(&[crate::VideoFormat::Encoded]);
     }

     #[test]
     #[should_panic(expected = "Invalid unknown format")]
     fn video_caps_unknown() {
         gst::init().unwrap();
-        video_make_raw_caps(&[::VideoFormat::Unknown]);
+        video_make_raw_caps(&[crate::VideoFormat::Unknown]);
     }
 }
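`convert_sample_async` and its trampoline shown above follow the usual recipe for pushing a Rust closure through a C callback: box the closure, hand the raw pointer to C as `user_data`, and provide two `extern "C"` shims, one that takes the closure back out of the box and invokes it, and one that frees the box. A self-contained sketch of that pattern against a stand-in C-style function (no real GStreamer calls; all names here are hypothetical):

use std::os::raw::c_void;

// Pretend C API: stores a callback plus user_data and invokes the callback
// exactly once (here immediately, to keep the example runnable).
unsafe fn fake_c_async(
    value: i32,
    callback: unsafe extern "C" fn(i32, *mut c_void),
    user_data: *mut c_void,
    destroy: unsafe extern "C" fn(*mut c_void),
) {
    callback(value, user_data);
    destroy(user_data);
}

unsafe extern "C" fn trampoline<F: FnOnce(i32) + 'static>(value: i32, user_data: *mut c_void) {
    // Mirror of the Box<Option<F>> pattern above: borrow the box and take the
    // closure out of the Option so it can only be called once.
    let callback = &mut *(user_data as *mut Option<F>);
    let callback = callback.take().expect("callback already invoked");
    callback(value);
}

unsafe extern "C" fn free_user_data<F: FnOnce(i32) + 'static>(user_data: *mut c_void) {
    // Reconstitute the Box so it is dropped exactly once.
    let _ = Box::from_raw(user_data as *mut Option<F>);
}

fn call_async<F: FnOnce(i32) + 'static>(value: i32, func: F) {
    let user_data: Box<Option<F>> = Box::new(Some(func));
    unsafe {
        fake_c_async(
            value,
            trampoline::<F>,
            Box::into_raw(user_data) as *mut c_void,
            free_user_data::<F>,
        );
    }
}

fn main() {
    call_async(42, |v| println!("got {}", v));
}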
gstreamer-video/src/lib.rs

@@ -8,33 +8,11 @@

 #![cfg_attr(feature = "dox", feature(doc_cfg))]

-#[macro_use]
-extern crate bitflags;
-extern crate libc;
-extern crate once_cell;
-
-#[macro_use]
-extern crate cfg_if;
-
-#[macro_use]
-extern crate glib;
-extern crate glib_sys;
-extern crate gobject_sys;
-#[macro_use]
-extern crate gstreamer as gst;
-extern crate futures_channel;
-extern crate futures_util;
-extern crate gstreamer_base as gst_base;
-extern crate gstreamer_base_sys as gst_base_sys;
-extern crate gstreamer_sys as gst_sys;
-extern crate gstreamer_video_sys as gst_video_sys;
-
-#[cfg(test)]
-extern crate itertools;
+pub use ffi;

 macro_rules! assert_initialized_main_thread {
     () => {
-        if unsafe { ::gst_sys::gst_is_initialized() } != ::glib_sys::GTRUE {
+        if unsafe { gst::ffi::gst_is_initialized() } != glib::ffi::GTRUE {
             panic!("GStreamer has not been initialized. Call `gst::init` first.");
         }
     };

@@ -49,13 +27,13 @@ macro_rules! skip_assert_initialized {
 #[allow(clippy::match_same_arms)]
 #[allow(unused_imports)]
 mod auto;
-pub use auto::*;
+pub use crate::auto::*;

 mod caps_features;
 #[cfg(any(feature = "v1_16", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
-pub use caps_features::{CAPS_FEATURES_FORMAT_INTERLACED, CAPS_FEATURE_FORMAT_INTERLACED};
-pub use caps_features::{
+pub use crate::caps_features::{CAPS_FEATURES_FORMAT_INTERLACED, CAPS_FEATURE_FORMAT_INTERLACED};
+pub use crate::caps_features::{
     CAPS_FEATURES_META_GST_VIDEO_AFFINE_TRANSFORMATION_META,
     CAPS_FEATURES_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, CAPS_FEATURES_META_GST_VIDEO_META,
     CAPS_FEATURES_META_GST_VIDEO_OVERLAY_COMPOSITION,

@@ -64,33 +42,35 @@ pub use caps_features::{
     CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
 };
 mod video_format;
-pub use video_format::*;
+pub use crate::video_format::*;
 mod video_format_info;
-pub use video_format_info::*;
+pub use crate::video_format_info::*;
 mod video_info;
-pub use video_info::*;
+pub use crate::video_info::*;
 pub mod video_frame;
-pub use video_frame::{VideoBufferExt, VideoFrame, VideoFrameRef};
+pub use crate::video_frame::{VideoBufferExt, VideoFrame, VideoFrameRef};
 mod video_overlay;
-pub use video_overlay::{is_video_overlay_prepare_window_handle_message, VideoOverlayExtManual};
+pub use crate::video_overlay::{
+    is_video_overlay_prepare_window_handle_message, VideoOverlayExtManual,
+};
 pub mod video_event;
-pub use video_event::{
+pub use crate::video_event::{
     DownstreamForceKeyUnitEvent, ForceKeyUnitEvent, StillFrameEvent, UpstreamForceKeyUnitEvent,
 };
 mod functions;
-pub use functions::*;
+pub use crate::functions::*;
 mod video_rectangle;
-pub use video_rectangle::*;
+pub use crate::video_rectangle::*;
 mod video_overlay_composition;
-pub use video_overlay_composition::*;
+pub use crate::video_overlay_composition::*;
 pub mod video_meta;
 #[cfg(any(feature = "v1_16", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
-pub use video_meta::VideoCaptionMeta;
+pub use crate::video_meta::VideoCaptionMeta;
 #[cfg(any(feature = "v1_18", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
-pub use video_meta::{VideoAFDMeta, VideoBarMeta};
-pub use video_meta::{
+pub use crate::video_meta::{VideoAFDMeta, VideoBarMeta};
+pub use crate::video_meta::{
     VideoAffineTransformationMeta, VideoCropMeta, VideoMeta, VideoOverlayCompositionMeta,
     VideoRegionOfInterestMeta,
 };

@@ -99,30 +79,30 @@ pub use video_meta::{
 mod video_time_code;
 #[cfg(any(feature = "v1_10", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_10")))]
-pub use video_time_code::{ValidVideoTimeCode, VideoTimeCode, VideoTimeCodeMeta};
+pub use crate::video_time_code::{ValidVideoTimeCode, VideoTimeCode, VideoTimeCodeMeta};
 #[cfg(any(feature = "v1_12", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
 mod video_time_code_interval;
 #[cfg(any(feature = "v1_12", feature = "dox"))]
 #[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
-pub use video_time_code_interval::VideoTimeCodeInterval;
+pub use crate::video_time_code_interval::VideoTimeCodeInterval;
 mod video_buffer_pool;
-pub use video_buffer_pool::{
+pub use crate::video_buffer_pool::{
     VideoAlignment, VideoBufferPoolConfig, BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META,
     BUFFER_POOL_OPTION_VIDEO_ALIGNMENT, BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META,
     BUFFER_POOL_OPTION_VIDEO_META,
 };
 pub mod video_converter;
-pub use video_converter::{VideoConverter, VideoConverterConfig};
+pub use crate::video_converter::{VideoConverter, VideoConverterConfig};

 mod video_codec_frame;
 mod video_decoder;
-pub use video_decoder::VideoDecoderExtManual;
+pub use crate::video_decoder::VideoDecoderExtManual;
 mod video_encoder;
-pub use video_codec_frame::VideoCodecFrame;
-pub use video_encoder::VideoEncoderExtManual;
+pub use crate::video_codec_frame::VideoCodecFrame;
+pub use crate::video_encoder::VideoEncoderExtManual;
 pub mod video_codec_state;
-pub use video_codec_state::{VideoCodecState, VideoCodecStateContext};
+pub use crate::video_codec_state::{VideoCodecState, VideoCodecStateContext};
 mod utils;

 pub const VIDEO_ENCODER_FLOW_NEED_DATA: gst::FlowSuccess = gst::FlowSuccess::CustomSuccess;

@@ -134,13 +114,13 @@ pub mod prelude {
     pub use glib::prelude::*;
     pub use gst::prelude::*;

-    pub use auto::traits::*;
-    pub use video_buffer_pool::VideoBufferPoolConfig;
-    pub use video_decoder::VideoDecoderExtManual;
-    pub use video_encoder::VideoEncoderExtManual;
-    pub use video_format::VideoFormatIteratorExt;
-    pub use video_frame::VideoBufferExt;
-    pub use video_overlay::VideoOverlayExtManual;
+    pub use crate::auto::traits::*;
+    pub use crate::video_buffer_pool::VideoBufferPoolConfig;
+    pub use crate::video_decoder::VideoDecoderExtManual;
+    pub use crate::video_encoder::VideoEncoderExtManual;
+    pub use crate::video_format::VideoFormatIteratorExt;
+    pub use crate::video_frame::VideoBufferExt;
+    pub use crate::video_overlay::VideoOverlayExtManual;
 }

 pub mod subclass;
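Besides the `crate::` path changes, the lib.rs hunk replaces the whole block of `extern crate` aliases with a single `pub use ffi;`. That re-export means code depending on this crate can reach the raw `gstreamer-video-sys` items through it instead of adding its own -sys dependency. A hedged sketch of what that enables in a downstream crate (the `gst_video` import name is an assumption, not part of this diff):

// Assumed downstream Cargo.toml entry:
//   gst_video = { package = "gstreamer-video", ... }

use glib::translate::ToGlibPtr;

// Raw FFI pointers are reachable through the `ffi` re-export added in this commit.
fn raw_decoder_ptr(dec: &gst_video::VideoDecoder) -> *mut gst_video::ffi::GstVideoDecoder {
    dec.to_glib_none().0
}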
gstreamer-video/src/subclass/video_decoder.rs

@@ -7,22 +7,16 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use glib_sys;
-use gst_sys;
-use gst_video_sys;
-
 use glib::prelude::*;
 use glib::subclass::prelude::*;
 use glib::translate::*;

-use gst;
 use gst::subclass::prelude::*;
-use gst_base;

 use crate::prelude::*;
-use video_codec_state::{Readable, VideoCodecState};
-use VideoCodecFrame;
-use VideoDecoder;
+use crate::video_codec_state::{Readable, VideoCodecState};
+use crate::VideoCodecFrame;
+use crate::VideoDecoder;

 pub trait VideoDecoderImpl: VideoDecoderImplExt + ElementImpl {
     fn open(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {

@@ -184,8 +178,7 @@ impl<T: VideoDecoderImpl> VideoDecoderImplExt for T {
     fn parent_open(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
         unsafe {
             let data = T::type_data();
-            let parent_class =
-                data.as_ref().get_parent_class() as *mut gst_video_sys::GstVideoDecoderClass;
+            let parent_class = data.as_ref().get_parent_class() as *mut ffi::GstVideoDecoderClass;
             (*parent_class)
                 .open
                 .map(|f| {

@@ -196,7 +189,7 @@ impl<T: VideoDecoderImpl> VideoDecoderImplExt for T {
             {
                 Ok(())
             } else {
-                Err(gst_error_msg!(
+                Err(gst::gst_error_msg!(
                     gst::CoreError::StateChange,
                     ["Parent function `open` failed"]
                 ))

The same substitutions repeat, hunk after hunk, through the remaining `parent_*` methods (`parent_close`, `parent_start`, `parent_stop`, `parent_finish`, `parent_drain`, `parent_set_format`, `parent_parse`, `parent_handle_frame`, `parent_flush`, `parent_negotiate`, `parent_get_caps`, `parent_sink_event`, `parent_sink_query`, `parent_src_event`, `parent_src_query`, `parent_propose_allocation`, `parent_decide_allocation`): every `gst_video_sys::GstVideoDecoderClass` cast becomes `ffi::GstVideoDecoderClass`, `gst_error_msg!` becomes `gst::gst_error_msg!`, and `gst_result_from_gboolean!` becomes `gst::gst_result_from_gboolean!` in `parent_set_format` and `parent_negotiate`.

The `unsafe extern "C"` trampolines (`video_decoder_open` through `video_decoder_decide_allocation`) get the matching FFI-path updates, as in:

@@ -592,8 +568,8 @@ where
 unsafe extern "C" fn video_decoder_open<T: VideoDecoderImpl>(
-    ptr: *mut gst_video_sys::GstVideoDecoder,
-) -> glib_sys::gboolean
+    ptr: *mut ffi::GstVideoDecoder,
+) -> glib::ffi::gboolean
 where
     T::Instance: PanicPoison,
 {

@@ -601,7 +577,7 @@ where
     let imp = instance.get_impl();
     let wrap: Borrowed<VideoDecoder> = from_glib_borrow(ptr);

-    gst_panic_to_error!(&wrap, &instance.panicked(), false, {
+    gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
         match imp.open(wrap.unsafe_cast_ref()) {
             Ok(()) => true,
             Err(err) => {

Across the remaining trampolines the rename is applied to every FFI type and macro: `gst_sys::GstFlowReturn`, `GstCaps`, `GstEvent` and `GstQuery` become `gst::ffi::…`; `glib_sys::gboolean` and `gpointer` become `glib::ffi::…`; `gst_base_sys::GstAdapter` becomes `gst_base::ffi::GstAdapter`; `gst_video_sys::GstVideoCodecState`, `GstVideoCodecFrame`, `gst_video_codec_state_ref` and `gst_video_codec_frame_ref` become `ffi::…` in `video_decoder_set_format`, `video_decoder_parse` and `video_decoder_handle_frame`; and every `gst_panic_to_error!` invocation becomes `gst::gst_panic_to_error!`.
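All of the `parent_*` methods above share one shape: look up the subclass type data, cast the stored parent class pointer to the FFI class struct (now `ffi::GstVideoDecoderClass`), and chain up through the optional C virtual function if the parent provides one. A self-contained sketch of that "optional vfunc" pattern, with plain Rust types standing in for the GObject class machinery:

// Stand-ins for the FFI class struct and instance pointer used by the real code.
struct FakeDecoderClass {
    // C virtual methods are nullable function pointers, hence Option<...>.
    open: Option<unsafe extern "C" fn(*mut FakeDecoder) -> i32>,
}

struct FakeDecoder;

unsafe extern "C" fn base_open(_dec: *mut FakeDecoder) -> i32 {
    1 // gboolean TRUE
}

fn parent_open(parent_class: &FakeDecoderClass, instance: *mut FakeDecoder) -> Result<(), String> {
    // Mirror of parent_open()/parent_close()/... above: if the parent class
    // provides the vfunc, call it and map its boolean result to a Result; if it
    // does not, chaining up is a successful no-op.
    parent_class
        .open
        .map(|f| {
            if unsafe { f(instance) } != 0 {
                Ok(())
            } else {
                Err(String::from("Parent function `open` failed"))
            }
        })
        .unwrap_or(Ok(()))
}

fn main() {
    let class = FakeDecoderClass { open: Some(base_open) };
    let mut dec = FakeDecoder;
    assert!(parent_open(&class, &mut dec as *mut _).is_ok());
}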
gstreamer-video/src/subclass/video_encoder.rs

@@ -7,21 +7,16 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use glib_sys;
-use gst_sys;
-use gst_video_sys;
-
 use glib::prelude::*;
 use glib::subclass::prelude::*;
 use glib::translate::*;

-use gst;
 use gst::subclass::prelude::*;

 use crate::prelude::*;
-use video_codec_state::{Readable, VideoCodecState};
-use VideoCodecFrame;
-use VideoEncoder;
+use crate::video_codec_state::{Readable, VideoCodecState};
+use crate::VideoCodecFrame;
+use crate::VideoEncoder;

 pub trait VideoEncoderImpl: VideoEncoderImplExt + ElementImpl {
     fn open(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {

@@ -159,8 +154,7 @@ impl<T: VideoEncoderImpl> VideoEncoderImplExt for T {
     fn parent_open(&self, element: &Self::Type) -> Result<(), gst::ErrorMessage> {
         unsafe {
             let data = T::type_data();
-            let parent_class =
-                data.as_ref().get_parent_class() as *mut gst_video_sys::GstVideoEncoderClass;
+            let parent_class = data.as_ref().get_parent_class() as *mut ffi::GstVideoEncoderClass;
             (*parent_class)
                 .open
                 .map(|f| {

@@ -171,7 +165,7 @@ impl<T: VideoEncoderImpl> VideoEncoderImplExt for T {
             {
                 Ok(())
             } else {
-                Err(gst_error_msg!(
+                Err(gst::gst_error_msg!(
                     gst::CoreError::StateChange,
                     ["Parent function `open` failed"]
                 ))

As in the decoder, the identical substitutions run through the remaining `parent_*` methods (`parent_close`, `parent_start`, `parent_stop`, `parent_finish`, `parent_set_format`, `parent_handle_frame`, `parent_flush`, `parent_negotiate`, `parent_get_caps`, `parent_sink_event`, `parent_sink_query`, `parent_src_event`, `parent_src_query`, `parent_propose_allocation`, `parent_decide_allocation`) and through the `video_encoder_*` trampolines: `gst_video_sys::GstVideoEncoderClass`, `GstVideoEncoder`, `GstVideoCodecState` and `GstVideoCodecFrame` become `ffi::…`, `glib_sys::gboolean` becomes `glib::ffi::gboolean`, `gst_sys::GstFlowReturn` becomes `gst::ffi::GstFlowReturn`, `gst_video_sys::gst_video_codec_state_ref` becomes `ffi::gst_video_codec_state_ref`, and the `gst_error_msg!`, `gst_result_from_gboolean!` and `gst_panic_to_error!` macros are invoked through the `gst::` path.

@@ -661,15 +640,15 @@ where
     let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
     let wrap_frame = VideoCodecFrame::new(frame, &*wrap);

-    gst_panic_to_error!(&wrap, &instance.panicked(), gst::FlowReturn::Error, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), gst::FlowReturn::Error, {
|
||||
imp.handle_frame(wrap.unsafe_cast_ref(), wrap_frame).into()
|
||||
})
|
||||
.to_glib()
|
||||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_flush<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -677,15 +656,15 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
VideoEncoderImpl::flush(imp, wrap.unsafe_cast_ref())
|
||||
})
|
||||
.to_glib()
|
||||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_negotiate<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -693,7 +672,7 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
match imp.negotiate(wrap.unsafe_cast_ref()) {
|
||||
Ok(()) => true,
|
||||
Err(err) => {
|
||||
|
@ -706,9 +685,9 @@ where
|
|||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_getcaps<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
filter: *mut gst_sys::GstCaps,
|
||||
) -> *mut gst_sys::GstCaps
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
filter: *mut gst::ffi::GstCaps,
|
||||
) -> *mut gst::ffi::GstCaps
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -716,7 +695,7 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), gst::Caps::new_empty(), {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), gst::Caps::new_empty(), {
|
||||
VideoEncoderImpl::get_caps(
|
||||
imp,
|
||||
wrap.unsafe_cast_ref(),
|
||||
|
@ -729,9 +708,9 @@ where
|
|||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_sink_event<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
event: *mut gst_sys::GstEvent,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
event: *mut gst::ffi::GstEvent,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -739,16 +718,16 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
imp.sink_event(wrap.unsafe_cast_ref(), from_glib_full(event))
|
||||
})
|
||||
.to_glib()
|
||||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_sink_query<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
query: *mut gst_sys::GstQuery,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
query: *mut gst::ffi::GstQuery,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -756,16 +735,16 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
imp.sink_query(wrap.unsafe_cast_ref(), gst::QueryRef::from_mut_ptr(query))
|
||||
})
|
||||
.to_glib()
|
||||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_src_event<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
event: *mut gst_sys::GstEvent,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
event: *mut gst::ffi::GstEvent,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -773,16 +752,16 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
imp.src_event(wrap.unsafe_cast_ref(), from_glib_full(event))
|
||||
})
|
||||
.to_glib()
|
||||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_src_query<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
query: *mut gst_sys::GstQuery,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
query: *mut gst::ffi::GstQuery,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -790,16 +769,16 @@ where
|
|||
let imp = instance.get_impl();
|
||||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
imp.src_query(wrap.unsafe_cast_ref(), gst::QueryRef::from_mut_ptr(query))
|
||||
})
|
||||
.to_glib()
|
||||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_propose_allocation<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
query: *mut gst_sys::GstQuery,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
query: *mut gst::ffi::GstQuery,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -808,7 +787,7 @@ where
|
|||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
let query = gst::QueryRef::from_mut_ptr(query);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
match imp.propose_allocation(wrap.unsafe_cast_ref(), query) {
|
||||
Ok(()) => true,
|
||||
Err(err) => {
|
||||
|
@ -821,9 +800,9 @@ where
|
|||
}
|
||||
|
||||
unsafe extern "C" fn video_encoder_decide_allocation<T: VideoEncoderImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoEncoder,
|
||||
query: *mut gst_sys::GstQuery,
|
||||
) -> glib_sys::gboolean
|
||||
ptr: *mut ffi::GstVideoEncoder,
|
||||
query: *mut gst::ffi::GstQuery,
|
||||
) -> glib::ffi::gboolean
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -832,7 +811,7 @@ where
|
|||
let wrap: Borrowed<VideoEncoder> = from_glib_borrow(ptr);
|
||||
let query = gst::QueryRef::from_mut_ptr(query);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), false, {
|
||||
match imp.decide_allocation(wrap.unsafe_cast_ref(), query) {
|
||||
Ok(()) => true,
|
||||
Err(err) => {
|
||||
|
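
A minimal usage sketch (not part of this commit), showing how the helper macros renamed in the hunks above are reached through the `gst::` prefix under the 2018 edition instead of a glob import; the function name is hypothetical:

// Returning an error message when a parent vfunc call fails, using the
// path-qualified macro exactly as in the hunks above.
fn parent_call_failed() -> Result<(), gst::ErrorMessage> {
    Err(gst::gst_error_msg!(
        gst::CoreError::StateChange,
        ["Parent function `propose_allocation` failed"]
    ))
}
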

@@ -6,18 +6,14 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use gst_sys;
use gst_video_sys;

use glib::prelude::*;
use glib::subclass::prelude::*;
use glib::translate::*;

use gst;
use gst::subclass::prelude::*;
use gst_base::subclass::prelude::*;

use VideoSink;
use crate::VideoSink;

pub trait VideoSinkImpl: VideoSinkImplExt + BaseSinkImpl + ElementImpl {
fn show_frame(
@ -45,8 +41,7 @@ impl<T: VideoSinkImpl> VideoSinkImplExt for T {
|
|||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
unsafe {
|
||||
let data = T::type_data();
|
||||
let parent_class =
|
||||
data.as_ref().get_parent_class() as *mut gst_video_sys::GstVideoSinkClass;
|
||||
let parent_class = data.as_ref().get_parent_class() as *mut ffi::GstVideoSinkClass;
|
||||
(*parent_class)
|
||||
.show_frame
|
||||
.map(|f| {
|
||||
|
@ -73,9 +68,9 @@ where
|
|||
}
|
||||
|
||||
unsafe extern "C" fn video_sink_show_frame<T: VideoSinkImpl>(
|
||||
ptr: *mut gst_video_sys::GstVideoSink,
|
||||
buffer: *mut gst_sys::GstBuffer,
|
||||
) -> gst_sys::GstFlowReturn
|
||||
ptr: *mut ffi::GstVideoSink,
|
||||
buffer: *mut gst::ffi::GstBuffer,
|
||||
) -> gst::ffi::GstFlowReturn
|
||||
where
|
||||
T::Instance: PanicPoison,
|
||||
{
|
||||
|
@ -84,7 +79,7 @@ where
|
|||
let wrap: Borrowed<VideoSink> = from_glib_borrow(ptr);
|
||||
let buffer = from_glib_borrow(buffer);
|
||||
|
||||
gst_panic_to_error!(&wrap, &instance.panicked(), gst::FlowReturn::Error, {
|
||||
gst::gst_panic_to_error!(&wrap, &instance.panicked(), gst::FlowReturn::Error, {
|
||||
imp.show_frame(wrap.unsafe_cast_ref(), &buffer).into()
|
||||
})
|
||||
.to_glib()

@@ -7,6 +7,6 @@
// except according to those terms.

pub trait HasStreamLock {
fn get_stream_lock(&self) -> *mut glib_sys::GRecMutex;
fn get_element_as_ptr(&self) -> *const gst_sys::GstElement;
fn get_stream_lock(&self) -> *mut glib::ffi::GRecMutex;
fn get_element_as_ptr(&self) -> *const gst::ffi::GstElement;
}
@ -16,29 +16,29 @@ use once_cell::sync::Lazy;
|
|||
|
||||
pub static BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META: Lazy<&'static str> =
|
||||
Lazy::new(|| unsafe {
|
||||
CStr::from_ptr(gst_video_sys::GST_BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META)
|
||||
CStr::from_ptr(ffi::GST_BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META)
|
||||
.to_str()
|
||||
.unwrap()
|
||||
});
|
||||
pub static BUFFER_POOL_OPTION_VIDEO_ALIGNMENT: Lazy<&'static str> = Lazy::new(|| unsafe {
|
||||
CStr::from_ptr(gst_video_sys::GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)
|
||||
CStr::from_ptr(ffi::GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)
|
||||
.to_str()
|
||||
.unwrap()
|
||||
});
|
||||
pub static BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META: Lazy<&'static str> =
|
||||
Lazy::new(|| unsafe {
|
||||
CStr::from_ptr(gst_video_sys::GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META)
|
||||
CStr::from_ptr(ffi::GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META)
|
||||
.to_str()
|
||||
.unwrap()
|
||||
});
|
||||
pub static BUFFER_POOL_OPTION_VIDEO_META: Lazy<&'static str> = Lazy::new(|| unsafe {
|
||||
CStr::from_ptr(gst_video_sys::GST_BUFFER_POOL_OPTION_VIDEO_META)
|
||||
CStr::from_ptr(ffi::GST_BUFFER_POOL_OPTION_VIDEO_META)
|
||||
.to_str()
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct VideoAlignment(pub(crate) gst_video_sys::GstVideoAlignment);
|
||||
pub struct VideoAlignment(pub(crate) ffi::GstVideoAlignment);
|
||||
|
||||
impl VideoAlignment {
|
||||
pub fn get_padding_top(&self) -> u32 {
|
||||
|
@ -53,7 +53,7 @@ impl VideoAlignment {
|
|||
pub fn get_padding_right(&self) -> u32 {
|
||||
self.0.padding_right
|
||||
}
|
||||
pub fn get_stride_align(&self) -> &[u32; gst_video_sys::GST_VIDEO_MAX_PLANES as usize] {
|
||||
pub fn get_stride_align(&self) -> &[u32; ffi::GST_VIDEO_MAX_PLANES as usize] {
|
||||
&self.0.stride_align
|
||||
}
|
||||
|
||||
|
@ -62,12 +62,12 @@ impl VideoAlignment {
|
|||
padding_bottom: u32,
|
||||
padding_left: u32,
|
||||
padding_right: u32,
|
||||
stride_align: &[u32; gst_video_sys::GST_VIDEO_MAX_PLANES as usize],
|
||||
stride_align: &[u32; ffi::GST_VIDEO_MAX_PLANES as usize],
|
||||
) -> Self {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
let videoalignment = unsafe {
|
||||
let mut videoalignment: gst_video_sys::GstVideoAlignment = mem::zeroed();
|
||||
let mut videoalignment: ffi::GstVideoAlignment = mem::zeroed();
|
||||
|
||||
videoalignment.padding_top = padding_top;
|
||||
videoalignment.padding_bottom = padding_bottom;
|
||||
|
@ -92,7 +92,7 @@ impl VideoBufferPoolConfig for gst::BufferPoolConfig {
|
|||
fn get_video_alignment(&self) -> Option<VideoAlignment> {
|
||||
unsafe {
|
||||
let mut alignment = mem::MaybeUninit::zeroed();
|
||||
let ret = from_glib(gst_video_sys::gst_buffer_pool_config_get_video_alignment(
|
||||
let ret = from_glib(ffi::gst_buffer_pool_config_get_video_alignment(
|
||||
self.as_ref().as_mut_ptr(),
|
||||
alignment.as_mut_ptr(),
|
||||
));
|
||||
|
@ -106,7 +106,7 @@ impl VideoBufferPoolConfig for gst::BufferPoolConfig {
|
|||
|
||||
fn set_video_alignment(&mut self, align: &VideoAlignment) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_buffer_pool_config_set_video_alignment(
|
||||
ffi::gst_buffer_pool_config_set_video_alignment(
|
||||
self.as_mut().as_mut_ptr(),
|
||||
&align.0 as *const _ as *mut _,
|
||||
)
|
||||
|
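
A minimal sketch (not part of this commit) of using the buffer-pool helpers from the hunks above in application code. The crate-root re-exports and crate aliases are assumptions, GST_VIDEO_MAX_PLANES is taken to be 4, and the stride entries are treated as alignment masks:

use gstreamer as gst;
use gstreamer_video::{VideoAlignment, VideoBufferPoolConfig};

fn request_alignment(config: &mut gst::BufferPoolConfig) {
    // Request 16-byte stride alignment on every plane (values used as masks,
    // i.e. alignment - 1) and no extra padding around the frame.
    let align = VideoAlignment::new(0, 0, 0, 0, &[15; 4]);
    config.set_video_alignment(&align);

    // Reading it back returns None if no alignment was ever set on the config.
    if let Some(current) = config.get_video_alignment() {
        println!("padding_top = {}", current.get_padding_top());
    }
}
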

@@ -7,34 +7,28 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use crate::utils::HasStreamLock;
use crate::VideoCodecFrameFlags;
use glib::translate::*;
use gst;
use gst_video_sys;
use std::fmt;
use std::mem;
use utils::HasStreamLock;
use VideoCodecFrameFlags;

pub struct VideoCodecFrame<'a> {
frame: *mut gst_video_sys::GstVideoCodecFrame,
frame: *mut ffi::GstVideoCodecFrame,
/* GstVideoCodecFrame API isn't safe so protect the frame using the
* element (decoder or encoder) stream lock */
element: &'a dyn HasStreamLock,
}

#[doc(hidden)]
|
||||
impl<'a> ::glib::translate::ToGlibPtr<'a, *mut gst_video_sys::GstVideoCodecFrame>
|
||||
for VideoCodecFrame<'a>
|
||||
{
|
||||
impl<'a> ::glib::translate::ToGlibPtr<'a, *mut ffi::GstVideoCodecFrame> for VideoCodecFrame<'a> {
|
||||
type Storage = &'a Self;
|
||||
|
||||
fn to_glib_none(
|
||||
&'a self,
|
||||
) -> ::glib::translate::Stash<'a, *mut gst_video_sys::GstVideoCodecFrame, Self> {
|
||||
fn to_glib_none(&'a self) -> ::glib::translate::Stash<'a, *mut ffi::GstVideoCodecFrame, Self> {
|
||||
Stash(self.frame, self)
|
||||
}
|
||||
|
||||
fn to_glib_full(&self) -> *mut gst_video_sys::GstVideoCodecFrame {
|
||||
fn to_glib_full(&self) -> *mut ffi::GstVideoCodecFrame {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
@ -65,12 +59,12 @@ impl<'a> fmt::Debug for VideoCodecFrame<'a> {
|
|||
impl<'a> VideoCodecFrame<'a> {
|
||||
// Take ownership of @frame
|
||||
pub(crate) unsafe fn new<T: HasStreamLock>(
|
||||
frame: *mut gst_video_sys::GstVideoCodecFrame,
|
||||
frame: *mut ffi::GstVideoCodecFrame,
|
||||
element: &'a T,
|
||||
) -> Self {
|
||||
skip_assert_initialized!();
|
||||
let stream_lock = element.get_stream_lock();
|
||||
glib_sys::g_rec_mutex_lock(stream_lock);
|
||||
glib::ffi::g_rec_mutex_lock(stream_lock);
|
||||
Self { frame, element }
|
||||
}
|
||||
|
||||
|
@ -161,8 +155,8 @@ impl<'a> VideoCodecFrame<'a> {
|
|||
if ptr.is_null() {
|
||||
None
|
||||
} else {
|
||||
let writable: bool = from_glib(gst_sys::gst_mini_object_is_writable(
|
||||
ptr as *const gst_sys::GstMiniObject,
|
||||
let writable: bool = from_glib(gst::ffi::gst_mini_object_is_writable(
|
||||
ptr as *const gst::ffi::GstMiniObject,
|
||||
));
|
||||
assert!(writable);
|
||||
|
||||
|
@ -176,12 +170,12 @@ impl<'a> VideoCodecFrame<'a> {
|
|||
let prev = (*self.to_glib_none().0).output_buffer;
|
||||
|
||||
if !prev.is_null() {
|
||||
gst_sys::gst_mini_object_unref(prev as *mut gst_sys::GstMiniObject);
|
||||
gst::ffi::gst_mini_object_unref(prev as *mut gst::ffi::GstMiniObject);
|
||||
}
|
||||
|
||||
let ptr = output_buffer.into_ptr();
|
||||
let writable: bool = from_glib(gst_sys::gst_mini_object_is_writable(
|
||||
ptr as *const gst_sys::GstMiniObject,
|
||||
let writable: bool = from_glib(gst::ffi::gst_mini_object_is_writable(
|
||||
ptr as *const gst::ffi::GstMiniObject,
|
||||
));
|
||||
assert!(writable);
|
||||
|
||||
|
@ -194,9 +188,9 @@ impl<'a> VideoCodecFrame<'a> {
|
|||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub unsafe fn into_ptr(self) -> *mut gst_video_sys::GstVideoCodecFrame {
|
||||
pub unsafe fn into_ptr(self) -> *mut ffi::GstVideoCodecFrame {
|
||||
let stream_lock = self.element.get_stream_lock();
|
||||
glib_sys::g_rec_mutex_unlock(stream_lock);
|
||||
glib::ffi::g_rec_mutex_unlock(stream_lock);
|
||||
|
||||
let s = mem::ManuallyDrop::new(self);
|
||||
s.to_glib_none().0
|
||||
|
@ -207,9 +201,9 @@ impl<'a> Drop for VideoCodecFrame<'a> {
|
|||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
let stream_lock = self.element.get_stream_lock();
|
||||
glib_sys::g_rec_mutex_unlock(stream_lock);
|
||||
glib::ffi::g_rec_mutex_unlock(stream_lock);
|
||||
|
||||
gst_video_sys::gst_video_codec_frame_unref(self.frame);
|
||||
ffi::gst_video_codec_frame_unref(self.frame);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,21 +7,17 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use crate::utils::HasStreamLock;
|
||||
use glib::translate::*;
|
||||
use gst_sys;
|
||||
use gst_video_sys;
|
||||
use std::fmt;
|
||||
use std::marker::PhantomData;
|
||||
use std::ptr;
|
||||
use utils::HasStreamLock;
|
||||
|
||||
use gst;
|
||||
|
||||
use video_info::VideoInfo;
|
||||
use crate::video_info::VideoInfo;
|
||||
|
||||
pub trait VideoCodecStateContext<'a> {
|
||||
fn get_element(&self) -> Option<&'a dyn HasStreamLock>;
|
||||
fn get_element_as_ptr(&self) -> *const gst_sys::GstElement;
|
||||
fn get_element_as_ptr(&self) -> *const gst::ffi::GstElement;
|
||||
}
|
||||
|
||||
pub struct InNegotiation<'a> {
|
||||
|
@ -36,7 +32,7 @@ impl<'a> VideoCodecStateContext<'a> for InNegotiation<'a> {
|
|||
Some(self.element)
|
||||
}
|
||||
|
||||
fn get_element_as_ptr(&self) -> *const gst_sys::GstElement {
|
||||
fn get_element_as_ptr(&self) -> *const gst::ffi::GstElement {
|
||||
self.element.get_element_as_ptr()
|
||||
}
|
||||
}
|
||||
|
@ -46,13 +42,13 @@ impl<'a> VideoCodecStateContext<'a> for Readable {
|
|||
None
|
||||
}
|
||||
|
||||
fn get_element_as_ptr(&self) -> *const gst_sys::GstElement {
|
||||
fn get_element_as_ptr(&self) -> *const gst::ffi::GstElement {
|
||||
ptr::null()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VideoCodecState<'a, T: VideoCodecStateContext<'a>> {
|
||||
state: *mut gst_video_sys::GstVideoCodecState,
|
||||
state: *mut ffi::GstVideoCodecState,
|
||||
pub(crate) context: T,
|
||||
phantom: PhantomData<&'a T>,
|
||||
}
|
||||
|
@ -70,7 +66,7 @@ impl<'a, T: VideoCodecStateContext<'a>> fmt::Debug for VideoCodecState<'a, T> {
|
|||
|
||||
impl<'a> VideoCodecState<'a, Readable> {
|
||||
// Take ownership of @state
|
||||
pub(crate) unsafe fn new(state: *mut gst_video_sys::GstVideoCodecState) -> Self {
|
||||
pub(crate) unsafe fn new(state: *mut ffi::GstVideoCodecState) -> Self {
|
||||
skip_assert_initialized!();
|
||||
Self {
|
||||
state,
|
||||
|
@ -83,12 +79,12 @@ impl<'a> VideoCodecState<'a, Readable> {
|
|||
impl<'a> VideoCodecState<'a, InNegotiation<'a>> {
|
||||
// Take ownership of @state
|
||||
pub(crate) unsafe fn new<T: HasStreamLock>(
|
||||
state: *mut gst_video_sys::GstVideoCodecState,
|
||||
state: *mut ffi::GstVideoCodecState,
|
||||
element: &'a T,
|
||||
) -> Self {
|
||||
skip_assert_initialized!();
|
||||
let stream_lock = element.get_stream_lock();
|
||||
glib_sys::g_rec_mutex_lock(stream_lock);
|
||||
glib::ffi::g_rec_mutex_lock(stream_lock);
|
||||
Self {
|
||||
state,
|
||||
context: InNegotiation { element },
|
||||
|
@ -141,7 +137,7 @@ impl<'a, T: VideoCodecStateContext<'a>> VideoCodecState<'a, T> {
|
|||
}
|
||||
}
|
||||
#[doc(hidden)]
|
||||
pub fn as_mut_ptr(&self) -> *mut gst_video_sys::GstVideoCodecState {
|
||||
pub fn as_mut_ptr(&self) -> *mut ffi::GstVideoCodecState {
|
||||
self.state
|
||||
}
|
||||
}
|
||||
|
@ -151,9 +147,9 @@ impl<'a, T: VideoCodecStateContext<'a>> Drop for VideoCodecState<'a, T> {
|
|||
unsafe {
|
||||
if let Some(element) = self.context.get_element() {
|
||||
let stream_lock = element.get_stream_lock();
|
||||
glib_sys::g_rec_mutex_unlock(stream_lock);
|
||||
glib::ffi::g_rec_mutex_unlock(stream_lock);
|
||||
}
|
||||
gst_video_sys::gst_video_codec_state_unref(self.state);
|
||||
ffi::gst_video_codec_state_unref(self.state);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -170,12 +166,12 @@ impl<'a> VideoCodecState<'a, InNegotiation<'a>> {
|
|||
let prev = (*self.as_mut_ptr()).caps;
|
||||
|
||||
if !prev.is_null() {
|
||||
gst_sys::gst_mini_object_unref(prev as *mut gst_sys::GstMiniObject)
|
||||
gst::ffi::gst_mini_object_unref(prev as *mut gst::ffi::GstMiniObject)
|
||||
}
|
||||
|
||||
ptr::write(
|
||||
&mut (*self.as_mut_ptr()).caps,
|
||||
gst_sys::gst_mini_object_ref(caps.as_mut_ptr() as *mut _) as *mut _,
|
||||
gst::ffi::gst_mini_object_ref(caps.as_mut_ptr() as *mut _) as *mut _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -185,12 +181,12 @@ impl<'a> VideoCodecState<'a, InNegotiation<'a>> {
|
|||
let prev = (*self.as_mut_ptr()).codec_data;
|
||||
|
||||
if !prev.is_null() {
|
||||
gst_sys::gst_mini_object_unref(prev as *mut gst_sys::GstMiniObject)
|
||||
gst::ffi::gst_mini_object_unref(prev as *mut gst::ffi::GstMiniObject)
|
||||
}
|
||||
|
||||
ptr::write(
|
||||
&mut (*self.as_mut_ptr()).codec_data,
|
||||
gst_sys::gst_mini_object_ref(codec_data.as_mut_ptr() as *mut _) as *mut _,
|
||||
gst::ffi::gst_mini_object_ref(codec_data.as_mut_ptr() as *mut _) as *mut _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -200,12 +196,12 @@ impl<'a> VideoCodecState<'a, InNegotiation<'a>> {
|
|||
let prev = (*self.as_mut_ptr()).allocation_caps;
|
||||
|
||||
if !prev.is_null() {
|
||||
gst_sys::gst_mini_object_unref(prev as *mut gst_sys::GstMiniObject)
|
||||
gst::ffi::gst_mini_object_unref(prev as *mut gst::ffi::GstMiniObject)
|
||||
}
|
||||
|
||||
ptr::write(
|
||||
&mut (*self.as_mut_ptr()).allocation_caps,
|
||||
gst_sys::gst_mini_object_ref(allocation_caps.as_mut_ptr() as *mut _) as *mut _,
|
||||
gst::ffi::gst_mini_object_ref(allocation_caps.as_mut_ptr() as *mut _) as *mut _,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -214,7 +210,7 @@ impl<'a> VideoCodecState<'a, InNegotiation<'a>> {
|
|||
impl<'a> Clone for VideoCodecState<'a, Readable> {
|
||||
fn clone(&self) -> Self {
|
||||
unsafe {
|
||||
let state = gst_video_sys::gst_video_codec_state_ref(self.state);
|
||||
let state = ffi::gst_video_codec_state_ref(self.state);
|
||||
Self::new(state)
|
||||
}
|
||||
}

@@ -6,23 +6,19 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use gst_video_sys;

use glib;
use glib::translate::ToGlibPtr;
use gst;

use std::convert;
use std::ops;
use std::ptr;

#[derive(Debug)]
pub struct VideoConverter(ptr::NonNull<gst_video_sys::GstVideoConverter>);
pub struct VideoConverter(ptr::NonNull<ffi::GstVideoConverter>);

impl Drop for VideoConverter {
fn drop(&mut self) {
unsafe {
gst_video_sys::gst_video_converter_free(self.0.as_ptr());
ffi::gst_video_converter_free(self.0.as_ptr());
}
}
}

@ -32,27 +28,27 @@ unsafe impl Sync for VideoConverter {}
|
|||
|
||||
impl VideoConverter {
|
||||
pub fn new(
|
||||
in_info: &::VideoInfo,
|
||||
out_info: &::VideoInfo,
|
||||
in_info: &crate::VideoInfo,
|
||||
out_info: &crate::VideoInfo,
|
||||
config: Option<VideoConverterConfig>,
|
||||
) -> Result<Self, glib::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
if in_info.fps() != out_info.fps() {
|
||||
return Err(glib_bool_error!("Can't do framerate conversion"));
|
||||
return Err(glib::glib_bool_error!("Can't do framerate conversion"));
|
||||
}
|
||||
|
||||
if in_info.interlace_mode() != out_info.interlace_mode() {
|
||||
return Err(glib_bool_error!("Can't do interlacing conversion"));
|
||||
return Err(glib::glib_bool_error!("Can't do interlacing conversion"));
|
||||
}
|
||||
|
||||
unsafe {
|
||||
let ptr = gst_video_sys::gst_video_converter_new(
|
||||
let ptr = ffi::gst_video_converter_new(
|
||||
in_info.to_glib_none().0 as *mut _,
|
||||
out_info.to_glib_none().0 as *mut _,
|
||||
config.map(|s| s.0.into_ptr()).unwrap_or(ptr::null_mut()),
|
||||
);
|
||||
if ptr.is_null() {
|
||||
Err(glib_bool_error!("Failed to create video converter"))
|
||||
Err(glib::glib_bool_error!("Failed to create video converter"))
|
||||
} else {
|
||||
Ok(VideoConverter(ptr::NonNull::new_unchecked(ptr)))
|
||||
}
|
||||
|
@ -62,7 +58,7 @@ impl VideoConverter {
|
|||
pub fn get_config(&self) -> VideoConverterConfig {
|
||||
unsafe {
|
||||
VideoConverterConfig(
|
||||
gst::StructureRef::from_glib_borrow(gst_video_sys::gst_video_converter_get_config(
|
||||
gst::StructureRef::from_glib_borrow(ffi::gst_video_converter_get_config(
|
||||
self.0.as_ptr(),
|
||||
))
|
||||
.to_owned(),
|
||||
|
@ -72,35 +68,27 @@ impl VideoConverter {
|
|||
|
||||
pub fn set_config(&mut self, config: VideoConverterConfig) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_converter_set_config(self.0.as_ptr(), config.0.into_ptr());
|
||||
ffi::gst_video_converter_set_config(self.0.as_ptr(), config.0.into_ptr());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn frame<T>(
|
||||
&self,
|
||||
src: &::VideoFrame<T>,
|
||||
dest: &mut ::VideoFrame<::video_frame::Writable>,
|
||||
src: &crate::VideoFrame<T>,
|
||||
dest: &mut crate::VideoFrame<crate::video_frame::Writable>,
|
||||
) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_converter_frame(
|
||||
self.0.as_ptr(),
|
||||
src.as_ptr(),
|
||||
dest.as_mut_ptr(),
|
||||
);
|
||||
ffi::gst_video_converter_frame(self.0.as_ptr(), src.as_ptr(), dest.as_mut_ptr());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn frame_ref<T>(
|
||||
&self,
|
||||
src: &::VideoFrameRef<T>,
|
||||
dest: &mut ::VideoFrameRef<&mut gst::BufferRef>,
|
||||
src: &crate::VideoFrameRef<T>,
|
||||
dest: &mut crate::VideoFrameRef<&mut gst::BufferRef>,
|
||||
) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_converter_frame(
|
||||
self.0.as_ptr(),
|
||||
src.as_ptr(),
|
||||
dest.as_mut_ptr(),
|
||||
);
|
||||
ffi::gst_video_converter_frame(self.0.as_ptr(), src.as_ptr(), dest.as_mut_ptr());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -148,7 +136,9 @@ impl convert::TryFrom<gst::Structure> for VideoConverterConfig {
|
|||
if v.get_name() == "GstVideoConverter" {
|
||||
Ok(VideoConverterConfig(v))
|
||||
} else {
|
||||
Err(glib_bool_error!("Structure is no VideoConverterConfig"))
|
||||
Err(glib::glib_bool_error!(
|
||||
"Structure is no VideoConverterConfig"
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -174,26 +164,26 @@ impl VideoConverterConfig {
|
|||
VideoConverterConfig(gst::Structure::new_empty("GstVideoConverter"))
|
||||
}
|
||||
|
||||
pub fn set_resampler_method(&mut self, v: ::VideoResamplerMethod) {
|
||||
pub fn set_resampler_method(&mut self, v: crate::VideoResamplerMethod) {
|
||||
self.0.set("GstVideoConverter.resampler-method", &v);
|
||||
}
|
||||
|
||||
pub fn get_resampler_method(&self) -> ::VideoResamplerMethod {
|
||||
pub fn get_resampler_method(&self) -> crate::VideoResamplerMethod {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.resampler-method")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoResamplerMethod::Cubic)
|
||||
.unwrap_or(crate::VideoResamplerMethod::Cubic)
|
||||
}
|
||||
|
||||
pub fn set_chroma_resampler_method(&mut self, v: ::VideoResamplerMethod) {
|
||||
pub fn set_chroma_resampler_method(&mut self, v: crate::VideoResamplerMethod) {
|
||||
self.0.set("GstVideoConverter.chroma-resampler-method", &v);
|
||||
}
|
||||
|
||||
pub fn get_chroma_resampler_method(&self) -> ::VideoResamplerMethod {
|
||||
pub fn get_chroma_resampler_method(&self) -> crate::VideoResamplerMethod {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.chroma-resampler-method")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoResamplerMethod::Linear)
|
||||
.unwrap_or(crate::VideoResamplerMethod::Linear)
|
||||
}
|
||||
|
||||
pub fn set_resampler_taps(&mut self, v: u32) {
|
||||
|
@ -207,15 +197,15 @@ impl VideoConverterConfig {
|
|||
.unwrap_or(0)
|
||||
}
|
||||
|
||||
pub fn set_dither_method(&mut self, v: ::VideoDitherMethod) {
|
||||
pub fn set_dither_method(&mut self, v: crate::VideoDitherMethod) {
|
||||
self.0.set("GstVideoConverter.dither-method", &v);
|
||||
}
|
||||
|
||||
pub fn get_dither_method(&self) -> ::VideoDitherMethod {
|
||||
pub fn get_dither_method(&self) -> crate::VideoDitherMethod {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.dither-method")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoDitherMethod::Bayer)
|
||||
.unwrap_or(crate::VideoDitherMethod::Bayer)
|
||||
}
|
||||
|
||||
pub fn set_dither_quantization(&mut self, v: u32) {
|
||||
|
@ -351,15 +341,15 @@ impl VideoConverterConfig {
|
|||
.unwrap_or(1.0)
|
||||
}
|
||||
|
||||
pub fn set_alpha_mode(&mut self, v: ::VideoAlphaMode) {
|
||||
pub fn set_alpha_mode(&mut self, v: crate::VideoAlphaMode) {
|
||||
self.0.set("GstVideoConverter.alpha-mode", &v);
|
||||
}
|
||||
|
||||
pub fn get_alpha_mode(&self) -> ::VideoAlphaMode {
|
||||
pub fn get_alpha_mode(&self) -> crate::VideoAlphaMode {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.alpha-mode")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoAlphaMode::Copy)
|
||||
.unwrap_or(crate::VideoAlphaMode::Copy)
|
||||
}
|
||||
|
||||
pub fn set_border_argb(&mut self, v: u32) {
|
||||
|
@ -373,48 +363,48 @@ impl VideoConverterConfig {
|
|||
.unwrap_or(0xff_00_00_00)
|
||||
}
|
||||
|
||||
pub fn set_chroma_mode(&mut self, v: ::VideoChromaMode) {
|
||||
pub fn set_chroma_mode(&mut self, v: crate::VideoChromaMode) {
|
||||
self.0.set("GstVideoConverter.chroma-mode", &v);
|
||||
}
|
||||
|
||||
pub fn get_chroma_mode(&self) -> ::VideoChromaMode {
|
||||
pub fn get_chroma_mode(&self) -> crate::VideoChromaMode {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.chroma-mode")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoChromaMode::Full)
|
||||
.unwrap_or(crate::VideoChromaMode::Full)
|
||||
}
|
||||
|
||||
pub fn set_matrix_mode(&mut self, v: ::VideoMatrixMode) {
|
||||
pub fn set_matrix_mode(&mut self, v: crate::VideoMatrixMode) {
|
||||
self.0.set("GstVideoConverter.matrix-mode", &v);
|
||||
}
|
||||
|
||||
pub fn get_matrix_mode(&self) -> ::VideoMatrixMode {
|
||||
pub fn get_matrix_mode(&self) -> crate::VideoMatrixMode {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.matrix-mode")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoMatrixMode::Full)
|
||||
.unwrap_or(crate::VideoMatrixMode::Full)
|
||||
}
|
||||
|
||||
pub fn set_gamma_mode(&mut self, v: ::VideoGammaMode) {
|
||||
pub fn set_gamma_mode(&mut self, v: crate::VideoGammaMode) {
|
||||
self.0.set("GstVideoConverter.gamma-mode", &v);
|
||||
}
|
||||
|
||||
pub fn get_gamma_mode(&self) -> ::VideoGammaMode {
|
||||
pub fn get_gamma_mode(&self) -> crate::VideoGammaMode {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.gamma-mode")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoGammaMode::None)
|
||||
.unwrap_or(crate::VideoGammaMode::None)
|
||||
}
|
||||
|
||||
pub fn set_primaries_mode(&mut self, v: ::VideoPrimariesMode) {
|
||||
pub fn set_primaries_mode(&mut self, v: crate::VideoPrimariesMode) {
|
||||
self.0.set("GstVideoConverter.primaries-mode", &v);
|
||||
}
|
||||
|
||||
pub fn get_primaries_mode(&self) -> ::VideoPrimariesMode {
|
||||
pub fn get_primaries_mode(&self) -> crate::VideoPrimariesMode {
|
||||
self.0
|
||||
.get_optional("GstVideoConverter.primaries-mode")
|
||||
.expect("Wrong type")
|
||||
.unwrap_or(::VideoPrimariesMode::None)
|
||||
.unwrap_or(crate::VideoPrimariesMode::None)
|
||||
}
|
||||
|
||||
pub fn set_threads(&mut self, v: u32) {
|
||||
|
|
|
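
A minimal sketch (not part of this commit) of driving the converter API from the hunks above. `in_info`/`out_info` are assumed to be pre-built `VideoInfo` values with identical framerate and interlace mode, the frames are assumed to be already mapped for those infos, and the module re-export paths are assumptions:

use gstreamer_video as gst_video;

fn convert_one_frame(
    in_info: &gst_video::VideoInfo,
    out_info: &gst_video::VideoInfo,
    src: &gst_video::VideoFrame<gst_video::video_frame::Readable>,
    dest: &mut gst_video::VideoFrame<gst_video::video_frame::Writable>,
) -> Result<(), glib::BoolError> {
    // Passing None keeps the converter defaults; a VideoConverterConfig could
    // be supplied instead to tweak dithering, chroma resampling, threads, etc.
    let converter = gst_video::VideoConverter::new(in_info, out_info, None)?;
    converter.frame(src, dest);
    Ok(())
}
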
@@ -7,33 +7,31 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use glib::object::IsA;
use glib::translate::*;
use gst;
use gst_video_sys;
use std::mem;
use std::ptr;
use utils::HasStreamLock;
use video_codec_state::{InNegotiation, Readable, VideoCodecState, VideoCodecStateContext};
use VideoCodecFrame;
use VideoDecoder;
use VideoFormat;
use crate::utils::HasStreamLock;
use crate::video_codec_state::{InNegotiation, Readable, VideoCodecState, VideoCodecStateContext};
use crate::VideoCodecFrame;
use crate::VideoDecoder;
use crate::VideoFormat;
#[cfg(any(feature = "v1_16", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
use VideoInterlaceMode;
use crate::VideoInterlaceMode;
use glib::object::IsA;
use glib::translate::*;
use std::mem;
use std::ptr;

extern "C" {
fn _gst_video_decoder_error(
dec: *mut gst_video_sys::GstVideoDecoder,
dec: *mut ffi::GstVideoDecoder,
weight: i32,
domain: glib_sys::GQuark,
domain: glib::ffi::GQuark,
code: i32,
txt: *mut libc::c_char,
debug: *mut libc::c_char,
file: *const libc::c_char,
function: *const libc::c_char,
line: i32,
) -> gst_sys::GstFlowReturn;
) -> gst::ffi::GstFlowReturn;
}

pub trait VideoDecoderExtManual: 'static {

@ -106,13 +104,11 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
let params_ptr = params.to_glib_none().0 as *mut _;
|
||||
from_glib(
|
||||
gst_video_sys::gst_video_decoder_allocate_output_frame_with_params(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.to_glib_none().0,
|
||||
params_ptr,
|
||||
),
|
||||
)
|
||||
from_glib(ffi::gst_video_decoder_allocate_output_frame_with_params(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.to_glib_none().0,
|
||||
params_ptr,
|
||||
))
|
||||
};
|
||||
ret.into_result()
|
||||
}
|
||||
|
@ -121,7 +117,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
unsafe {
|
||||
let mut allocator = ptr::null_mut();
|
||||
let mut params = mem::zeroed();
|
||||
gst_video_sys::gst_video_decoder_get_allocator(
|
||||
ffi::gst_video_decoder_get_allocator(
|
||||
self.as_ref().to_glib_none().0,
|
||||
&mut allocator,
|
||||
&mut params,
|
||||
|
@ -132,7 +128,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
|
||||
fn have_frame(&self) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_decoder_have_frame(
|
||||
from_glib(ffi::gst_video_decoder_have_frame(
|
||||
self.as_ref().to_glib_none().0,
|
||||
))
|
||||
};
|
||||
|
@ -141,7 +137,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
|
||||
fn finish_frame(&self, frame: VideoCodecFrame) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_decoder_finish_frame(
|
||||
from_glib(ffi::gst_video_decoder_finish_frame(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.into_ptr(),
|
||||
))
|
||||
|
@ -151,16 +147,13 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
|
||||
fn release_frame(&self, frame: VideoCodecFrame) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_decoder_release_frame(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.into_ptr(),
|
||||
)
|
||||
ffi::gst_video_decoder_release_frame(self.as_ref().to_glib_none().0, frame.into_ptr())
|
||||
}
|
||||
}
|
||||
|
||||
fn drop_frame(&self, frame: VideoCodecFrame) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_decoder_drop_frame(
|
||||
from_glib(ffi::gst_video_decoder_drop_frame(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.into_ptr(),
|
||||
))
|
||||
|
@ -169,11 +162,11 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
}
|
||||
|
||||
fn get_latency(&self) -> (gst::ClockTime, gst::ClockTime) {
|
||||
let mut min_latency = gst_sys::GST_CLOCK_TIME_NONE;
|
||||
let mut max_latency = gst_sys::GST_CLOCK_TIME_NONE;
|
||||
let mut min_latency = gst::ffi::GST_CLOCK_TIME_NONE;
|
||||
let mut max_latency = gst::ffi::GST_CLOCK_TIME_NONE;
|
||||
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_decoder_get_latency(
|
||||
ffi::gst_video_decoder_get_latency(
|
||||
self.as_ref().to_glib_none().0,
|
||||
&mut min_latency,
|
||||
&mut max_latency,
|
||||
|
@ -185,7 +178,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
|
||||
fn set_latency(&self, min_latency: gst::ClockTime, max_latency: gst::ClockTime) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_decoder_set_latency(
|
||||
ffi::gst_video_decoder_set_latency(
|
||||
self.as_ref().to_glib_none().0,
|
||||
min_latency.to_glib(),
|
||||
max_latency.to_glib(),
|
||||
|
@ -195,7 +188,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
|
||||
fn get_frame(&self, frame_number: i32) -> Option<VideoCodecFrame> {
|
||||
let frame = unsafe {
|
||||
gst_video_sys::gst_video_decoder_get_frame(self.as_ref().to_glib_none().0, frame_number)
|
||||
ffi::gst_video_decoder_get_frame(self.as_ref().to_glib_none().0, frame_number)
|
||||
};
|
||||
|
||||
if frame.is_null() {
|
||||
|
@ -207,9 +200,8 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
|
||||
fn get_frames(&self) -> Vec<VideoCodecFrame> {
|
||||
unsafe {
|
||||
let frames =
|
||||
gst_video_sys::gst_video_decoder_get_frames(self.as_ref().to_glib_none().0);
|
||||
let mut iter: *const glib_sys::GList = frames;
|
||||
let frames = ffi::gst_video_decoder_get_frames(self.as_ref().to_glib_none().0);
|
||||
let mut iter: *const glib::ffi::GList = frames;
|
||||
let mut vec = Vec::new();
|
||||
|
||||
while !iter.is_null() {
|
||||
|
@ -220,15 +212,14 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
iter = (*iter).next;
|
||||
}
|
||||
|
||||
glib_sys::g_list_free(frames);
|
||||
glib::ffi::g_list_free(frames);
|
||||
vec
|
||||
}
|
||||
}
|
||||
|
||||
fn get_oldest_frame(&self) -> Option<VideoCodecFrame> {
|
||||
let frame = unsafe {
|
||||
gst_video_sys::gst_video_decoder_get_oldest_frame(self.as_ref().to_glib_none().0)
|
||||
};
|
||||
let frame =
|
||||
unsafe { ffi::gst_video_decoder_get_oldest_frame(self.as_ref().to_glib_none().0) };
|
||||
|
||||
if frame.is_null() {
|
||||
None
|
||||
|
@ -238,9 +229,8 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
}
|
||||
|
||||
fn get_output_state(&self) -> Option<VideoCodecState<'static, Readable>> {
|
||||
let state = unsafe {
|
||||
gst_video_sys::gst_video_decoder_get_output_state(self.as_ref().to_glib_none().0)
|
||||
};
|
||||
let state =
|
||||
unsafe { ffi::gst_video_decoder_get_output_state(self.as_ref().to_glib_none().0) };
|
||||
|
||||
if state.is_null() {
|
||||
None
|
||||
|
@ -261,7 +251,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
Some(reference) => reference.as_mut_ptr(),
|
||||
None => ptr::null_mut(),
|
||||
};
|
||||
gst_video_sys::gst_video_decoder_set_output_state(
|
||||
ffi::gst_video_decoder_set_output_state(
|
||||
self.as_ref().to_glib_none().0,
|
||||
fmt.to_glib(),
|
||||
width,
|
||||
|
@ -292,7 +282,7 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
Some(reference) => reference.as_mut_ptr(),
|
||||
None => ptr::null_mut(),
|
||||
};
|
||||
gst_video_sys::gst_video_decoder_set_interlaced_output_state(
|
||||
ffi::gst_video_decoder_set_interlaced_output_state(
|
||||
self.as_ref().to_glib_none().0,
|
||||
fmt.to_glib(),
|
||||
mode.to_glib(),
|
||||
|
@ -314,11 +304,11 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
output_state: VideoCodecState<'a, InNegotiation<'a>>,
|
||||
) -> Result<(), gst::FlowError> {
|
||||
// Consume output_state so user won't be able to modify it anymore
|
||||
let self_ptr = self.to_glib_none().0 as *const gst_sys::GstElement;
|
||||
let self_ptr = self.to_glib_none().0 as *const gst::ffi::GstElement;
|
||||
assert_eq!(output_state.context.get_element_as_ptr(), self_ptr);
|
||||
|
||||
let ret = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_decoder_negotiate(
|
||||
from_glib(ffi::gst_video_decoder_negotiate(
|
||||
self.as_ref().to_glib_none().0,
|
||||
))
|
||||
};
|
||||
|
@ -356,14 +346,14 @@ impl<O: IsA<VideoDecoder>> VideoDecoderExtManual for O {
|
|||
}
|
||||
|
||||
impl HasStreamLock for VideoDecoder {
|
||||
fn get_stream_lock(&self) -> *mut glib_sys::GRecMutex {
|
||||
let decoder_sys: *const gstreamer_video_sys::GstVideoDecoder = self.to_glib_none().0;
|
||||
fn get_stream_lock(&self) -> *mut glib::ffi::GRecMutex {
|
||||
let decoder_sys: *const ffi::GstVideoDecoder = self.to_glib_none().0;
|
||||
unsafe { &(*decoder_sys).stream_lock as *const _ as usize as *mut _ }
|
||||
}
|
||||
|
||||
fn get_element_as_ptr(&self) -> *const gst_sys::GstElement {
|
||||
let decoder_sys: *const gstreamer_video_sys::GstVideoDecoder = self.to_glib_none().0;
|
||||
decoder_sys as *const gst_sys::GstElement
|
||||
fn get_element_as_ptr(&self) -> *const gst::ffi::GstElement {
|
||||
let decoder_sys: *const ffi::GstVideoDecoder = self.to_glib_none().0;
|
||||
decoder_sys as *const gst::ffi::GstElement
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
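
A minimal sketch (not part of this commit) of calling the latency helpers from the hunks above on some decoder element; the prelude re-export of `VideoDecoderExtManual` and the crate aliases are assumptions:

use gstreamer as gst;
use gstreamer_video::prelude::*;

fn report_fixed_latency(decoder: &gstreamer_video::VideoDecoder) {
    // Roughly one frame of latency at 30 fps, with no upper bound.
    decoder.set_latency(gst::ClockTime::from_mseconds(33), gst::CLOCK_TIME_NONE);

    let (min, max) = decoder.get_latency();
    println!("latency: min {} max {}", min, max);
}
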
@@ -8,16 +8,14 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use crate::utils::HasStreamLock;
use crate::video_codec_state::{InNegotiation, Readable, VideoCodecState, VideoCodecStateContext};
use crate::VideoCodecFrame;
use crate::VideoEncoder;
use glib::object::IsA;
use glib::translate::*;
use gst;
use gst_video_sys;
use std::mem;
use std::ptr;
use utils::HasStreamLock;
use video_codec_state::{InNegotiation, Readable, VideoCodecState, VideoCodecStateContext};
use VideoCodecFrame;
use VideoEncoder;

pub trait VideoEncoderExtManual: 'static {
#[cfg(any(feature = "v1_12", feature = "dox"))]

@ -68,7 +66,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
size: usize,
|
||||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_encoder_allocate_output_frame(
|
||||
from_glib(ffi::gst_video_encoder_allocate_output_frame(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.to_glib_none().0,
|
||||
size,
|
||||
|
@ -81,7 +79,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
unsafe {
|
||||
let mut allocator = ptr::null_mut();
|
||||
let mut params = mem::zeroed();
|
||||
gst_video_sys::gst_video_encoder_get_allocator(
|
||||
ffi::gst_video_encoder_get_allocator(
|
||||
self.as_ref().to_glib_none().0,
|
||||
&mut allocator,
|
||||
&mut params,
|
||||
|
@ -95,7 +93,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
frame: Option<VideoCodecFrame>,
|
||||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_encoder_finish_frame(
|
||||
from_glib(ffi::gst_video_encoder_finish_frame(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.map(|f| f.into_ptr()).unwrap_or(ptr::null_mut()),
|
||||
))
|
||||
|
@ -107,7 +105,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
fn finish_subframe(&self, frame: &VideoCodecFrame) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let ret: gst::FlowReturn = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_encoder_finish_subframe(
|
||||
from_glib(ffi::gst_video_encoder_finish_subframe(
|
||||
self.as_ref().to_glib_none().0,
|
||||
frame.to_glib_none().0,
|
||||
))
|
||||
|
@ -116,11 +114,11 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
}
|
||||
|
||||
fn get_latency(&self) -> (gst::ClockTime, gst::ClockTime) {
|
||||
let mut min_latency = gst_sys::GST_CLOCK_TIME_NONE;
|
||||
let mut max_latency = gst_sys::GST_CLOCK_TIME_NONE;
|
||||
let mut min_latency = gst::ffi::GST_CLOCK_TIME_NONE;
|
||||
let mut max_latency = gst::ffi::GST_CLOCK_TIME_NONE;
|
||||
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_encoder_get_latency(
|
||||
ffi::gst_video_encoder_get_latency(
|
||||
self.as_ref().to_glib_none().0,
|
||||
&mut min_latency,
|
||||
&mut max_latency,
|
||||
|
@ -132,7 +130,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
|
||||
fn set_latency(&self, min_latency: gst::ClockTime, max_latency: gst::ClockTime) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_encoder_set_latency(
|
||||
ffi::gst_video_encoder_set_latency(
|
||||
self.as_ref().to_glib_none().0,
|
||||
min_latency.to_glib(),
|
||||
max_latency.to_glib(),
|
||||
|
@ -142,7 +140,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
|
||||
fn get_frame(&self, frame_number: i32) -> Option<VideoCodecFrame> {
|
||||
let frame = unsafe {
|
||||
gst_video_sys::gst_video_encoder_get_frame(self.as_ref().to_glib_none().0, frame_number)
|
||||
ffi::gst_video_encoder_get_frame(self.as_ref().to_glib_none().0, frame_number)
|
||||
};
|
||||
|
||||
if frame.is_null() {
|
||||
|
@ -154,9 +152,8 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
|
||||
fn get_frames(&self) -> Vec<VideoCodecFrame> {
|
||||
unsafe {
|
||||
let frames =
|
||||
gst_video_sys::gst_video_encoder_get_frames(self.as_ref().to_glib_none().0);
|
||||
let mut iter: *const glib_sys::GList = frames;
|
||||
let frames = ffi::gst_video_encoder_get_frames(self.as_ref().to_glib_none().0);
|
||||
let mut iter: *const glib::ffi::GList = frames;
|
||||
let mut vec = Vec::new();
|
||||
|
||||
while !iter.is_null() {
|
||||
|
@ -167,15 +164,14 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
iter = (*iter).next;
|
||||
}
|
||||
|
||||
glib_sys::g_list_free(frames);
|
||||
glib::ffi::g_list_free(frames);
|
||||
vec
|
||||
}
|
||||
}
|
||||
|
||||
fn get_oldest_frame(&self) -> Option<VideoCodecFrame> {
|
||||
let frame = unsafe {
|
||||
gst_video_sys::gst_video_encoder_get_oldest_frame(self.as_ref().to_glib_none().0)
|
||||
};
|
||||
let frame =
|
||||
unsafe { ffi::gst_video_encoder_get_oldest_frame(self.as_ref().to_glib_none().0) };
|
||||
|
||||
if frame.is_null() {
|
||||
None
|
||||
|
@ -185,9 +181,8 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
}
|
||||
|
||||
fn get_output_state(&self) -> Option<VideoCodecState<'static, Readable>> {
|
||||
let state = unsafe {
|
||||
gst_video_sys::gst_video_encoder_get_output_state(self.as_ref().to_glib_none().0)
|
||||
};
|
||||
let state =
|
||||
unsafe { ffi::gst_video_encoder_get_output_state(self.as_ref().to_glib_none().0) };
|
||||
|
||||
if state.is_null() {
|
||||
None
|
||||
|
@ -206,7 +201,7 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
Some(reference) => reference.as_mut_ptr(),
|
||||
None => ptr::null_mut(),
|
||||
};
|
||||
gst_video_sys::gst_video_encoder_set_output_state(
|
||||
ffi::gst_video_encoder_set_output_state(
|
||||
self.as_ref().to_glib_none().0,
|
||||
caps.into_ptr(),
|
||||
reference,
|
||||
|
@ -225,11 +220,11 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
output_state: VideoCodecState<'a, InNegotiation<'a>>,
|
||||
) -> Result<(), gst::FlowError> {
|
||||
// Consume output_state so user won't be able to modify it anymore
|
||||
let self_ptr = self.to_glib_none().0 as *const gst_sys::GstElement;
|
||||
let self_ptr = self.to_glib_none().0 as *const gst::ffi::GstElement;
|
||||
assert_eq!(output_state.context.get_element_as_ptr(), self_ptr);
|
||||
|
||||
let ret = unsafe {
|
||||
from_glib(gst_video_sys::gst_video_encoder_negotiate(
|
||||
from_glib(ffi::gst_video_encoder_negotiate(
|
||||
self.as_ref().to_glib_none().0,
|
||||
))
|
||||
};
|
||||
|
@ -242,13 +237,13 @@ impl<O: IsA<VideoEncoder>> VideoEncoderExtManual for O {
|
|||
}
|
||||
|
||||
impl HasStreamLock for VideoEncoder {
|
||||
fn get_stream_lock(&self) -> *mut glib_sys::GRecMutex {
|
||||
let encoder_sys: *const gstreamer_video_sys::GstVideoEncoder = self.to_glib_none().0;
|
||||
fn get_stream_lock(&self) -> *mut glib::ffi::GRecMutex {
|
||||
let encoder_sys: *const ffi::GstVideoEncoder = self.to_glib_none().0;
|
||||
unsafe { &(*encoder_sys).stream_lock as *const _ as usize as *mut _ }
|
||||
}
|
||||
|
||||
fn get_element_as_ptr(&self) -> *const gst_sys::GstElement {
|
||||
let encoder_sys: *const gstreamer_video_sys::GstVideoEncoder = self.to_glib_none().0;
|
||||
encoder_sys as *const gst_sys::GstElement
|
||||
fn get_element_as_ptr(&self) -> *const gst::ffi::GstElement {
|
||||
let encoder_sys: *const ffi::GstVideoEncoder = self.to_glib_none().0;
|
||||
encoder_sys as *const gst::ffi::GstElement
|
||||
}
|
||||
}

@@ -6,12 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use gst_sys;
use gst_video_sys;

use glib::translate::{from_glib, from_glib_full, ToGlib};
use glib::ToSendValue;
use gst;
use std::mem;

// FIXME: Copy from gstreamer/src/event.rs

@ -48,16 +44,16 @@ macro_rules! event_builder_generic_impl {
|
|||
unsafe {
|
||||
let event = $new_fn(&mut self);
|
||||
if let Some(seqnum) = self.seqnum {
|
||||
gst_sys::gst_event_set_seqnum(event, seqnum.to_glib());
|
||||
gst::ffi::gst_event_set_seqnum(event, seqnum.to_glib());
|
||||
}
|
||||
|
||||
if let Some(running_time_offset) = self.running_time_offset {
|
||||
gst_sys::gst_event_set_running_time_offset(event, running_time_offset);
|
||||
gst::ffi::gst_event_set_running_time_offset(event, running_time_offset);
|
||||
}
|
||||
|
||||
{
|
||||
let s = gst::StructureRef::from_glib_borrow_mut(
|
||||
gst_sys::gst_event_writable_structure(event),
|
||||
                    gst::ffi::gst_event_writable_structure(event),
                );

                for (k, v) in self.other_fields {

@@ -127,7 +123,7 @@ impl<'a> DownstreamForceKeyUnitEventBuilder<'a> {
     }

     event_builder_generic_impl!(|s: &mut Self| {
-        gst_video_sys::gst_video_event_new_downstream_force_key_unit(
+        ffi::gst_video_event_new_downstream_force_key_unit(
            s.timestamp.to_glib(),
            s.stream_time.to_glib(),
            s.running_time.to_glib(),

@@ -162,16 +158,14 @@ impl DownstreamForceKeyUnitEvent {
            let mut all_headers = mem::MaybeUninit::uninit();
            let mut count = mem::MaybeUninit::uninit();

-            let res: bool = from_glib(
-                gst_video_sys::gst_video_event_parse_downstream_force_key_unit(
-                    event.as_mut_ptr(),
-                    timestamp.as_mut_ptr(),
-                    stream_time.as_mut_ptr(),
-                    running_time.as_mut_ptr(),
-                    all_headers.as_mut_ptr(),
-                    count.as_mut_ptr(),
-                ),
-            );
+            let res: bool = from_glib(ffi::gst_video_event_parse_downstream_force_key_unit(
+                event.as_mut_ptr(),
+                timestamp.as_mut_ptr(),
+                stream_time.as_mut_ptr(),
+                running_time.as_mut_ptr(),
+                all_headers.as_mut_ptr(),
+                count.as_mut_ptr(),
+            ));
            if res {
                Ok(DownstreamForceKeyUnitEvent {
                    timestamp: from_glib(timestamp.assume_init()),

@@ -181,7 +175,7 @@ impl DownstreamForceKeyUnitEvent {
                    count: count.assume_init(),
                })
            } else {
-                Err(glib_bool_error!("Failed to parse GstEvent"))
+                Err(glib::glib_bool_error!("Failed to parse GstEvent"))
            }
        }
    }

@@ -228,7 +222,7 @@ impl<'a> UpstreamForceKeyUnitEventBuilder<'a> {
     }

     event_builder_generic_impl!(|s: &mut Self| {
-        gst_video_sys::gst_video_event_new_upstream_force_key_unit(
+        ffi::gst_video_event_new_upstream_force_key_unit(
            s.running_time.to_glib(),
            s.all_headers.to_glib(),
            s.count,

@@ -257,14 +251,12 @@ impl UpstreamForceKeyUnitEvent {
            let mut all_headers = mem::MaybeUninit::uninit();
            let mut count = mem::MaybeUninit::uninit();

-            let res: bool = from_glib(
-                gst_video_sys::gst_video_event_parse_upstream_force_key_unit(
-                    event.as_mut_ptr(),
-                    running_time.as_mut_ptr(),
-                    all_headers.as_mut_ptr(),
-                    count.as_mut_ptr(),
-                ),
-            );
+            let res: bool = from_glib(ffi::gst_video_event_parse_upstream_force_key_unit(
+                event.as_mut_ptr(),
+                running_time.as_mut_ptr(),
+                all_headers.as_mut_ptr(),
+                count.as_mut_ptr(),
+            ));
            if res {
                Ok(UpstreamForceKeyUnitEvent {
                    running_time: from_glib(running_time.assume_init()),

@@ -272,7 +264,7 @@ impl UpstreamForceKeyUnitEvent {
                    count: count.assume_init(),
                })
            } else {
-                Err(glib_bool_error!("Failed to parse GstEvent"))
+                Err(glib::glib_bool_error!("Failed to parse GstEvent"))
            }
        }
    }

@@ -287,11 +279,7 @@ pub enum ForceKeyUnitEvent {
 impl ForceKeyUnitEvent {
     pub fn is(event: &gst::EventRef) -> bool {
         skip_assert_initialized!();
-        unsafe {
-            from_glib(gst_video_sys::gst_video_event_is_force_key_unit(
-                event.as_mut_ptr(),
-            ))
-        }
+        unsafe { from_glib(ffi::gst_video_event_is_force_key_unit(event.as_mut_ptr())) }
     }

     pub fn parse(event: &gst::EventRef) -> Result<ForceKeyUnitEvent, glib::error::BoolError> {

@@ -322,9 +310,9 @@ impl<'a> StillFrameEventBuilder<'a> {
         }
     }

-    event_builder_generic_impl!(
-        |s: &mut Self| gst_video_sys::gst_video_event_new_still_frame(s.in_still.to_glib())
-    );
+    event_builder_generic_impl!(|s: &mut Self| ffi::gst_video_event_new_still_frame(
+        s.in_still.to_glib()
+    ));
 }

 #[derive(Clone, PartialEq, Eq, Debug)]

@@ -343,7 +331,7 @@ impl StillFrameEvent {
         unsafe {
            let mut in_still = mem::MaybeUninit::uninit();

-            let res: bool = from_glib(gst_video_sys::gst_video_event_parse_still_frame(
+            let res: bool = from_glib(ffi::gst_video_event_parse_still_frame(
                event.as_mut_ptr(),
                in_still.as_mut_ptr(),
            ));

@@ -352,7 +340,7 @@ impl StillFrameEvent {
                    in_still: from_glib(in_still.assume_init()),
                })
            } else {
-                Err(glib_bool_error!("Invalid still-frame event"))
+                Err(glib::glib_bool_error!("Invalid still-frame event"))
            }
        }
    }

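For orientation, here is a minimal usage sketch, not part of the commit, of reacting to the force-key-unit events whose construction and parsing the hunks above migrate; the handler name and crate aliases are made up, and the Downstream/Upstream variant names are assumed from the ForceKeyUnitEvent enum referenced in the hunk headers.

// Sketch only: reacting to force-key-unit events with the crate paths this
// commit migrates to. ForceKeyUnitEvent::{is, parse} come from the hunks
// above; the Downstream/Upstream variant names are assumed.
use gstreamer as gst;
use gstreamer_video::ForceKeyUnitEvent;

fn handle_custom_event(event: &gst::EventRef) {
    // Cheap check first, then parse into the typed representation.
    if ForceKeyUnitEvent::is(event) {
        match ForceKeyUnitEvent::parse(event) {
            Ok(ForceKeyUnitEvent::Downstream(fku)) => {
                println!("downstream FKU: all_headers={} count={}", fku.all_headers, fku.count)
            }
            Ok(ForceKeyUnitEvent::Upstream(fku)) => {
                println!("upstream FKU at running time {}", fku.running_time)
            }
            Err(err) => eprintln!("failed to parse force-key-unit event: {}", err),
        }
    }
}
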
@@ -6,8 +6,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use gst_video_sys;
-
 use once_cell::sync::Lazy;
 use std::ffi::CStr;
 use std::fmt;

@@ -16,10 +14,10 @@ use std::str;
 use glib::translate::{from_glib, FromGlib, ToGlib, ToGlibPtr};

 #[cfg(feature = "v1_18")]
-pub static VIDEO_FORMATS_ALL: Lazy<Box<[::VideoFormat]>> = Lazy::new(|| unsafe {
+pub static VIDEO_FORMATS_ALL: Lazy<Box<[crate::VideoFormat]>> = Lazy::new(|| unsafe {
     let mut len: u32 = 0;
     let mut res = Vec::with_capacity(len as usize);
-    let formats = gst_video_sys::gst_video_formats_raw(&mut len);
+    let formats = ffi::gst_video_formats_raw(&mut len);
     for i in 0..len {
         let format = formats.offset(i as isize);
         res.push(from_glib(*format));
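As a usage note, not part of the commit, the list built above backs VideoFormat::iter_raw(); below is a small sketch of turning those formats into raw video caps, assuming the VideoFormatIteratorExt trait is re-exported at the crate root as it is used in the tests further down in this file.

// Sketch only: iterate the raw formats backed by VIDEO_FORMATS_ALL and build
// video caps from them, mirroring what the tests in this file exercise.
use gstreamer as gst;
use gstreamer_video as gst_video;
use gst_video::VideoFormatIteratorExt; // assumed re-export; brings into_video_caps() into scope

fn main() {
    gst::init().unwrap();

    let caps = gst_video::VideoFormat::iter_raw()
        .into_video_caps()
        .expect("there is always at least one raw format")
        .build();
    println!("raw caps: {}", caps);
}
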
@ -28,233 +26,233 @@ pub static VIDEO_FORMATS_ALL: Lazy<Box<[::VideoFormat]>> = Lazy::new(|| unsafe {
|
|||
});
|
||||
|
||||
#[cfg(not(feature = "v1_18"))]
|
||||
pub static VIDEO_FORMATS_ALL: Lazy<Box<[::VideoFormat]>> = Lazy::new(|| {
|
||||
pub static VIDEO_FORMATS_ALL: Lazy<Box<[crate::VideoFormat]>> = Lazy::new(|| {
|
||||
#[cfg(target_endian = "little")]
|
||||
{
|
||||
Box::new([
|
||||
::VideoFormat::Ayuv64,
|
||||
::VideoFormat::Argb64,
|
||||
crate::VideoFormat::Ayuv64,
|
||||
crate::VideoFormat::Argb64,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra12le,
|
||||
crate::VideoFormat::Gbra12le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra12be,
|
||||
::VideoFormat::A44410le,
|
||||
crate::VideoFormat::Gbra12be,
|
||||
crate::VideoFormat::A44410le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra10le,
|
||||
::VideoFormat::A44410be,
|
||||
crate::VideoFormat::Gbra10le,
|
||||
crate::VideoFormat::A44410be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra10be,
|
||||
::VideoFormat::A42210le,
|
||||
::VideoFormat::A42210be,
|
||||
::VideoFormat::A42010le,
|
||||
::VideoFormat::A42010be,
|
||||
crate::VideoFormat::Gbra10be,
|
||||
crate::VideoFormat::A42210le,
|
||||
crate::VideoFormat::A42210be,
|
||||
crate::VideoFormat::A42010le,
|
||||
crate::VideoFormat::A42010be,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Bgr10a2Le,
|
||||
crate::VideoFormat::Bgr10a2Le,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Y410,
|
||||
crate::VideoFormat::Y410,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra,
|
||||
::VideoFormat::Abgr,
|
||||
crate::VideoFormat::Gbra,
|
||||
crate::VideoFormat::Abgr,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Vuya,
|
||||
::VideoFormat::Bgra,
|
||||
::VideoFormat::Ayuv,
|
||||
::VideoFormat::Argb,
|
||||
::VideoFormat::Rgba,
|
||||
::VideoFormat::A420,
|
||||
::VideoFormat::V216,
|
||||
crate::VideoFormat::Vuya,
|
||||
crate::VideoFormat::Bgra,
|
||||
crate::VideoFormat::Ayuv,
|
||||
crate::VideoFormat::Argb,
|
||||
crate::VideoFormat::Rgba,
|
||||
crate::VideoFormat::A420,
|
||||
crate::VideoFormat::V216,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Y44412le,
|
||||
crate::VideoFormat::Y44412le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbr12le,
|
||||
crate::VideoFormat::Gbr12le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Y44412be,
|
||||
crate::VideoFormat::Y44412be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbr12be,
|
||||
crate::VideoFormat::Gbr12be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42212le,
|
||||
crate::VideoFormat::I42212le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42212be,
|
||||
crate::VideoFormat::I42212be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42012le,
|
||||
crate::VideoFormat::I42012le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42012be,
|
||||
::VideoFormat::Y44410le,
|
||||
::VideoFormat::Gbr10le,
|
||||
::VideoFormat::Y44410be,
|
||||
::VideoFormat::Gbr10be,
|
||||
::VideoFormat::R210,
|
||||
::VideoFormat::I42210le,
|
||||
::VideoFormat::I42210be,
|
||||
crate::VideoFormat::I42012be,
|
||||
crate::VideoFormat::Y44410le,
|
||||
crate::VideoFormat::Gbr10le,
|
||||
crate::VideoFormat::Y44410be,
|
||||
crate::VideoFormat::Gbr10be,
|
||||
crate::VideoFormat::R210,
|
||||
crate::VideoFormat::I42210le,
|
||||
crate::VideoFormat::I42210be,
|
||||
#[cfg(feature = "v1_14")]
|
||||
::VideoFormat::Nv1610le32,
|
||||
crate::VideoFormat::Nv1610le32,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Y210,
|
||||
::VideoFormat::V210,
|
||||
::VideoFormat::Uyvp,
|
||||
::VideoFormat::I42010le,
|
||||
::VideoFormat::I42010be,
|
||||
crate::VideoFormat::Y210,
|
||||
crate::VideoFormat::V210,
|
||||
crate::VideoFormat::Uyvp,
|
||||
crate::VideoFormat::I42010le,
|
||||
crate::VideoFormat::I42010be,
|
||||
#[cfg(feature = "v1_10")]
|
||||
::VideoFormat::P01010le,
|
||||
crate::VideoFormat::P01010le,
|
||||
#[cfg(feature = "v1_14")]
|
||||
::VideoFormat::Nv1210le32,
|
||||
crate::VideoFormat::Nv1210le32,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Nv1210le40,
|
||||
crate::VideoFormat::Nv1210le40,
|
||||
#[cfg(feature = "v1_10")]
|
||||
::VideoFormat::P01010be,
|
||||
::VideoFormat::Y444,
|
||||
::VideoFormat::Gbr,
|
||||
::VideoFormat::Nv24,
|
||||
::VideoFormat::Xbgr,
|
||||
::VideoFormat::Bgrx,
|
||||
::VideoFormat::Xrgb,
|
||||
::VideoFormat::Rgbx,
|
||||
::VideoFormat::Bgr,
|
||||
crate::VideoFormat::P01010be,
|
||||
crate::VideoFormat::Y444,
|
||||
crate::VideoFormat::Gbr,
|
||||
crate::VideoFormat::Nv24,
|
||||
crate::VideoFormat::Xbgr,
|
||||
crate::VideoFormat::Bgrx,
|
||||
crate::VideoFormat::Xrgb,
|
||||
crate::VideoFormat::Rgbx,
|
||||
crate::VideoFormat::Bgr,
|
||||
#[cfg(feature = "v1_10")]
|
||||
::VideoFormat::Iyu2,
|
||||
::VideoFormat::V308,
|
||||
::VideoFormat::Rgb,
|
||||
::VideoFormat::Y42b,
|
||||
::VideoFormat::Nv61,
|
||||
::VideoFormat::Nv16,
|
||||
crate::VideoFormat::Iyu2,
|
||||
crate::VideoFormat::V308,
|
||||
crate::VideoFormat::Rgb,
|
||||
crate::VideoFormat::Y42b,
|
||||
crate::VideoFormat::Nv61,
|
||||
crate::VideoFormat::Nv16,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Vyuy,
|
||||
::VideoFormat::Uyvy,
|
||||
::VideoFormat::Yvyu,
|
||||
::VideoFormat::Yuy2,
|
||||
::VideoFormat::I420,
|
||||
::VideoFormat::Yv12,
|
||||
::VideoFormat::Nv21,
|
||||
::VideoFormat::Nv12,
|
||||
::VideoFormat::Nv1264z32,
|
||||
::VideoFormat::Y41b,
|
||||
::VideoFormat::Iyu1,
|
||||
::VideoFormat::Yvu9,
|
||||
::VideoFormat::Yuv9,
|
||||
::VideoFormat::Rgb16,
|
||||
::VideoFormat::Bgr16,
|
||||
::VideoFormat::Rgb15,
|
||||
::VideoFormat::Bgr15,
|
||||
::VideoFormat::Rgb8p,
|
||||
::VideoFormat::Gray16Le,
|
||||
::VideoFormat::Gray16Be,
|
||||
crate::VideoFormat::Vyuy,
|
||||
crate::VideoFormat::Uyvy,
|
||||
crate::VideoFormat::Yvyu,
|
||||
crate::VideoFormat::Yuy2,
|
||||
crate::VideoFormat::I420,
|
||||
crate::VideoFormat::Yv12,
|
||||
crate::VideoFormat::Nv21,
|
||||
crate::VideoFormat::Nv12,
|
||||
crate::VideoFormat::Nv1264z32,
|
||||
crate::VideoFormat::Y41b,
|
||||
crate::VideoFormat::Iyu1,
|
||||
crate::VideoFormat::Yvu9,
|
||||
crate::VideoFormat::Yuv9,
|
||||
crate::VideoFormat::Rgb16,
|
||||
crate::VideoFormat::Bgr16,
|
||||
crate::VideoFormat::Rgb15,
|
||||
crate::VideoFormat::Bgr15,
|
||||
crate::VideoFormat::Rgb8p,
|
||||
crate::VideoFormat::Gray16Le,
|
||||
crate::VideoFormat::Gray16Be,
|
||||
#[cfg(feature = "v1_14")]
|
||||
::VideoFormat::Gray10Le32,
|
||||
::VideoFormat::Gray8,
|
||||
crate::VideoFormat::Gray10Le32,
|
||||
crate::VideoFormat::Gray8,
|
||||
])
|
||||
}
|
||||
#[cfg(target_endian = "big")]
|
||||
{
|
||||
Box::new([
|
||||
::VideoFormat::Ayuv64,
|
||||
::VideoFormat::Argb64,
|
||||
crate::VideoFormat::Ayuv64,
|
||||
crate::VideoFormat::Argb64,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra12be,
|
||||
crate::VideoFormat::Gbra12be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra12le,
|
||||
::VideoFormat::A44410be,
|
||||
crate::VideoFormat::Gbra12le,
|
||||
crate::VideoFormat::A44410be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra10be,
|
||||
::VideoFormat::A44410le,
|
||||
crate::VideoFormat::Gbra10be,
|
||||
crate::VideoFormat::A44410le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra10le,
|
||||
::VideoFormat::A42210be,
|
||||
::VideoFormat::A42210le,
|
||||
::VideoFormat::A42010be,
|
||||
::VideoFormat::A42010le,
|
||||
crate::VideoFormat::Gbra10le,
|
||||
crate::VideoFormat::A42210be,
|
||||
crate::VideoFormat::A42210le,
|
||||
crate::VideoFormat::A42010be,
|
||||
crate::VideoFormat::A42010le,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Y410,
|
||||
crate::VideoFormat::Y410,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Bgr10a2Le,
|
||||
crate::VideoFormat::Bgr10a2Le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbra,
|
||||
::VideoFormat::Abgr,
|
||||
crate::VideoFormat::Gbra,
|
||||
crate::VideoFormat::Abgr,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Vuya,
|
||||
::VideoFormat::Bgra,
|
||||
::VideoFormat::Ayuv,
|
||||
::VideoFormat::Argb,
|
||||
::VideoFormat::Rgba,
|
||||
::VideoFormat::A420,
|
||||
::VideoFormat::V216,
|
||||
crate::VideoFormat::Vuya,
|
||||
crate::VideoFormat::Bgra,
|
||||
crate::VideoFormat::Ayuv,
|
||||
crate::VideoFormat::Argb,
|
||||
crate::VideoFormat::Rgba,
|
||||
crate::VideoFormat::A420,
|
||||
crate::VideoFormat::V216,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Y44412be,
|
||||
crate::VideoFormat::Y44412be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbr12be,
|
||||
crate::VideoFormat::Gbr12be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Y44412le,
|
||||
crate::VideoFormat::Y44412le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Gbr12le,
|
||||
crate::VideoFormat::Gbr12le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42212be,
|
||||
crate::VideoFormat::I42212be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42212le,
|
||||
crate::VideoFormat::I42212le,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42012be,
|
||||
crate::VideoFormat::I42012be,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::I42012le,
|
||||
::VideoFormat::Y44410be,
|
||||
::VideoFormat::Gbr10be,
|
||||
::VideoFormat::Y44410le,
|
||||
::VideoFormat::Gbr10le,
|
||||
::VideoFormat::R210,
|
||||
::VideoFormat::I42210be,
|
||||
::VideoFormat::I42210le,
|
||||
crate::VideoFormat::I42012le,
|
||||
crate::VideoFormat::Y44410be,
|
||||
crate::VideoFormat::Gbr10be,
|
||||
crate::VideoFormat::Y44410le,
|
||||
crate::VideoFormat::Gbr10le,
|
||||
crate::VideoFormat::R210,
|
||||
crate::VideoFormat::I42210be,
|
||||
crate::VideoFormat::I42210le,
|
||||
#[cfg(feature = "v1_14")]
|
||||
::VideoFormat::Nv1610le32,
|
||||
crate::VideoFormat::Nv1610le32,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Y210,
|
||||
::VideoFormat::V210,
|
||||
::VideoFormat::Uyvp,
|
||||
::VideoFormat::I42010be,
|
||||
::VideoFormat::I42010le,
|
||||
crate::VideoFormat::Y210,
|
||||
crate::VideoFormat::V210,
|
||||
crate::VideoFormat::Uyvp,
|
||||
crate::VideoFormat::I42010be,
|
||||
crate::VideoFormat::I42010le,
|
||||
#[cfg(feature = "v1_10")]
|
||||
::VideoFormat::P01010be,
|
||||
crate::VideoFormat::P01010be,
|
||||
#[cfg(feature = "v1_10")]
|
||||
::VideoFormat::P01010le,
|
||||
crate::VideoFormat::P01010le,
|
||||
#[cfg(feature = "v1_14")]
|
||||
::VideoFormat::Nv1210le32,
|
||||
crate::VideoFormat::Nv1210le32,
|
||||
#[cfg(feature = "v1_16")]
|
||||
::VideoFormat::Nv1210le40,
|
||||
::VideoFormat::Y444,
|
||||
::VideoFormat::Gbr,
|
||||
::VideoFormat::Nv24,
|
||||
::VideoFormat::Xbgr,
|
||||
::VideoFormat::Bgrx,
|
||||
::VideoFormat::Xrgb,
|
||||
::VideoFormat::Rgbx,
|
||||
::VideoFormat::Bgr,
|
||||
crate::VideoFormat::Nv1210le40,
|
||||
crate::VideoFormat::Y444,
|
||||
crate::VideoFormat::Gbr,
|
||||
crate::VideoFormat::Nv24,
|
||||
crate::VideoFormat::Xbgr,
|
||||
crate::VideoFormat::Bgrx,
|
||||
crate::VideoFormat::Xrgb,
|
||||
crate::VideoFormat::Rgbx,
|
||||
crate::VideoFormat::Bgr,
|
||||
#[cfg(feature = "v1_10")]
|
||||
::VideoFormat::Iyu2,
|
||||
::VideoFormat::V308,
|
||||
::VideoFormat::Rgb,
|
||||
::VideoFormat::Y42b,
|
||||
::VideoFormat::Nv61,
|
||||
::VideoFormat::Nv16,
|
||||
crate::VideoFormat::Iyu2,
|
||||
crate::VideoFormat::V308,
|
||||
crate::VideoFormat::Rgb,
|
||||
crate::VideoFormat::Y42b,
|
||||
crate::VideoFormat::Nv61,
|
||||
crate::VideoFormat::Nv16,
|
||||
#[cfg(feature = "v1_12")]
|
||||
::VideoFormat::Vyuy,
|
||||
::VideoFormat::Uyvy,
|
||||
::VideoFormat::Yvyu,
|
||||
::VideoFormat::Yuy2,
|
||||
::VideoFormat::I420,
|
||||
::VideoFormat::Yv12,
|
||||
::VideoFormat::Nv21,
|
||||
::VideoFormat::Nv12,
|
||||
::VideoFormat::Nv1264z32,
|
||||
::VideoFormat::Y41b,
|
||||
::VideoFormat::Iyu1,
|
||||
::VideoFormat::Yvu9,
|
||||
::VideoFormat::Yuv9,
|
||||
::VideoFormat::Rgb16,
|
||||
::VideoFormat::Bgr16,
|
||||
::VideoFormat::Rgb15,
|
||||
::VideoFormat::Bgr15,
|
||||
::VideoFormat::Rgb8p,
|
||||
::VideoFormat::Gray16Be,
|
||||
::VideoFormat::Gray16Le,
|
||||
crate::VideoFormat::Vyuy,
|
||||
crate::VideoFormat::Uyvy,
|
||||
crate::VideoFormat::Yvyu,
|
||||
crate::VideoFormat::Yuy2,
|
||||
crate::VideoFormat::I420,
|
||||
crate::VideoFormat::Yv12,
|
||||
crate::VideoFormat::Nv21,
|
||||
crate::VideoFormat::Nv12,
|
||||
crate::VideoFormat::Nv1264z32,
|
||||
crate::VideoFormat::Y41b,
|
||||
crate::VideoFormat::Iyu1,
|
||||
crate::VideoFormat::Yvu9,
|
||||
crate::VideoFormat::Yuv9,
|
||||
crate::VideoFormat::Rgb16,
|
||||
crate::VideoFormat::Bgr16,
|
||||
crate::VideoFormat::Rgb15,
|
||||
crate::VideoFormat::Bgr15,
|
||||
crate::VideoFormat::Rgb8p,
|
||||
crate::VideoFormat::Gray16Be,
|
||||
crate::VideoFormat::Gray16Le,
|
||||
#[cfg(feature = "v1_14")]
|
||||
::VideoFormat::Gray10Le32,
|
||||
::VideoFormat::Gray8,
|
||||
crate::VideoFormat::Gray10Le32,
|
||||
crate::VideoFormat::Gray8,
|
||||
])
|
||||
}
|
||||
});
|
||||
|
@@ -290,26 +288,26 @@ impl ToGlib for VideoEndianness {
     }
 }

-impl ::VideoFormat {
-    pub fn from_fourcc(fourcc: u32) -> ::VideoFormat {
+impl crate::VideoFormat {
+    pub fn from_fourcc(fourcc: u32) -> crate::VideoFormat {
         assert_initialized_main_thread!();

-        unsafe { from_glib(gst_video_sys::gst_video_format_from_fourcc(fourcc)) }
+        unsafe { from_glib(ffi::gst_video_format_from_fourcc(fourcc)) }
     }

     pub fn from_masks(
         depth: u32,
         bpp: u32,
-        endianness: ::VideoEndianness,
+        endianness: crate::VideoEndianness,
         red_mask: u32,
         blue_mask: u32,
         green_mask: u32,
         alpha_mask: u32,
-    ) -> ::VideoFormat {
+    ) -> crate::VideoFormat {
         assert_initialized_main_thread!();

         unsafe {
-            from_glib(gst_video_sys::gst_video_format_from_masks(
+            from_glib(ffi::gst_video_format_from_masks(
                depth as i32,
                bpp as i32,
                endianness.to_glib(),

@@ -322,12 +320,12 @@ impl ::VideoFormat {
     }

     pub fn to_str<'a>(self) -> &'a str {
-        if self == ::VideoFormat::Unknown {
+        if self == crate::VideoFormat::Unknown {
            return "UNKNOWN";
        }

        unsafe {
-            CStr::from_ptr(gst_video_sys::gst_video_format_to_string(self.to_glib()))
+            CStr::from_ptr(ffi::gst_video_format_to_string(self.to_glib()))
                .to_str()
                .unwrap()
        }

@@ -338,19 +336,21 @@ impl ::VideoFormat {
     }
 }

-impl str::FromStr for ::VideoFormat {
+impl str::FromStr for crate::VideoFormat {
     type Err = glib::BoolError;

     fn from_str(s: &str) -> Result<Self, glib::BoolError> {
         assert_initialized_main_thread!();

         unsafe {
-            let fmt = ::VideoFormat::from_glib(gst_video_sys::gst_video_format_from_string(
+            let fmt = crate::VideoFormat::from_glib(ffi::gst_video_format_from_string(
                s.to_glib_none().0,
            ));

-            if fmt == ::VideoFormat::Unknown {
-                Err(glib_bool_error!("Failed to parse video format from string"))
+            if fmt == crate::VideoFormat::Unknown {
+                Err(glib::glib_bool_error!(
+                    "Failed to parse video format from string"
+                ))
            } else {
                Ok(fmt)
            }

@@ -358,21 +358,22 @@ impl str::FromStr for ::VideoFormat {
     }
 }

-impl fmt::Display for ::VideoFormat {
+impl fmt::Display for crate::VideoFormat {
     fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         f.write_str((*self).to_str())
     }
 }

-impl PartialOrd for ::VideoFormat {
-    fn partial_cmp(&self, other: &::VideoFormat) -> Option<std::cmp::Ordering> {
-        ::VideoFormatInfo::from_format(*self).partial_cmp(&::VideoFormatInfo::from_format(*other))
+impl PartialOrd for crate::VideoFormat {
+    fn partial_cmp(&self, other: &crate::VideoFormat) -> Option<std::cmp::Ordering> {
+        crate::VideoFormatInfo::from_format(*self)
+            .partial_cmp(&crate::VideoFormatInfo::from_format(*other))
     }
 }

-impl Ord for ::VideoFormat {
-    fn cmp(&self, other: &::VideoFormat) -> std::cmp::Ordering {
-        ::VideoFormatInfo::from_format(*self).cmp(&::VideoFormatInfo::from_format(*other))
+impl Ord for crate::VideoFormat {
+    fn cmp(&self, other: &crate::VideoFormat) -> std::cmp::Ordering {
+        crate::VideoFormatInfo::from_format(*self).cmp(&crate::VideoFormatInfo::from_format(*other))
     }
 }

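A short sketch, not part of the commit, exercising the trait impls migrated above (FromStr, Display and the VideoFormatInfo-backed ordering); the crate aliases are assumptions.

// Sketch only: FromStr, Display and Ord on VideoFormat, as migrated above.
use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    gst::init().unwrap();

    // FromStr goes through gst_video_format_from_string().
    let fmt: gst_video::VideoFormat = "I420".parse().unwrap();
    // Display goes through gst_video_format_to_string().
    println!("parsed format: {}", fmt);

    // Ordering is delegated to VideoFormatInfo, matching the assertion in the
    // tests further down in this file.
    assert!(gst_video::VideoFormat::I420 > gst_video::VideoFormat::Yv12);
}
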
@ -391,7 +392,7 @@ impl Default for VideoFormatIterator {
|
|||
}
|
||||
|
||||
impl Iterator for VideoFormatIterator {
|
||||
type Item = ::VideoFormat;
|
||||
type Item = crate::VideoFormat;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.idx >= self.len {
|
||||
|
@ -433,12 +434,12 @@ pub trait VideoFormatIteratorExt {
|
|||
|
||||
impl<T> VideoFormatIteratorExt for T
|
||||
where
|
||||
T: Iterator<Item = ::VideoFormat>,
|
||||
T: Iterator<Item = crate::VideoFormat>,
|
||||
{
|
||||
fn into_video_caps(self) -> Option<gst::caps::Builder<gst::caps::NoFeature>> {
|
||||
let formats: Vec<::VideoFormat> = self.collect();
|
||||
let formats: Vec<crate::VideoFormat> = self.collect();
|
||||
if !formats.is_empty() {
|
||||
Some(::functions::video_make_raw_caps(&formats))
|
||||
Some(crate::functions::video_make_raw_caps(&formats))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -451,12 +452,12 @@ pub trait VideoFormatIteratorExtRef {
|
|||
|
||||
impl<'a, T> VideoFormatIteratorExtRef for T
|
||||
where
|
||||
T: Iterator<Item = &'a ::VideoFormat>,
|
||||
T: Iterator<Item = &'a crate::VideoFormat>,
|
||||
{
|
||||
fn into_video_caps(self) -> Option<gst::caps::Builder<gst::caps::NoFeature>> {
|
||||
let formats: Vec<::VideoFormat> = self.copied().collect();
|
||||
let formats: Vec<crate::VideoFormat> = self.copied().collect();
|
||||
if !formats.is_empty() {
|
||||
Some(::functions::video_make_raw_caps(&formats))
|
||||
Some(crate::functions::video_make_raw_caps(&formats))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -465,13 +466,12 @@ where
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use gst;
|
||||
|
||||
#[test]
|
||||
fn test_display() {
|
||||
gst::init().unwrap();
|
||||
|
||||
format!("{}", ::VideoFormat::Nv16);
|
||||
format!("{}", crate::VideoFormat::Nv16);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -479,13 +479,13 @@ mod tests {
|
|||
use super::*;
|
||||
gst::init().unwrap();
|
||||
|
||||
assert!(::VideoFormat::iter_raw().count() > 0);
|
||||
assert!(crate::VideoFormat::iter_raw().count() > 0);
|
||||
assert_eq!(
|
||||
::VideoFormat::iter_raw().count(),
|
||||
::VideoFormat::iter_raw().len()
|
||||
crate::VideoFormat::iter_raw().count(),
|
||||
crate::VideoFormat::iter_raw().len()
|
||||
);
|
||||
|
||||
let mut i = ::VideoFormat::iter_raw();
|
||||
let mut i = crate::VideoFormat::iter_raw();
|
||||
let mut count = 0;
|
||||
loop {
|
||||
if i.next().is_none() {
|
||||
|
@ -497,25 +497,25 @@ mod tests {
|
|||
}
|
||||
count += 1;
|
||||
}
|
||||
assert_eq!(count, ::VideoFormat::iter_raw().len());
|
||||
assert_eq!(count, crate::VideoFormat::iter_raw().len());
|
||||
|
||||
assert!(::VideoFormat::iter_raw().any(|f| f == ::VideoFormat::Nv12));
|
||||
assert!(::VideoFormat::iter_raw()
|
||||
.find(|f| *f == ::VideoFormat::Encoded)
|
||||
assert!(crate::VideoFormat::iter_raw().any(|f| f == crate::VideoFormat::Nv12));
|
||||
assert!(crate::VideoFormat::iter_raw()
|
||||
.find(|f| *f == crate::VideoFormat::Encoded)
|
||||
.is_none());
|
||||
|
||||
let caps = ::VideoFormat::iter_raw().into_video_caps();
|
||||
let caps = crate::VideoFormat::iter_raw().into_video_caps();
|
||||
assert!(caps.is_some());
|
||||
|
||||
let caps = ::VideoFormat::iter_raw()
|
||||
.filter(|f| ::VideoFormatInfo::from_format(*f).is_gray())
|
||||
let caps = crate::VideoFormat::iter_raw()
|
||||
.filter(|f| crate::VideoFormatInfo::from_format(*f).is_gray())
|
||||
.into_video_caps();
|
||||
assert!(caps.is_some());
|
||||
|
||||
let caps = ::VideoFormat::iter_raw().skip(1000).into_video_caps();
|
||||
let caps = crate::VideoFormat::iter_raw().skip(1000).into_video_caps();
|
||||
assert!(caps.is_none());
|
||||
|
||||
let caps = [::VideoFormat::Nv12, ::VideoFormat::Nv16]
|
||||
let caps = [crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]
|
||||
.iter()
|
||||
.into_video_caps()
|
||||
.unwrap()
|
||||
|
@ -531,16 +531,17 @@ mod tests {
|
|||
gst::init().unwrap();
|
||||
|
||||
assert!(
|
||||
::VideoFormatInfo::from_format(::VideoFormat::Nv16)
|
||||
> ::VideoFormatInfo::from_format(::VideoFormat::Nv12)
|
||||
crate::VideoFormatInfo::from_format(crate::VideoFormat::Nv16)
|
||||
> crate::VideoFormatInfo::from_format(crate::VideoFormat::Nv12)
|
||||
);
|
||||
assert!(::VideoFormat::I420 > ::VideoFormat::Yv12);
|
||||
assert!(crate::VideoFormat::I420 > crate::VideoFormat::Yv12);
|
||||
|
||||
let sorted: Vec<::VideoFormat> = ::VideoFormat::iter_raw().sorted().rev().collect();
|
||||
let sorted: Vec<crate::VideoFormat> =
|
||||
crate::VideoFormat::iter_raw().sorted().rev().collect();
|
||||
// FIXME: use is_sorted_by() once API is in stable
|
||||
assert_eq!(
|
||||
sorted,
|
||||
::VideoFormat::iter_raw().collect::<Vec<::VideoFormat>>()
|
||||
crate::VideoFormat::iter_raw().collect::<Vec<crate::VideoFormat>>()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,31 +6,28 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use gst_video_sys;
-
 use std::cmp::Ordering;
 use std::ffi::CStr;
 use std::fmt;
 use std::str;

-use glib;
 use glib::translate::{from_glib, ToGlib};

-pub struct VideoFormatInfo(&'static gst_video_sys::GstVideoFormatInfo);
+pub struct VideoFormatInfo(&'static ffi::GstVideoFormatInfo);

 impl VideoFormatInfo {
-    pub fn from_format(format: ::VideoFormat) -> VideoFormatInfo {
+    pub fn from_format(format: crate::VideoFormat) -> VideoFormatInfo {
         assert_initialized_main_thread!();

         unsafe {
-            let info = gst_video_sys::gst_video_format_get_info(format.to_glib());
+            let info = ffi::gst_video_format_get_info(format.to_glib());
            assert!(!info.is_null());

            VideoFormatInfo(&*info)
        }
    }

-    pub fn format(&self) -> ::VideoFormat {
+    pub fn format(&self) -> crate::VideoFormat {
         from_glib(self.0.format)
     }

@@ -42,7 +39,7 @@ impl VideoFormatInfo {
         unsafe { CStr::from_ptr(self.0.description).to_str().unwrap() }
     }

-    pub fn flags(&self) -> ::VideoFormatFlags {
+    pub fn flags(&self) -> crate::VideoFormatFlags {
         from_glib(self.0.flags)
     }

@@ -86,7 +83,7 @@ impl VideoFormatInfo {
         &self.0.h_sub[0..(self.0.n_components as usize)]
     }

-    pub fn tile_mode(&self) -> ::VideoTileMode {
+    pub fn tile_mode(&self) -> crate::VideoTileMode {
         from_glib(self.0.tile_mode)
     }

@@ -98,7 +95,7 @@ impl VideoFormatInfo {
         self.0.tile_hs
     }

-    pub fn unpack_format(&self) -> ::VideoFormat {
+    pub fn unpack_format(&self) -> crate::VideoFormat {
         from_glib(self.0.unpack_format)
     }

@@ -107,35 +104,35 @@ impl VideoFormatInfo {
     }

     pub fn has_alpha(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_ALPHA != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_ALPHA != 0
     }

     pub fn has_palette(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_PALETTE != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_PALETTE != 0
     }

     pub fn is_complex(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_COMPLEX != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_COMPLEX != 0
     }

     pub fn is_gray(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_GRAY != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_GRAY != 0
     }

     pub fn is_le(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_LE != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_LE != 0
     }

     pub fn is_rgb(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_RGB != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_RGB != 0
     }

     pub fn is_tiled(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_TILED != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_TILED != 0
     }

     pub fn is_yuv(&self) -> bool {
-        self.0.flags & gst_video_sys::GST_VIDEO_FORMAT_FLAG_YUV != 0
+        self.0.flags & ffi::GST_VIDEO_FORMAT_FLAG_YUV != 0
     }

     pub fn scale_width(&self, component: u8, width: u32) -> u32 {
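For context, a brief sketch, not part of the commit, that queries a VideoFormatInfo through accessors visible in this file; the crate aliases are assumptions.

// Sketch only: look up format properties through VideoFormatInfo.
use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    gst::init().unwrap();

    let info = gst_video::VideoFormatInfo::from_format(gst_video::VideoFormat::I420);
    println!(
        "{}: planes={} yuv={} alpha={}",
        info.name(),
        info.n_planes(),
        info.is_yuv(),
        info.has_alpha()
    );
    // I420 unpacks to AYUV, which the pack/unpack test below relies on.
    assert_eq!(info.unpack_format(), gst_video::VideoFormat::Ayuv);
}
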
|
@ -149,7 +146,7 @@ impl VideoFormatInfo {
|
|||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn unpack(
|
||||
&self,
|
||||
flags: ::VideoPackFlags,
|
||||
flags: crate::VideoPackFlags,
|
||||
dest: &mut [u8],
|
||||
src: &[&[u8]],
|
||||
stride: &[i32],
|
||||
|
@ -203,8 +200,7 @@ impl VideoFormatInfo {
|
|||
unsafe {
|
||||
use std::ptr;
|
||||
|
||||
let mut src_ptr =
|
||||
[ptr::null() as *const u8; gst_video_sys::GST_VIDEO_MAX_PLANES as usize];
|
||||
let mut src_ptr = [ptr::null() as *const u8; ffi::GST_VIDEO_MAX_PLANES as usize];
|
||||
for plane in 0..(self.n_planes()) {
|
||||
src_ptr[plane as usize] = src[plane as usize].as_ptr();
|
||||
}
|
||||
|
@ -225,12 +221,12 @@ impl VideoFormatInfo {
|
|||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn pack(
|
||||
&self,
|
||||
flags: ::VideoPackFlags,
|
||||
flags: crate::VideoPackFlags,
|
||||
src: &[u8],
|
||||
src_stride: i32,
|
||||
dest: &mut [&mut [u8]],
|
||||
dest_stride: &[i32],
|
||||
chroma_site: ::VideoChromaSite,
|
||||
chroma_site: crate::VideoChromaSite,
|
||||
y: i32,
|
||||
width: i32,
|
||||
) {
|
||||
|
@ -280,8 +276,7 @@ impl VideoFormatInfo {
|
|||
unsafe {
|
||||
use std::ptr;
|
||||
|
||||
let mut dest_ptr =
|
||||
[ptr::null_mut() as *mut u8; gst_video_sys::GST_VIDEO_MAX_PLANES as usize];
|
||||
let mut dest_ptr = [ptr::null_mut() as *mut u8; ffi::GST_VIDEO_MAX_PLANES as usize];
|
||||
for plane in 0..(self.n_planes()) {
|
||||
dest_ptr[plane as usize] = dest[plane as usize].as_mut_ptr();
|
||||
}
|
||||
|
@ -330,8 +325,8 @@ impl Ord for VideoFormatInfo {
|
|||
.then_with(|| {
|
||||
// Format using native endianess is considered as bigger
|
||||
match (
|
||||
self.flags().contains(::VideoFormatFlags::LE),
|
||||
other.flags().contains(::VideoFormatFlags::LE),
|
||||
self.flags().contains(crate::VideoFormatFlags::LE),
|
||||
other.flags().contains(crate::VideoFormatFlags::LE),
|
||||
) {
|
||||
(true, false) => {
|
||||
// a LE, b BE
|
||||
|
@ -363,8 +358,8 @@ impl Ord for VideoFormatInfo {
|
|||
.then_with(|| {
|
||||
// Prefer non-complex formats
|
||||
match (
|
||||
self.flags().contains(::VideoFormatFlags::COMPLEX),
|
||||
other.flags().contains(::VideoFormatFlags::COMPLEX),
|
||||
self.flags().contains(crate::VideoFormatFlags::COMPLEX),
|
||||
other.flags().contains(crate::VideoFormatFlags::COMPLEX),
|
||||
) {
|
||||
(true, false) => Ordering::Less,
|
||||
(false, true) => Ordering::Greater,
|
||||
|
@ -373,12 +368,12 @@ impl Ord for VideoFormatInfo {
|
|||
})
|
||||
.then_with(|| {
|
||||
// tiebreaker: YUV > RGB
|
||||
if self.flags().contains(::VideoFormatFlags::RGB)
|
||||
&& other.flags().contains(::VideoFormatFlags::YUV)
|
||||
if self.flags().contains(crate::VideoFormatFlags::RGB)
|
||||
&& other.flags().contains(crate::VideoFormatFlags::YUV)
|
||||
{
|
||||
Ordering::Less
|
||||
} else if self.flags().contains(::VideoFormatFlags::YUV)
|
||||
&& other.flags().contains(::VideoFormatFlags::RGB)
|
||||
} else if self.flags().contains(crate::VideoFormatFlags::YUV)
|
||||
&& other.flags().contains(crate::VideoFormatFlags::RGB)
|
||||
{
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -389,8 +384,8 @@ impl Ord for VideoFormatInfo {
|
|||
// Manual tiebreaker
|
||||
match (self.format(), other.format()) {
|
||||
// I420 is more commonly used in GStreamer
|
||||
(::VideoFormat::I420, ::VideoFormat::Yv12) => Ordering::Greater,
|
||||
(::VideoFormat::Yv12, ::VideoFormat::I420) => Ordering::Less,
|
||||
(crate::VideoFormat::I420, crate::VideoFormat::Yv12) => Ordering::Greater,
|
||||
(crate::VideoFormat::Yv12, crate::VideoFormat::I420) => Ordering::Less,
|
||||
_ => Ordering::Equal,
|
||||
}
|
||||
})
|
||||
|
@ -433,7 +428,7 @@ impl fmt::Display for VideoFormatInfo {
|
|||
}
|
||||
}
|
||||
|
||||
impl str::FromStr for ::VideoFormatInfo {
|
||||
impl str::FromStr for crate::VideoFormatInfo {
|
||||
type Err = glib::BoolError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, glib::BoolError> {
|
||||
|
@ -443,8 +438,8 @@ impl str::FromStr for ::VideoFormatInfo {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<::VideoFormat> for VideoFormatInfo {
|
||||
fn from(f: ::VideoFormat) -> Self {
|
||||
impl From<crate::VideoFormat> for VideoFormatInfo {
|
||||
fn from(f: crate::VideoFormat) -> Self {
|
||||
skip_assert_initialized!();
|
||||
Self::from_format(f)
|
||||
}
|
||||
|
@ -452,30 +447,26 @@ impl From<::VideoFormat> for VideoFormatInfo {
|
|||
|
||||
#[doc(hidden)]
|
||||
impl glib::translate::GlibPtrDefault for VideoFormatInfo {
|
||||
type GlibType = *mut gst_video_sys::GstVideoFormatInfo;
|
||||
type GlibType = *mut ffi::GstVideoFormatInfo;
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl<'a> glib::translate::ToGlibPtr<'a, *const gst_video_sys::GstVideoFormatInfo>
|
||||
for VideoFormatInfo
|
||||
{
|
||||
impl<'a> glib::translate::ToGlibPtr<'a, *const ffi::GstVideoFormatInfo> for VideoFormatInfo {
|
||||
type Storage = &'a VideoFormatInfo;
|
||||
|
||||
fn to_glib_none(
|
||||
&'a self,
|
||||
) -> glib::translate::Stash<'a, *const gst_video_sys::GstVideoFormatInfo, Self> {
|
||||
fn to_glib_none(&'a self) -> glib::translate::Stash<'a, *const ffi::GstVideoFormatInfo, Self> {
|
||||
glib::translate::Stash(self.0, self)
|
||||
}
|
||||
|
||||
fn to_glib_full(&self) -> *const gst_video_sys::GstVideoFormatInfo {
|
||||
fn to_glib_full(&self) -> *const ffi::GstVideoFormatInfo {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl glib::translate::FromGlibPtrNone<*mut gst_video_sys::GstVideoFormatInfo> for VideoFormatInfo {
|
||||
impl glib::translate::FromGlibPtrNone<*mut ffi::GstVideoFormatInfo> for VideoFormatInfo {
|
||||
#[inline]
|
||||
unsafe fn from_glib_none(ptr: *mut gst_video_sys::GstVideoFormatInfo) -> Self {
|
||||
unsafe fn from_glib_none(ptr: *mut ffi::GstVideoFormatInfo) -> Self {
|
||||
VideoFormatInfo(&*ptr)
|
||||
}
|
||||
}
|
||||
|
@ -483,13 +474,12 @@ impl glib::translate::FromGlibPtrNone<*mut gst_video_sys::GstVideoFormatInfo> fo
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gst;
|
||||
|
||||
#[test]
|
||||
fn test_get() {
|
||||
gst::init().unwrap();
|
||||
|
||||
let info = VideoFormatInfo::from_format(::VideoFormat::I420);
|
||||
let info = VideoFormatInfo::from_format(crate::VideoFormat::I420);
|
||||
assert_eq!(info.name(), "I420");
|
||||
|
||||
let other_info = "I420".parse().unwrap();
|
||||
|
@ -511,10 +501,10 @@ mod tests {
|
|||
// One line of 320 pixel I420
|
||||
let output = &mut [&mut [0; 320][..], &mut [0; 160][..], &mut [0; 160][..]];
|
||||
|
||||
let info = VideoFormatInfo::from_format(::VideoFormat::I420);
|
||||
assert_eq!(info.unpack_format(), ::VideoFormat::Ayuv);
|
||||
let info = VideoFormatInfo::from_format(crate::VideoFormat::I420);
|
||||
assert_eq!(info.unpack_format(), crate::VideoFormat::Ayuv);
|
||||
info.unpack(
|
||||
::VideoPackFlags::empty(),
|
||||
crate::VideoPackFlags::empty(),
|
||||
intermediate,
|
||||
input,
|
||||
&[320, 160, 160][..],
|
||||
|
@ -528,12 +518,12 @@ mod tests {
|
|||
}
|
||||
|
||||
info.pack(
|
||||
::VideoPackFlags::empty(),
|
||||
crate::VideoPackFlags::empty(),
|
||||
&intermediate[..(4 * 320)],
|
||||
4 * 320,
|
||||
output,
|
||||
&[320, 160, 160][..],
|
||||
::VideoChromaSite::NONE,
|
||||
crate::VideoChromaSite::NONE,
|
||||
0,
|
||||
320,
|
||||
);
|
||||
|
|
|
@@ -6,12 +6,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use gst_sys;
-use gst_video_sys;
-
-use glib;
 use glib::translate::{from_glib, from_glib_none, Borrowed, ToGlibPtr};
-use gst;

 use std::fmt;
 use std::marker::PhantomData;

@@ -24,9 +19,9 @@ pub enum Readable {}
 pub enum Writable {}

 pub struct VideoFrame<T> {
-    frame: gst_video_sys::GstVideoFrame,
+    frame: ffi::GstVideoFrame,
     buffer: Option<gst::Buffer>,
-    info: ::VideoInfo,
+    info: crate::VideoInfo,
     phantom: PhantomData<T>,
 }

@@ -45,11 +40,11 @@ impl<T> fmt::Debug for VideoFrame<T> {
 }

 impl<T> VideoFrame<T> {
-    pub fn info(&self) -> &::VideoInfo {
+    pub fn info(&self) -> &crate::VideoInfo {
         &self.info
     }

-    pub fn flags(&self) -> ::VideoFrameFlags {
+    pub fn flags(&self) -> crate::VideoFrameFlags {
         from_glib(self.frame.flags)
     }

@@ -63,14 +58,11 @@ impl<T> VideoFrame<T> {

     pub fn copy(&self, dest: &mut VideoFrame<Writable>) -> Result<(), glib::BoolError> {
         unsafe {
-            let res: bool = from_glib(gst_video_sys::gst_video_frame_copy(
-                &mut dest.frame,
-                &self.frame,
-            ));
+            let res: bool = from_glib(ffi::gst_video_frame_copy(&mut dest.frame, &self.frame));
            if res {
                Ok(())
            } else {
-                Err(glib_bool_error!("Failed to copy video frame"))
+                Err(glib::glib_bool_error!("Failed to copy video frame"))
            }
        }
    }

@ -83,7 +75,7 @@ impl<T> VideoFrame<T> {
|
|||
skip_assert_initialized!();
|
||||
|
||||
unsafe {
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_copy_plane(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_copy_plane(
|
||||
&mut dest.frame,
|
||||
&self.frame,
|
||||
plane,
|
||||
|
@ -91,16 +83,16 @@ impl<T> VideoFrame<T> {
|
|||
if res {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(glib_bool_error!("Failed to copy video frame plane"))
|
||||
Err(glib::glib_bool_error!("Failed to copy video frame plane"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(&self) -> ::VideoFormat {
|
||||
pub fn format(&self) -> crate::VideoFormat {
|
||||
self.info().format()
|
||||
}
|
||||
|
||||
pub fn format_info(&self) -> ::VideoFormatInfo {
|
||||
pub fn format_info(&self) -> crate::VideoFormatInfo {
|
||||
self.info().format_info()
|
||||
}
|
||||
|
||||
|
@ -117,19 +109,19 @@ impl<T> VideoFrame<T> {
|
|||
}
|
||||
|
||||
pub fn is_interlaced(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::INTERLACED)
|
||||
self.flags().contains(crate::VideoFrameFlags::INTERLACED)
|
||||
}
|
||||
|
||||
pub fn is_tff(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::TFF)
|
||||
self.flags().contains(crate::VideoFrameFlags::TFF)
|
||||
}
|
||||
|
||||
pub fn is_rff(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::RFF)
|
||||
self.flags().contains(crate::VideoFrameFlags::RFF)
|
||||
}
|
||||
|
||||
pub fn is_onefield(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::ONEFIELD)
|
||||
self.flags().contains(crate::VideoFrameFlags::ONEFIELD)
|
||||
}
|
||||
|
||||
pub fn n_planes(&self) -> u32 {
|
||||
|
@ -154,7 +146,9 @@ impl<T> VideoFrame<T> {
|
|||
|
||||
pub fn plane_data(&self, plane: u32) -> Result<&[u8], glib::BoolError> {
|
||||
if plane >= self.n_planes() {
|
||||
return Err(glib_bool_error!("Plane index higher than number of planes"));
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Plane index higher than number of planes"
|
||||
));
|
||||
}
|
||||
|
||||
let format_info = self.format_info();
|
||||
|
@ -182,8 +176,8 @@ impl<T> VideoFrame<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub unsafe fn from_glib_full(frame: gst_video_sys::GstVideoFrame) -> Self {
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
pub unsafe fn from_glib_full(frame: ffi::GstVideoFrame) -> Self {
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
let buffer = gst::Buffer::from_glib_none(frame.buffer);
|
||||
VideoFrame {
|
||||
frame,
|
||||
|
@ -204,7 +198,7 @@ impl<T> VideoFrame<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn as_ptr(&self) -> *const gst_video_sys::GstVideoFrame {
|
||||
pub fn as_ptr(&self) -> *const ffi::GstVideoFrame {
|
||||
&self.frame
|
||||
}
|
||||
}
|
||||
|
@ -212,7 +206,7 @@ impl<T> VideoFrame<T> {
|
|||
impl<T> Drop for VideoFrame<T> {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_frame_unmap(&mut self.frame);
|
||||
ffi::gst_video_frame_unmap(&mut self.frame);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -220,7 +214,7 @@ impl<T> Drop for VideoFrame<T> {
 impl VideoFrame<Readable> {
     pub fn from_buffer_readable(
         buffer: gst::Buffer,
-        info: &::VideoInfo,
+        info: &crate::VideoInfo,
     ) -> Result<VideoFrame<Readable>, gst::Buffer> {
         skip_assert_initialized!();

@@ -228,18 +222,18 @@ impl VideoFrame<Readable> {

         unsafe {
            let mut frame = mem::MaybeUninit::zeroed();
-            let res: bool = from_glib(gst_video_sys::gst_video_frame_map(
+            let res: bool = from_glib(ffi::gst_video_frame_map(
                frame.as_mut_ptr(),
                info.to_glib_none().0 as *mut _,
                buffer.to_glib_none().0,
-                gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst_sys::GST_MAP_READ,
+                ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst::ffi::GST_MAP_READ,
            ));

            if !res {
                Err(buffer)
            } else {
                let frame = frame.assume_init();
-                let info = ::VideoInfo(ptr::read(&frame.info));
+                let info = crate::VideoInfo(ptr::read(&frame.info));
                Ok(VideoFrame {
                    frame,
                    buffer: Some(buffer),

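To illustrate the mapping API these hunks touch, here is a sketch, not part of the commit, that maps a buffer read-only and inspects a plane, mirroring the test_map_read test at the end of this file; the crate aliases are assumptions.

// Sketch only: map a buffer as a read-only GRAY8 video frame and look at
// plane 0, following the test_map_read test further down in this file.
use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    gst::init().unwrap();

    let info = gst_video::VideoInfo::builder(gst_video::VideoFormat::Gray8, 320, 240)
        .build()
        .unwrap();
    let buffer = gst::Buffer::with_size(info.size()).unwrap();

    // from_buffer_readable() hands the buffer back on failure instead of
    // returning an error value.
    let frame = gst_video::VideoFrame::from_buffer_readable(buffer, &info).unwrap();
    let plane = frame.plane_data(0).unwrap();
    println!("plane 0 holds {} bytes", plane.len());
}
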
@ -253,7 +247,7 @@ impl VideoFrame<Readable> {
|
|||
pub fn from_buffer_id_readable(
|
||||
buffer: gst::Buffer,
|
||||
id: i32,
|
||||
info: &::VideoInfo,
|
||||
info: &crate::VideoInfo,
|
||||
) -> Result<VideoFrame<Readable>, gst::Buffer> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -261,19 +255,19 @@ impl VideoFrame<Readable> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map_id(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map_id(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.to_glib_none().0,
|
||||
id,
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst_sys::GST_MAP_READ,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst::ffi::GST_MAP_READ,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(buffer)
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrame {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -292,7 +286,7 @@ impl VideoFrame<Readable> {
|
|||
impl VideoFrame<Writable> {
|
||||
pub fn from_buffer_writable(
|
||||
buffer: gst::Buffer,
|
||||
info: &::VideoInfo,
|
||||
info: &crate::VideoInfo,
|
||||
) -> Result<VideoFrame<Writable>, gst::Buffer> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -300,20 +294,20 @@ impl VideoFrame<Writable> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.to_glib_none().0,
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst_sys::GST_MAP_READ
|
||||
| gst_sys::GST_MAP_WRITE,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst::ffi::GST_MAP_READ
|
||||
| gst::ffi::GST_MAP_WRITE,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(buffer)
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrame {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -327,7 +321,7 @@ impl VideoFrame<Writable> {
|
|||
pub fn from_buffer_id_writable(
|
||||
buffer: gst::Buffer,
|
||||
id: i32,
|
||||
info: &::VideoInfo,
|
||||
info: &crate::VideoInfo,
|
||||
) -> Result<VideoFrame<Writable>, gst::Buffer> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -335,21 +329,21 @@ impl VideoFrame<Writable> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map_id(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map_id(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.to_glib_none().0,
|
||||
id,
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst_sys::GST_MAP_READ
|
||||
| gst_sys::GST_MAP_WRITE,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst::ffi::GST_MAP_READ
|
||||
| gst::ffi::GST_MAP_WRITE,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(buffer)
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrame {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -366,7 +360,9 @@ impl VideoFrame<Writable> {
|
|||
|
||||
pub fn plane_data_mut(&mut self, plane: u32) -> Result<&mut [u8], glib::BoolError> {
|
||||
if plane >= self.n_planes() {
|
||||
return Err(glib_bool_error!("Plane index higher than number of planes"));
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Plane index higher than number of planes"
|
||||
));
|
||||
}
|
||||
|
||||
let format_info = self.format_info();
|
||||
|
@ -405,25 +401,25 @@ impl VideoFrame<Writable> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn as_mut_ptr(&mut self) -> *mut gst_video_sys::GstVideoFrame {
|
||||
pub fn as_mut_ptr(&mut self) -> *mut ffi::GstVideoFrame {
|
||||
&mut self.frame
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct VideoFrameRef<T> {
|
||||
frame: gst_video_sys::GstVideoFrame,
|
||||
frame: ffi::GstVideoFrame,
|
||||
buffer: Option<T>,
|
||||
info: ::VideoInfo,
|
||||
info: crate::VideoInfo,
|
||||
unmap: bool,
|
||||
}
|
||||
|
||||
impl<T> VideoFrameRef<T> {
|
||||
pub fn info(&self) -> &::VideoInfo {
|
||||
pub fn info(&self) -> &crate::VideoInfo {
|
||||
&self.info
|
||||
}
|
||||
|
||||
pub fn flags(&self) -> ::VideoFrameFlags {
|
||||
pub fn flags(&self) -> crate::VideoFrameFlags {
|
||||
from_glib(self.frame.flags)
|
||||
}
|
||||
|
||||
|
@ -436,14 +432,11 @@ impl<T> VideoFrameRef<T> {
|
|||
dest: &mut VideoFrameRef<&mut gst::BufferRef>,
|
||||
) -> Result<(), glib::BoolError> {
|
||||
unsafe {
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_copy(
|
||||
&mut dest.frame,
|
||||
&self.frame,
|
||||
));
|
||||
let res: bool = from_glib(ffi::gst_video_frame_copy(&mut dest.frame, &self.frame));
|
||||
if res {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(glib_bool_error!("Failed to copy video frame"))
|
||||
Err(glib::glib_bool_error!("Failed to copy video frame"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -456,7 +449,7 @@ impl<T> VideoFrameRef<T> {
|
|||
skip_assert_initialized!();
|
||||
|
||||
unsafe {
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_copy_plane(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_copy_plane(
|
||||
&mut dest.frame,
|
||||
&self.frame,
|
||||
plane,
|
||||
|
@ -464,16 +457,16 @@ impl<T> VideoFrameRef<T> {
|
|||
if res {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(glib_bool_error!("Failed to copy video frame plane"))
|
||||
Err(glib::glib_bool_error!("Failed to copy video frame plane"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(&self) -> ::VideoFormat {
|
||||
pub fn format(&self) -> crate::VideoFormat {
|
||||
self.info().format()
|
||||
}
|
||||
|
||||
pub fn format_info(&self) -> ::VideoFormatInfo {
|
||||
pub fn format_info(&self) -> crate::VideoFormatInfo {
|
||||
self.info().format_info()
|
||||
}
|
||||
|
||||
|
@ -490,19 +483,19 @@ impl<T> VideoFrameRef<T> {
|
|||
}
|
||||
|
||||
pub fn is_interlaced(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::INTERLACED)
|
||||
self.flags().contains(crate::VideoFrameFlags::INTERLACED)
|
||||
}
|
||||
|
||||
pub fn is_tff(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::TFF)
|
||||
self.flags().contains(crate::VideoFrameFlags::TFF)
|
||||
}
|
||||
|
||||
pub fn is_rff(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::RFF)
|
||||
self.flags().contains(crate::VideoFrameFlags::RFF)
|
||||
}
|
||||
|
||||
pub fn is_onefield(&self) -> bool {
|
||||
self.flags().contains(::VideoFrameFlags::ONEFIELD)
|
||||
self.flags().contains(crate::VideoFrameFlags::ONEFIELD)
|
||||
}
|
||||
|
||||
pub fn n_planes(&self) -> u32 {
|
||||
|
@ -523,7 +516,9 @@ impl<T> VideoFrameRef<T> {
|
|||
|
||||
pub fn plane_data(&self, plane: u32) -> Result<&[u8], glib::BoolError> {
|
||||
if plane >= self.n_planes() {
|
||||
return Err(glib_bool_error!("Plane index higher than number of planes"));
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Plane index higher than number of planes"
|
||||
));
|
||||
}
|
||||
|
||||
let format_info = self.format_info();
|
||||
|
@ -551,17 +546,17 @@ impl<T> VideoFrameRef<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn as_ptr(&self) -> *const gst_video_sys::GstVideoFrame {
|
||||
pub fn as_ptr(&self) -> *const ffi::GstVideoFrame {
|
||||
&self.frame
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
||||
pub unsafe fn from_glib_borrow(frame: *const gst_video_sys::GstVideoFrame) -> Borrowed<Self> {
|
||||
pub unsafe fn from_glib_borrow(frame: *const ffi::GstVideoFrame) -> Borrowed<Self> {
|
||||
assert!(!frame.is_null());
|
||||
|
||||
let frame = ptr::read(frame);
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
let buffer = gst::BufferRef::from_ptr(frame.buffer);
|
||||
Borrowed::new(VideoFrameRef {
|
||||
frame,
|
||||
|
@ -571,8 +566,8 @@ impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
|||
})
|
||||
}
|
||||
|
||||
pub unsafe fn from_glib_full(frame: gst_video_sys::GstVideoFrame) -> Self {
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
pub unsafe fn from_glib_full(frame: ffi::GstVideoFrame) -> Self {
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
let buffer = gst::BufferRef::from_ptr(frame.buffer);
|
||||
VideoFrameRef {
|
||||
frame,
|
||||
|
@ -584,7 +579,7 @@ impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
|||
|
||||
pub fn from_buffer_ref_readable<'b>(
|
||||
buffer: &'a gst::BufferRef,
|
||||
info: &'b ::VideoInfo,
|
||||
info: &'b crate::VideoInfo,
|
||||
) -> Result<VideoFrameRef<&'a gst::BufferRef>, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -592,18 +587,18 @@ impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.as_mut_ptr(),
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst_sys::GST_MAP_READ,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst::ffi::GST_MAP_READ,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(glib_bool_error!("Failed to map VideoFrame"))
|
||||
Err(glib::glib_bool_error!("Failed to map VideoFrame"))
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrameRef {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -617,7 +612,7 @@ impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
|||
pub fn from_buffer_ref_id_readable<'b>(
|
||||
buffer: &'a gst::BufferRef,
|
||||
id: i32,
|
||||
info: &'b ::VideoInfo,
|
||||
info: &'b crate::VideoInfo,
|
||||
) -> Result<VideoFrameRef<&'a gst::BufferRef>, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -625,19 +620,19 @@ impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map_id(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map_id(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.as_mut_ptr(),
|
||||
id,
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst_sys::GST_MAP_READ,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF | gst::ffi::GST_MAP_READ,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(glib_bool_error!("Failed to map VideoFrame"))
|
||||
Err(glib::glib_bool_error!("Failed to map VideoFrame"))
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrameRef {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -654,11 +649,11 @@ impl<'a> VideoFrameRef<&'a gst::BufferRef> {
|
|||
}
|
||||
|
||||
impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
||||
pub unsafe fn from_glib_borrow_mut(frame: *mut gst_video_sys::GstVideoFrame) -> Borrowed<Self> {
|
||||
pub unsafe fn from_glib_borrow_mut(frame: *mut ffi::GstVideoFrame) -> Borrowed<Self> {
|
||||
assert!(!frame.is_null());
|
||||
|
||||
let frame = ptr::read(frame);
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
let buffer = gst::BufferRef::from_mut_ptr(frame.buffer);
|
||||
Borrowed::new(VideoFrameRef {
|
||||
frame,
|
||||
|
@ -668,8 +663,8 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
})
|
||||
}
|
||||
|
||||
pub unsafe fn from_glib_full_mut(frame: gst_video_sys::GstVideoFrame) -> Self {
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
pub unsafe fn from_glib_full_mut(frame: ffi::GstVideoFrame) -> Self {
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
let buffer = gst::BufferRef::from_mut_ptr(frame.buffer);
|
||||
VideoFrameRef {
|
||||
frame,
|
||||
|
@ -681,7 +676,7 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
|
||||
pub fn from_buffer_ref_writable<'b>(
|
||||
buffer: &'a mut gst::BufferRef,
|
||||
info: &'b ::VideoInfo,
|
||||
info: &'b crate::VideoInfo,
|
||||
) -> Result<VideoFrameRef<&'a mut gst::BufferRef>, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -689,20 +684,20 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.as_mut_ptr(),
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst_sys::GST_MAP_READ
|
||||
| gst_sys::GST_MAP_WRITE,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst::ffi::GST_MAP_READ
|
||||
| gst::ffi::GST_MAP_WRITE,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(glib_bool_error!("Failed to map VideoFrame"))
|
||||
Err(glib::glib_bool_error!("Failed to map VideoFrame"))
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrameRef {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -716,7 +711,7 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
pub fn from_buffer_ref_id_writable<'b>(
|
||||
buffer: &'a mut gst::BufferRef,
|
||||
id: i32,
|
||||
info: &'b ::VideoInfo,
|
||||
info: &'b crate::VideoInfo,
|
||||
) -> Result<VideoFrameRef<&'a mut gst::BufferRef>, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
|
@ -724,21 +719,21 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
|
||||
unsafe {
|
||||
let mut frame = mem::MaybeUninit::zeroed();
|
||||
let res: bool = from_glib(gst_video_sys::gst_video_frame_map_id(
|
||||
let res: bool = from_glib(ffi::gst_video_frame_map_id(
|
||||
frame.as_mut_ptr(),
|
||||
info.to_glib_none().0 as *mut _,
|
||||
buffer.as_mut_ptr(),
|
||||
id,
|
||||
gst_video_sys::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst_sys::GST_MAP_READ
|
||||
| gst_sys::GST_MAP_WRITE,
|
||||
ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF
|
||||
| gst::ffi::GST_MAP_READ
|
||||
| gst::ffi::GST_MAP_WRITE,
|
||||
));
|
||||
|
||||
if !res {
|
||||
Err(glib_bool_error!("Failed to map VideoFrame"))
|
||||
Err(glib::glib_bool_error!("Failed to map VideoFrame"))
|
||||
} else {
|
||||
let frame = frame.assume_init();
|
||||
let info = ::VideoInfo(ptr::read(&frame.info));
|
||||
let info = crate::VideoInfo(ptr::read(&frame.info));
|
||||
Ok(VideoFrameRef {
|
||||
frame,
|
||||
buffer: Some(buffer),
|
||||
|
@ -755,7 +750,9 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
|
||||
pub fn plane_data_mut(&mut self, plane: u32) -> Result<&mut [u8], glib::BoolError> {
|
||||
if plane >= self.n_planes() {
|
||||
return Err(glib_bool_error!("Plane index higher than number of planes"));
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Plane index higher than number of planes"
|
||||
));
|
||||
}
|
||||
|
||||
let format_info = self.format_info();
|
||||
|
@ -783,7 +780,7 @@ impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn as_mut_ptr(&mut self) -> *mut gst_video_sys::GstVideoFrame {
|
||||
pub fn as_mut_ptr(&mut self) -> *mut ffi::GstVideoFrame {
|
||||
&mut self.frame
|
||||
}
|
||||
}
|
||||
|
@ -806,34 +803,34 @@ impl<T> Drop for VideoFrameRef<T> {
|
|||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
if self.unmap {
|
||||
gst_video_sys::gst_video_frame_unmap(&mut self.frame);
|
||||
ffi::gst_video_frame_unmap(&mut self.frame);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait VideoBufferExt {
|
||||
fn get_video_flags(&self) -> ::VideoBufferFlags;
|
||||
fn set_video_flags(&mut self, flags: ::VideoBufferFlags);
|
||||
fn unset_video_flags(&mut self, flags: ::VideoBufferFlags);
|
||||
fn get_video_flags(&self) -> crate::VideoBufferFlags;
|
||||
fn set_video_flags(&mut self, flags: crate::VideoBufferFlags);
|
||||
fn unset_video_flags(&mut self, flags: crate::VideoBufferFlags);
|
||||
}
|
||||
|
||||
impl VideoBufferExt for gst::BufferRef {
fn get_video_flags(&self) -> ::VideoBufferFlags {
fn get_video_flags(&self) -> crate::VideoBufferFlags {
unsafe {
let ptr = self.as_mut_ptr();
::VideoBufferFlags::from_bits_truncate((*ptr).mini_object.flags)
crate::VideoBufferFlags::from_bits_truncate((*ptr).mini_object.flags)
}
}

fn set_video_flags(&mut self, flags: ::VideoBufferFlags) {
fn set_video_flags(&mut self, flags: crate::VideoBufferFlags) {
unsafe {
let ptr = self.as_mut_ptr();
(*ptr).mini_object.flags |= flags.bits();
}
}

fn unset_video_flags(&mut self, flags: ::VideoBufferFlags) {
fn unset_video_flags(&mut self, flags: crate::VideoBufferFlags) {
unsafe {
let ptr = self.as_mut_ptr();
(*ptr).mini_object.flags &= !flags.bits();
@@ -844,13 +841,12 @@ impl VideoBufferExt for gst::BufferRef {
#[cfg(test)]
mod tests {
use super::*;
use gst;

#[test]
fn test_map_read() {
gst::init().unwrap();

let info = ::VideoInfo::builder(::VideoFormat::Gray8, 320, 240)
let info = crate::VideoInfo::builder(crate::VideoFormat::Gray8, 320, 240)
.build()
.unwrap();
let buffer = gst::Buffer::with_size(info.size()).unwrap();
@@ -880,7 +876,7 @@ mod tests {
fn test_map_write() {
gst::init().unwrap();

let info = ::VideoInfo::builder(::VideoFormat::Gray8, 320, 240)
let info = crate::VideoInfo::builder(crate::VideoFormat::Gray8, 320, 240)
.build()
.unwrap();
let buffer = gst::Buffer::with_size(info.size()).unwrap();
@@ -910,7 +906,7 @@ mod tests {
fn test_map_ref_read() {
gst::init().unwrap();

let info = ::VideoInfo::builder(::VideoFormat::Gray8, 320, 240)
let info = crate::VideoInfo::builder(crate::VideoFormat::Gray8, 320, 240)
.build()
.unwrap();
let buffer = gst::Buffer::with_size(info.size()).unwrap();
@@ -926,7 +922,7 @@ mod tests {
fn test_map_ref_write() {
gst::init().unwrap();

let info = ::VideoInfo::builder(::VideoFormat::Gray8, 320, 240)
let info = crate::VideoInfo::builder(crate::VideoFormat::Gray8, 320, 240)
.build()
.unwrap();
let mut buffer = gst::Buffer::with_size(info.size()).unwrap();
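The frame-mapping tests above only exercise path changes, not behaviour. A minimal downstream sketch of the same 2018-edition API, assuming the usual crate-root re-exports of gstreamer-rs (the standalone main function is illustrative, not part of this tree):

use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    // GStreamer must be initialized before building a VideoInfo.
    gst::init().unwrap();

    // Same builder the tests call, reached through the crate root instead of
    // the old 2015-edition `::VideoInfo` / `::VideoFormat` paths.
    let info = gst_video::VideoInfo::builder(gst_video::VideoFormat::Gray8, 320, 240)
        .build()
        .unwrap();

    // A buffer of exactly `info.size()` bytes, as the map tests above use.
    let buffer = gst::Buffer::with_size(info.size()).unwrap();
    assert_eq!(buffer.get_size(), info.size());
}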
@@ -6,16 +6,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use glib_sys;
use gobject_sys;
use gst_video_sys;

use glib;
use glib::translate::{
from_glib, from_glib_full, from_glib_none, FromGlib, FromGlibPtrFull, FromGlibPtrNone, ToGlib,
ToGlibPtr, ToGlibPtrMut,
};
use gst;
use gst::prelude::*;

use std::ffi::CStr;

@@ -24,7 +18,7 @@ use std::mem;
use std::ptr;
use std::str;

pub const VIDEO_MAX_PLANES: usize = gst_video_sys::GST_VIDEO_MAX_PLANES as usize;
pub const VIDEO_MAX_PLANES: usize = ffi::GST_VIDEO_MAX_PLANES as usize;

#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum VideoColorRange {
@@ -37,21 +31,21 @@ pub enum VideoColorRange {

#[doc(hidden)]
impl ToGlib for VideoColorRange {
type GlibType = gst_video_sys::GstVideoColorRange;
type GlibType = ffi::GstVideoColorRange;

fn to_glib(&self) -> gst_video_sys::GstVideoColorRange {
fn to_glib(&self) -> ffi::GstVideoColorRange {
match *self {
VideoColorRange::Unknown => gst_video_sys::GST_VIDEO_COLOR_RANGE_UNKNOWN,
VideoColorRange::Range0255 => gst_video_sys::GST_VIDEO_COLOR_RANGE_0_255,
VideoColorRange::Range16235 => gst_video_sys::GST_VIDEO_COLOR_RANGE_16_235,
VideoColorRange::Unknown => ffi::GST_VIDEO_COLOR_RANGE_UNKNOWN,
VideoColorRange::Range0255 => ffi::GST_VIDEO_COLOR_RANGE_0_255,
VideoColorRange::Range16235 => ffi::GST_VIDEO_COLOR_RANGE_16_235,
VideoColorRange::__Unknown(value) => value,
}
}
}
#[doc(hidden)]
|
||||
impl FromGlib<gst_video_sys::GstVideoColorRange> for VideoColorRange {
|
||||
fn from_glib(value: gst_video_sys::GstVideoColorRange) -> Self {
|
||||
impl FromGlib<ffi::GstVideoColorRange> for VideoColorRange {
|
||||
fn from_glib(value: ffi::GstVideoColorRange) -> Self {
|
||||
skip_assert_initialized!();
|
||||
match value as i32 {
|
||||
0 => VideoColorRange::Unknown,
|
||||
|
@ -64,7 +58,7 @@ impl FromGlib<gst_video_sys::GstVideoColorRange> for VideoColorRange {
|
|||
|
||||
impl glib::StaticType for VideoColorRange {
|
||||
fn static_type() -> glib::Type {
|
||||
unsafe { from_glib(gst_video_sys::gst_video_color_range_get_type()) }
|
||||
unsafe { from_glib(ffi::gst_video_color_range_get_type()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -76,29 +70,29 @@ impl<'a> glib::value::FromValueOptional<'a> for VideoColorRange {
|
|||
|
||||
impl<'a> glib::value::FromValue<'a> for VideoColorRange {
|
||||
unsafe fn from_value(value: &glib::value::Value) -> Self {
|
||||
from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
|
||||
from_glib(glib::gobject_ffi::g_value_get_enum(value.to_glib_none().0))
|
||||
}
|
||||
}
|
||||
|
||||
impl glib::value::SetValue for VideoColorRange {
|
||||
unsafe fn set_value(value: &mut glib::value::Value, this: &Self) {
|
||||
gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib() as i32)
|
||||
glib::gobject_ffi::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib() as i32)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VideoColorimetry(gst_video_sys::GstVideoColorimetry);
|
||||
pub struct VideoColorimetry(ffi::GstVideoColorimetry);
|
||||
|
||||
impl VideoColorimetry {
|
||||
pub fn new(
|
||||
range: VideoColorRange,
|
||||
matrix: ::VideoColorMatrix,
|
||||
transfer: ::VideoTransferFunction,
|
||||
primaries: ::VideoColorPrimaries,
|
||||
matrix: crate::VideoColorMatrix,
|
||||
transfer: crate::VideoTransferFunction,
|
||||
primaries: crate::VideoColorPrimaries,
|
||||
) -> Self {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
let colorimetry = unsafe {
|
||||
let mut colorimetry: gst_video_sys::GstVideoColorimetry = mem::zeroed();
|
||||
let mut colorimetry: ffi::GstVideoColorimetry = mem::zeroed();
|
||||
|
||||
colorimetry.range = range.to_glib();
|
||||
colorimetry.matrix = matrix.to_glib();
|
||||
|
@ -111,19 +105,19 @@ impl VideoColorimetry {
|
|||
VideoColorimetry(colorimetry)
|
||||
}
|
||||
|
||||
pub fn range(&self) -> ::VideoColorRange {
|
||||
pub fn range(&self) -> crate::VideoColorRange {
|
||||
from_glib(self.0.range)
|
||||
}
|
||||
|
||||
pub fn matrix(&self) -> ::VideoColorMatrix {
|
||||
pub fn matrix(&self) -> crate::VideoColorMatrix {
|
||||
from_glib(self.0.matrix)
|
||||
}
|
||||
|
||||
pub fn transfer(&self) -> ::VideoTransferFunction {
|
||||
pub fn transfer(&self) -> crate::VideoTransferFunction {
|
||||
from_glib(self.0.transfer)
|
||||
}
|
||||
|
||||
pub fn primaries(&self) -> ::VideoColorPrimaries {
|
||||
pub fn primaries(&self) -> crate::VideoColorPrimaries {
|
||||
from_glib(self.0.primaries)
|
||||
}
|
||||
}
|
||||
|
@ -136,17 +130,13 @@ impl Clone for VideoColorimetry {
|
|||
|
||||
impl PartialEq for VideoColorimetry {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
unsafe {
|
||||
from_glib(gst_video_sys::gst_video_colorimetry_is_equal(
|
||||
&self.0, &other.0,
|
||||
))
|
||||
}
|
||||
unsafe { from_glib(ffi::gst_video_colorimetry_is_equal(&self.0, &other.0)) }
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for VideoColorimetry {}
|
||||
|
||||
impl str::FromStr for ::VideoColorimetry {
|
||||
impl str::FromStr for crate::VideoColorimetry {
|
||||
type Err = glib::error::BoolError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, glib::error::BoolError> {
|
||||
|
@ -154,20 +144,20 @@ impl str::FromStr for ::VideoColorimetry {
|
|||
|
||||
unsafe {
|
||||
let mut colorimetry = mem::MaybeUninit::zeroed();
|
||||
let valid: bool = from_glib(gst_video_sys::gst_video_colorimetry_from_string(
|
||||
let valid: bool = from_glib(ffi::gst_video_colorimetry_from_string(
|
||||
colorimetry.as_mut_ptr(),
|
||||
s.to_glib_none().0,
|
||||
));
|
||||
if valid {
|
||||
Ok(VideoColorimetry(colorimetry.assume_init()))
|
||||
} else {
|
||||
Err(glib_bool_error!("Invalid colorimetry info"))
|
||||
Err(glib::glib_bool_error!("Invalid colorimetry info"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for ::VideoColorimetry {
|
||||
impl fmt::Debug for crate::VideoColorimetry {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
f.debug_struct("VideoColorimetry")
|
||||
.field("range", &self.0.range)
|
||||
|
@ -178,27 +168,24 @@ impl fmt::Debug for ::VideoColorimetry {
|
|||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ::VideoColorimetry {
|
||||
impl fmt::Display for crate::VideoColorimetry {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
let s = unsafe {
|
||||
glib::GString::from_glib_full(gst_video_sys::gst_video_colorimetry_to_string(&self.0))
|
||||
};
|
||||
let s =
|
||||
unsafe { glib::GString::from_glib_full(ffi::gst_video_colorimetry_to_string(&self.0)) };
|
||||
f.write_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl str::FromStr for ::VideoChromaSite {
|
||||
impl str::FromStr for crate::VideoChromaSite {
|
||||
type Err = glib::error::BoolError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, glib::error::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
unsafe {
|
||||
let chroma_site = from_glib(gst_video_sys::gst_video_chroma_from_string(
|
||||
s.to_glib_none().0,
|
||||
));
|
||||
if chroma_site == ::VideoChromaSite::empty() {
|
||||
Err(glib_bool_error!("Invalid chroma site"))
|
||||
let chroma_site = from_glib(ffi::gst_video_chroma_from_string(s.to_glib_none().0));
|
||||
if chroma_site == crate::VideoChromaSite::empty() {
|
||||
Err(glib::glib_bool_error!("Invalid chroma site"))
|
||||
} else {
|
||||
Ok(chroma_site)
|
||||
}
|
||||
|
@ -206,25 +193,27 @@ impl str::FromStr for ::VideoChromaSite {
|
|||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ::VideoChromaSite {
|
||||
impl fmt::Display for crate::VideoChromaSite {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
let s = unsafe {
|
||||
glib::GString::from_glib_full(gst_video_sys::gst_video_chroma_to_string(self.to_glib()))
|
||||
glib::GString::from_glib_full(ffi::gst_video_chroma_to_string(self.to_glib()))
|
||||
};
|
||||
f.write_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::VideoTransferFunction {
|
||||
impl crate::VideoTransferFunction {
|
||||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
pub fn from_iso(iso: u32) -> Result<::VideoTransferFunction, glib::BoolError> {
|
||||
pub fn from_iso(iso: u32) -> Result<crate::VideoTransferFunction, glib::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
unsafe {
|
||||
let value = from_glib(gst_video_sys::gst_video_transfer_function_from_iso(iso));
|
||||
let value = from_glib(ffi::gst_video_transfer_function_from_iso(iso));
|
||||
match value {
|
||||
::VideoTransferFunction::__Unknown(_) => Err(glib_bool_error!("Invalid ISO value")),
|
||||
crate::VideoTransferFunction::__Unknown(_) => {
|
||||
Err(glib::glib_bool_error!("Invalid ISO value"))
|
||||
}
|
||||
_ => Ok(value),
|
||||
}
|
||||
}
|
||||
|
@ -233,7 +222,7 @@ impl ::VideoTransferFunction {
|
|||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
pub fn to_iso(&self) -> u32 {
|
||||
unsafe { gst_video_sys::gst_video_transfer_function_to_iso(self.to_glib()) }
|
||||
unsafe { ffi::gst_video_transfer_function_to_iso(self.to_glib()) }
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
|
@ -241,11 +230,11 @@ impl ::VideoTransferFunction {
|
|||
pub fn is_equivalent(
|
||||
&self,
|
||||
from_bpp: u32,
|
||||
to_func: ::VideoTransferFunction,
|
||||
to_func: crate::VideoTransferFunction,
|
||||
to_bpp: u32,
|
||||
) -> bool {
|
||||
unsafe {
|
||||
from_glib(gst_video_sys::gst_video_transfer_function_is_equivalent(
|
||||
from_glib(ffi::gst_video_transfer_function_is_equivalent(
|
||||
self.to_glib(),
|
||||
from_bpp,
|
||||
to_func.to_glib(),
|
||||
|
@ -255,16 +244,18 @@ impl ::VideoTransferFunction {
|
|||
}
|
||||
}
|
||||
|
||||
impl ::VideoColorMatrix {
|
||||
impl crate::VideoColorMatrix {
|
||||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
pub fn from_iso(iso: u32) -> Result<::VideoColorMatrix, glib::BoolError> {
|
||||
pub fn from_iso(iso: u32) -> Result<crate::VideoColorMatrix, glib::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
unsafe {
|
||||
let value = from_glib(gst_video_sys::gst_video_color_matrix_from_iso(iso));
|
||||
let value = from_glib(ffi::gst_video_color_matrix_from_iso(iso));
|
||||
match value {
|
||||
::VideoColorMatrix::__Unknown(_) => Err(glib_bool_error!("Invalid ISO value")),
|
||||
crate::VideoColorMatrix::__Unknown(_) => {
|
||||
Err(glib::glib_bool_error!("Invalid ISO value"))
|
||||
}
|
||||
_ => Ok(value),
|
||||
}
|
||||
}
|
||||
|
@ -273,20 +264,22 @@ impl ::VideoColorMatrix {
|
|||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
pub fn to_iso(&self) -> u32 {
|
||||
unsafe { gst_video_sys::gst_video_color_matrix_to_iso(self.to_glib()) }
|
||||
unsafe { ffi::gst_video_color_matrix_to_iso(self.to_glib()) }
|
||||
}
|
||||
}
|
||||
|
||||
impl ::VideoColorPrimaries {
|
||||
impl crate::VideoColorPrimaries {
|
||||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
pub fn from_iso(iso: u32) -> Result<::VideoColorPrimaries, glib::BoolError> {
|
||||
pub fn from_iso(iso: u32) -> Result<crate::VideoColorPrimaries, glib::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
unsafe {
|
||||
let value = from_glib(gst_video_sys::gst_video_color_primaries_from_iso(iso));
|
||||
let value = from_glib(ffi::gst_video_color_primaries_from_iso(iso));
|
||||
match value {
|
||||
::VideoColorPrimaries::__Unknown(_) => Err(glib_bool_error!("Invalid ISO value")),
|
||||
crate::VideoColorPrimaries::__Unknown(_) => {
|
||||
Err(glib::glib_bool_error!("Invalid ISO value"))
|
||||
}
|
||||
_ => Ok(value),
|
||||
}
|
||||
}
|
||||
|
@ -295,34 +288,36 @@ impl ::VideoColorPrimaries {
|
|||
#[cfg(any(feature = "v1_18", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
|
||||
pub fn to_iso(&self) -> u32 {
|
||||
unsafe { gst_video_sys::gst_video_color_primaries_to_iso(self.to_glib()) }
|
||||
unsafe { ffi::gst_video_color_primaries_to_iso(self.to_glib()) }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<::VideoMultiviewFramePacking> for ::VideoMultiviewMode {
|
||||
fn from(v: ::VideoMultiviewFramePacking) -> Self {
|
||||
impl From<crate::VideoMultiviewFramePacking> for crate::VideoMultiviewMode {
|
||||
fn from(v: crate::VideoMultiviewFramePacking) -> Self {
|
||||
skip_assert_initialized!();
|
||||
from_glib(v.to_glib())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::TryFrom<::VideoMultiviewMode> for ::VideoMultiviewFramePacking {
|
||||
impl std::convert::TryFrom<crate::VideoMultiviewMode> for crate::VideoMultiviewFramePacking {
|
||||
type Error = glib::BoolError;
|
||||
|
||||
fn try_from(v: ::VideoMultiviewMode) -> Result<::VideoMultiviewFramePacking, glib::BoolError> {
|
||||
fn try_from(
|
||||
v: crate::VideoMultiviewMode,
|
||||
) -> Result<crate::VideoMultiviewFramePacking, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
let v2 = from_glib(v.to_glib());
|
||||
|
||||
if let ::VideoMultiviewFramePacking::__Unknown(_) = v2 {
|
||||
Err(glib_bool_error!("Invalid frame packing mode"))
|
||||
if let crate::VideoMultiviewFramePacking::__Unknown(_) = v2 {
|
||||
Err(glib::glib_bool_error!("Invalid frame packing mode"))
|
||||
} else {
|
||||
Ok(v2)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VideoInfo(pub(crate) gst_video_sys::GstVideoInfo);
|
||||
pub struct VideoInfo(pub(crate) ffi::GstVideoInfo);
|
||||
|
||||
impl fmt::Debug for VideoInfo {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
|
@ -354,24 +349,24 @@ impl fmt::Debug for VideoInfo {
|
|||
|
||||
#[derive(Debug)]
|
||||
pub struct VideoInfoBuilder<'a> {
|
||||
format: ::VideoFormat,
|
||||
format: crate::VideoFormat,
|
||||
width: u32,
|
||||
height: u32,
|
||||
interlace_mode: Option<::VideoInterlaceMode>,
|
||||
flags: Option<::VideoFlags>,
|
||||
interlace_mode: Option<crate::VideoInterlaceMode>,
|
||||
flags: Option<crate::VideoFlags>,
|
||||
size: Option<usize>,
|
||||
views: Option<u32>,
|
||||
chroma_site: Option<::VideoChromaSite>,
|
||||
colorimetry: Option<&'a ::VideoColorimetry>,
|
||||
chroma_site: Option<crate::VideoChromaSite>,
|
||||
colorimetry: Option<&'a crate::VideoColorimetry>,
|
||||
par: Option<gst::Fraction>,
|
||||
fps: Option<gst::Fraction>,
|
||||
offset: Option<&'a [usize]>,
|
||||
stride: Option<&'a [i32]>,
|
||||
multiview_mode: Option<::VideoMultiviewMode>,
|
||||
multiview_flags: Option<::VideoMultiviewFlags>,
|
||||
multiview_mode: Option<crate::VideoMultiviewMode>,
|
||||
multiview_flags: Option<crate::VideoMultiviewFlags>,
|
||||
#[cfg(any(feature = "v1_12", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
|
||||
field_order: Option<::VideoFieldOrder>,
|
||||
field_order: Option<crate::VideoFieldOrder>,
|
||||
}
|
||||
|
||||
impl<'a> VideoInfoBuilder<'a> {
|
||||
|
@ -379,11 +374,11 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
unsafe {
|
||||
let mut info = mem::MaybeUninit::uninit();
|
||||
|
||||
cfg_if! {
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(feature = "v1_16")] {
|
||||
let res: bool = {
|
||||
from_glib(if let Some(interlace_mode) = self.interlace_mode {
|
||||
gst_video_sys::gst_video_info_set_interlaced_format(
|
||||
ffi::gst_video_info_set_interlaced_format(
|
||||
info.as_mut_ptr(),
|
||||
self.format.to_glib(),
|
||||
interlace_mode.to_glib(),
|
||||
|
@ -391,7 +386,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
self.height,
|
||||
)
|
||||
} else {
|
||||
gst_video_sys::gst_video_info_set_format(
|
||||
ffi::gst_video_info_set_format(
|
||||
info.as_mut_ptr(),
|
||||
self.format.to_glib(),
|
||||
self.width,
|
||||
|
@ -401,7 +396,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
};
|
||||
} else if #[cfg(feature = "v1_12")] {
|
||||
let res: bool = {
|
||||
let res = from_glib(gst_video_sys::gst_video_info_set_format(
|
||||
let res = from_glib(ffi::gst_video_info_set_format(
|
||||
info.as_mut_ptr(),
|
||||
self.format.to_glib(),
|
||||
self.width,
|
||||
|
@ -422,7 +417,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
// The bool return value is new with 1.11.1, see
|
||||
// https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/commit/17cdd369e6f2f73329d27dfceb50011f40f1ceb0
|
||||
let res = if gst::version() < (1, 11, 1, 0) {
|
||||
gst_video_sys::gst_video_info_set_format(
|
||||
ffi::gst_video_info_set_format(
|
||||
info.as_mut_ptr(),
|
||||
self.format.to_glib(),
|
||||
self.width,
|
||||
|
@ -431,7 +426,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
|
||||
true
|
||||
} else {
|
||||
from_glib(gst_video_sys::gst_video_info_set_format(
|
||||
from_glib(ffi::gst_video_info_set_format(
|
||||
info.as_mut_ptr(),
|
||||
self.format.to_glib(),
|
||||
self.width,
|
||||
|
@ -452,13 +447,13 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
}
|
||||
|
||||
if !res {
|
||||
return Err(glib_bool_error!("Failed to build VideoInfo"));
|
||||
return Err(glib::glib_bool_error!("Failed to build VideoInfo"));
|
||||
}
|
||||
|
||||
let mut info = info.assume_init();
|
||||
|
||||
if info.finfo.is_null() || info.width <= 0 || info.height <= 0 {
|
||||
return Err(glib_bool_error!("Failed to build VideoInfo"));
|
||||
return Err(glib::glib_bool_error!("Failed to build VideoInfo"));
|
||||
}
|
||||
|
||||
if let Some(flags) = self.flags {
|
||||
|
@ -493,7 +488,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
|
||||
if let Some(offset) = self.offset {
|
||||
if offset.len() != ((*info.finfo).n_planes as usize) {
|
||||
return Err(glib_bool_error!("Failed to build VideoInfo"));
|
||||
return Err(glib::glib_bool_error!("Failed to build VideoInfo"));
|
||||
}
|
||||
|
||||
let n_planes = (*info.finfo).n_planes as usize;
|
||||
|
@ -502,7 +497,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
|
||||
if let Some(stride) = self.stride {
|
||||
if stride.len() != ((*info.finfo).n_planes as usize) {
|
||||
return Err(glib_bool_error!("Failed to build VideoInfo"));
|
||||
return Err(glib::glib_bool_error!("Failed to build VideoInfo"));
|
||||
}
|
||||
|
||||
let n_planes = (*info.finfo).n_planes as usize;
|
||||
|
@ -529,14 +524,14 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn interlace_mode(self, interlace_mode: ::VideoInterlaceMode) -> VideoInfoBuilder<'a> {
|
||||
pub fn interlace_mode(self, interlace_mode: crate::VideoInterlaceMode) -> VideoInfoBuilder<'a> {
|
||||
Self {
|
||||
interlace_mode: Some(interlace_mode),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn flags(self, flags: ::VideoFlags) -> Self {
|
||||
pub fn flags(self, flags: crate::VideoFlags) -> Self {
|
||||
Self {
|
||||
flags: Some(flags),
|
||||
..self
|
||||
|
@ -557,14 +552,14 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn chroma_site(self, chroma_site: ::VideoChromaSite) -> Self {
|
||||
pub fn chroma_site(self, chroma_site: crate::VideoChromaSite) -> Self {
|
||||
Self {
|
||||
chroma_site: Some(chroma_site),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn colorimetry(self, colorimetry: &'a ::VideoColorimetry) -> VideoInfoBuilder<'a> {
|
||||
pub fn colorimetry(self, colorimetry: &'a crate::VideoColorimetry) -> VideoInfoBuilder<'a> {
|
||||
Self {
|
||||
colorimetry: Some(colorimetry),
|
||||
..self
|
||||
|
@ -599,14 +594,14 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn multiview_mode(self, multiview_mode: ::VideoMultiviewMode) -> Self {
|
||||
pub fn multiview_mode(self, multiview_mode: crate::VideoMultiviewMode) -> Self {
|
||||
Self {
|
||||
multiview_mode: Some(multiview_mode),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn multiview_flags(self, multiview_flags: ::VideoMultiviewFlags) -> Self {
|
||||
pub fn multiview_flags(self, multiview_flags: crate::VideoMultiviewFlags) -> Self {
|
||||
Self {
|
||||
multiview_flags: Some(multiview_flags),
|
||||
..self
|
||||
|
@ -615,7 +610,7 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
|
||||
#[cfg(any(feature = "v1_12", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
|
||||
pub fn field_order(self, field_order: ::VideoFieldOrder) -> Self {
|
||||
pub fn field_order(self, field_order: crate::VideoFieldOrder) -> Self {
|
||||
Self {
|
||||
field_order: Some(field_order),
|
||||
..self
|
||||
|
@ -624,10 +619,14 @@ impl<'a> VideoInfoBuilder<'a> {
|
|||
}
|
||||
|
||||
impl VideoInfo {
|
||||
pub fn builder<'a>(format: ::VideoFormat, width: u32, height: u32) -> VideoInfoBuilder<'a> {
|
||||
pub fn builder<'a>(
|
||||
format: crate::VideoFormat,
|
||||
width: u32,
|
||||
height: u32,
|
||||
) -> VideoInfoBuilder<'a> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
cfg_if! {
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(feature = "v1_12", feature = "dox"))] {
|
||||
VideoInfoBuilder {
|
||||
format,
|
||||
|
@ -678,39 +677,41 @@ impl VideoInfo {
|
|||
|
||||
unsafe {
|
||||
let mut info = mem::MaybeUninit::uninit();
|
||||
if from_glib(gst_video_sys::gst_video_info_from_caps(
|
||||
if from_glib(ffi::gst_video_info_from_caps(
|
||||
info.as_mut_ptr(),
|
||||
caps.as_ptr(),
|
||||
)) {
|
||||
Ok(VideoInfo(info.assume_init()))
|
||||
} else {
|
||||
Err(glib_bool_error!("Failed to create VideoInfo from caps"))
|
||||
Err(glib::glib_bool_error!(
|
||||
"Failed to create VideoInfo from caps"
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_caps(&self) -> Result<gst::Caps, glib::error::BoolError> {
|
||||
unsafe {
|
||||
let result = from_glib_full(gst_video_sys::gst_video_info_to_caps(
|
||||
&self.0 as *const _ as *mut _,
|
||||
));
|
||||
let result = from_glib_full(ffi::gst_video_info_to_caps(&self.0 as *const _ as *mut _));
|
||||
match result {
|
||||
Some(c) => Ok(c),
|
||||
None => Err(glib_bool_error!("Failed to create caps from VideoInfo")),
|
||||
None => Err(glib::glib_bool_error!(
|
||||
"Failed to create caps from VideoInfo"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(&self) -> ::VideoFormat {
|
||||
pub fn format(&self) -> crate::VideoFormat {
|
||||
if self.0.finfo.is_null() {
|
||||
return ::VideoFormat::Unknown;
|
||||
return crate::VideoFormat::Unknown;
|
||||
}
|
||||
|
||||
unsafe { from_glib((*self.0.finfo).format) }
|
||||
}
|
||||
|
||||
pub fn format_info(&self) -> ::VideoFormatInfo {
|
||||
::VideoFormatInfo::from_format(self.format())
|
||||
pub fn format_info(&self) -> crate::VideoFormatInfo {
|
||||
crate::VideoFormatInfo::from_format(self.format())
|
||||
}
|
||||
|
||||
pub fn width(&self) -> u32 {
|
||||
|
@ -724,18 +725,18 @@ impl VideoInfo {
|
|||
#[cfg(any(feature = "v1_16", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
|
||||
pub fn field_height(&self) -> u32 {
|
||||
if self.0.interlace_mode == gst_video_sys::GST_VIDEO_INTERLACE_MODE_ALTERNATE {
|
||||
if self.0.interlace_mode == ffi::GST_VIDEO_INTERLACE_MODE_ALTERNATE {
|
||||
(self.0.height as u32 + 1) / 2
|
||||
} else {
|
||||
self.0.height as u32
|
||||
}
|
||||
}
|
||||
|
||||
pub fn interlace_mode(&self) -> ::VideoInterlaceMode {
|
||||
pub fn interlace_mode(&self) -> crate::VideoInterlaceMode {
|
||||
from_glib(self.0.interlace_mode)
|
||||
}
|
||||
|
||||
pub fn flags(&self) -> ::VideoFlags {
|
||||
pub fn flags(&self) -> crate::VideoFlags {
|
||||
from_glib(self.0.flags)
|
||||
}
|
||||
|
||||
|
@ -747,7 +748,7 @@ impl VideoInfo {
|
|||
self.0.views as u32
|
||||
}
|
||||
|
||||
pub fn chroma_site(&self) -> ::VideoChromaSite {
|
||||
pub fn chroma_site(&self) -> crate::VideoChromaSite {
|
||||
from_glib(self.0.chroma_site)
|
||||
}
|
||||
|
||||
|
@ -771,14 +772,14 @@ impl VideoInfo {
|
|||
&self.0.stride[0..(self.format_info().n_planes() as usize)]
|
||||
}
|
||||
|
||||
pub fn multiview_mode(&self) -> ::VideoMultiviewMode {
|
||||
pub fn multiview_mode(&self) -> crate::VideoMultiviewMode {
|
||||
unsafe {
|
||||
let ptr = &self.0.ABI._gst_reserved as *const _ as *const i32;
|
||||
from_glib(ptr::read(ptr.offset(0)))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn multiview_flags(&self) -> ::VideoMultiviewFlags {
|
||||
pub fn multiview_flags(&self) -> crate::VideoMultiviewFlags {
|
||||
unsafe {
|
||||
let ptr = &self.0.ABI._gst_reserved as *const _ as *const u32;
|
||||
from_glib(ptr::read(ptr.offset(1)))
|
||||
|
@ -787,7 +788,7 @@ impl VideoInfo {
|
|||
|
||||
#[cfg(any(feature = "v1_12", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
|
||||
pub fn field_order(&self) -> ::VideoFieldOrder {
|
||||
pub fn field_order(&self) -> crate::VideoFieldOrder {
|
||||
unsafe {
|
||||
let ptr = &self.0.ABI._gst_reserved as *const _ as *const i32;
|
||||
from_glib(ptr::read(ptr.offset(2)))
|
||||
|
@ -811,7 +812,7 @@ impl VideoInfo {
|
|||
}
|
||||
|
||||
pub fn is_interlaced(&self) -> bool {
|
||||
self.interlace_mode() != ::VideoInterlaceMode::Progressive
|
||||
self.interlace_mode() != crate::VideoInterlaceMode::Progressive
|
||||
}
|
||||
|
||||
pub fn n_planes(&self) -> u32 {
|
||||
|
@ -831,7 +832,7 @@ impl VideoInfo {
|
|||
let src_val = src_val.into();
|
||||
unsafe {
|
||||
let mut dest_val = mem::MaybeUninit::uninit();
|
||||
if from_glib(gst_video_sys::gst_video_info_convert(
|
||||
if from_glib(ffi::gst_video_info_convert(
|
||||
&self.0 as *const _ as *mut _,
|
||||
src_val.get_format().to_glib(),
|
||||
src_val.to_raw_value(),
|
||||
|
@ -855,7 +856,7 @@ impl VideoInfo {
|
|||
let src_val = src_val.into();
|
||||
unsafe {
|
||||
let mut dest_val = mem::MaybeUninit::uninit();
|
||||
if from_glib(gst_video_sys::gst_video_info_convert(
|
||||
if from_glib(ffi::gst_video_info_convert(
|
||||
&self.0 as *const _ as *mut _,
|
||||
src_val.get_format().to_glib(),
|
||||
src_val.to_raw_value(),
|
||||
|
@ -872,11 +873,11 @@ impl VideoInfo {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn align(&mut self, align: &mut ::VideoAlignment) -> bool {
|
||||
cfg_if! {
|
||||
pub fn align(&mut self, align: &mut crate::VideoAlignment) -> bool {
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(feature = "v1_12")] {
|
||||
unsafe {
|
||||
from_glib(gst_video_sys::gst_video_info_align(
|
||||
from_glib(ffi::gst_video_info_align(
|
||||
&mut self.0,
|
||||
&mut align.0,
|
||||
))
|
||||
|
@ -886,11 +887,11 @@ impl VideoInfo {
|
|||
// The bool return value is new with 1.11.1, see
|
||||
// https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/commit/17cdd369e6f2f73329d27dfceb50011f40f1ceb0
|
||||
if gst::version() < (1, 11, 1, 0) {
|
||||
gst_video_sys::gst_video_info_align(&mut self.0, &mut align.0);
|
||||
ffi::gst_video_info_align(&mut self.0, &mut align.0);
|
||||
|
||||
true
|
||||
} else {
|
||||
from_glib(gst_video_sys::gst_video_info_align(
|
||||
from_glib(ffi::gst_video_info_align(
|
||||
&mut self.0,
|
||||
&mut align.0,
|
||||
))
|
||||
|
@ -909,7 +910,7 @@ impl Clone for VideoInfo {
|
|||
|
||||
impl PartialEq for VideoInfo {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
unsafe { from_glib(gst_video_sys::gst_video_info_is_equal(&self.0, &other.0)) }
|
||||
unsafe { from_glib(ffi::gst_video_info_is_equal(&self.0, &other.0)) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -920,25 +921,26 @@ unsafe impl Sync for VideoInfo {}
|
|||
|
||||
impl glib::types::StaticType for VideoInfo {
|
||||
fn static_type() -> glib::types::Type {
|
||||
unsafe { glib::translate::from_glib(gst_video_sys::gst_video_info_get_type()) }
|
||||
unsafe { glib::translate::from_glib(ffi::gst_video_info_get_type()) }
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl<'a> glib::value::FromValueOptional<'a> for VideoInfo {
|
||||
unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> {
|
||||
Option::<VideoInfo>::from_glib_none(gobject_sys::g_value_get_boxed(value.to_glib_none().0)
|
||||
as *mut gst_video_sys::GstVideoInfo)
|
||||
Option::<VideoInfo>::from_glib_none(glib::gobject_ffi::g_value_get_boxed(
|
||||
value.to_glib_none().0,
|
||||
) as *mut ffi::GstVideoInfo)
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl glib::value::SetValue for VideoInfo {
|
||||
unsafe fn set_value(value: &mut glib::Value, this: &Self) {
|
||||
gobject_sys::g_value_set_boxed(
|
||||
glib::gobject_ffi::g_value_set_boxed(
|
||||
value.to_glib_none_mut().0,
|
||||
glib::translate::ToGlibPtr::<*const gst_video_sys::GstVideoInfo>::to_glib_none(this).0
|
||||
as glib_sys::gpointer,
|
||||
glib::translate::ToGlibPtr::<*const ffi::GstVideoInfo>::to_glib_none(this).0
|
||||
as glib::ffi::gpointer,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -946,10 +948,10 @@ impl glib::value::SetValue for VideoInfo {
|
|||
#[doc(hidden)]
|
||||
impl glib::value::SetValueOptional for VideoInfo {
|
||||
unsafe fn set_value_optional(value: &mut glib::Value, this: Option<&Self>) {
|
||||
gobject_sys::g_value_set_boxed(
|
||||
glib::gobject_ffi::g_value_set_boxed(
|
||||
value.to_glib_none_mut().0,
|
||||
glib::translate::ToGlibPtr::<*const gst_video_sys::GstVideoInfo>::to_glib_none(&this).0
|
||||
as glib_sys::gpointer,
|
||||
glib::translate::ToGlibPtr::<*const ffi::GstVideoInfo>::to_glib_none(&this).0
|
||||
as glib::ffi::gpointer,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -963,66 +965,62 @@ impl glib::translate::Uninitialized for VideoInfo {
|
|||
|
||||
#[doc(hidden)]
|
||||
impl glib::translate::GlibPtrDefault for VideoInfo {
|
||||
type GlibType = *mut gst_video_sys::GstVideoInfo;
|
||||
type GlibType = *mut ffi::GstVideoInfo;
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl<'a> glib::translate::ToGlibPtr<'a, *const gst_video_sys::GstVideoInfo> for VideoInfo {
|
||||
impl<'a> glib::translate::ToGlibPtr<'a, *const ffi::GstVideoInfo> for VideoInfo {
|
||||
type Storage = &'a VideoInfo;
|
||||
|
||||
fn to_glib_none(
|
||||
&'a self,
|
||||
) -> glib::translate::Stash<'a, *const gst_video_sys::GstVideoInfo, Self> {
|
||||
fn to_glib_none(&'a self) -> glib::translate::Stash<'a, *const ffi::GstVideoInfo, Self> {
|
||||
glib::translate::Stash(&self.0, self)
|
||||
}
|
||||
|
||||
fn to_glib_full(&self) -> *const gst_video_sys::GstVideoInfo {
|
||||
fn to_glib_full(&self) -> *const ffi::GstVideoInfo {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl glib::translate::FromGlibPtrNone<*mut gst_video_sys::GstVideoInfo> for VideoInfo {
|
||||
impl glib::translate::FromGlibPtrNone<*mut ffi::GstVideoInfo> for VideoInfo {
|
||||
#[inline]
|
||||
unsafe fn from_glib_none(ptr: *mut gst_video_sys::GstVideoInfo) -> Self {
|
||||
unsafe fn from_glib_none(ptr: *mut ffi::GstVideoInfo) -> Self {
|
||||
VideoInfo(ptr::read(ptr))
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
impl glib::translate::FromGlibPtrFull<*mut gst_video_sys::GstVideoInfo> for VideoInfo {
|
||||
impl glib::translate::FromGlibPtrFull<*mut ffi::GstVideoInfo> for VideoInfo {
|
||||
#[inline]
|
||||
unsafe fn from_glib_full(ptr: *mut gst_video_sys::GstVideoInfo) -> Self {
|
||||
unsafe fn from_glib_full(ptr: *mut ffi::GstVideoInfo) -> Self {
|
||||
let info = from_glib_none(ptr);
|
||||
glib_sys::g_free(ptr as *mut _);
|
||||
glib::ffi::g_free(ptr as *mut _);
|
||||
info
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "v1_12", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
|
||||
impl ::VideoFieldOrder {
|
||||
impl crate::VideoFieldOrder {
|
||||
pub fn to_str<'a>(self) -> &'a str {
|
||||
unsafe {
|
||||
CStr::from_ptr(gst_video_sys::gst_video_field_order_to_string(
|
||||
self.to_glib(),
|
||||
))
|
||||
.to_str()
|
||||
.unwrap()
|
||||
CStr::from_ptr(ffi::gst_video_field_order_to_string(self.to_glib()))
|
||||
.to_str()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "v1_12", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
|
||||
impl str::FromStr for ::VideoFieldOrder {
|
||||
impl str::FromStr for crate::VideoFieldOrder {
|
||||
type Err = glib::error::BoolError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, glib::error::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
unsafe {
|
||||
Ok(from_glib(gst_video_sys::gst_video_field_order_from_string(
|
||||
Ok(from_glib(ffi::gst_video_field_order_from_string(
|
||||
s.to_glib_none().0,
|
||||
)))
|
||||
}
|
||||
|
@ -1031,39 +1029,37 @@ impl str::FromStr for ::VideoFieldOrder {
|
|||
|
||||
#[cfg(any(feature = "v1_12", feature = "dox"))]
|
||||
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
|
||||
impl fmt::Display for ::VideoFieldOrder {
|
||||
impl fmt::Display for crate::VideoFieldOrder {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
f.write_str((*self).to_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl ::VideoInterlaceMode {
|
||||
impl crate::VideoInterlaceMode {
|
||||
pub fn to_str<'a>(self) -> &'a str {
|
||||
unsafe {
|
||||
CStr::from_ptr(gst_video_sys::gst_video_interlace_mode_to_string(
|
||||
self.to_glib(),
|
||||
))
|
||||
.to_str()
|
||||
.unwrap()
|
||||
CStr::from_ptr(ffi::gst_video_interlace_mode_to_string(self.to_glib()))
|
||||
.to_str()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl str::FromStr for ::VideoInterlaceMode {
|
||||
impl str::FromStr for crate::VideoInterlaceMode {
|
||||
type Err = glib::error::BoolError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, glib::error::BoolError> {
|
||||
assert_initialized_main_thread!();
|
||||
|
||||
unsafe {
|
||||
Ok(from_glib(
|
||||
gst_video_sys::gst_video_interlace_mode_from_string(s.to_glib_none().0),
|
||||
))
|
||||
Ok(from_glib(ffi::gst_video_interlace_mode_from_string(
|
||||
s.to_glib_none().0,
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ::VideoInterlaceMode {
|
||||
impl fmt::Display for crate::VideoInterlaceMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
f.write_str((*self).to_str())
|
||||
}
|
||||
|
@ -1072,40 +1068,39 @@ impl fmt::Display for ::VideoInterlaceMode {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gst;
|
||||
|
||||
#[test]
|
||||
fn test_new() {
|
||||
gst::init().unwrap();
|
||||
|
||||
let info = VideoInfo::builder(::VideoFormat::I420, 320, 240)
|
||||
let info = VideoInfo::builder(crate::VideoFormat::I420, 320, 240)
|
||||
.build()
|
||||
.unwrap();
|
||||
assert_eq!(info.format(), ::VideoFormat::I420);
|
||||
assert_eq!(info.format(), crate::VideoFormat::I420);
|
||||
assert_eq!(info.width(), 320);
|
||||
assert_eq!(info.height(), 240);
|
||||
assert_eq!(info.size(), 320 * 240 + 2 * 160 * 120);
|
||||
assert_eq!(info.multiview_mode(), ::VideoMultiviewMode::None);
|
||||
assert_eq!(info.multiview_mode(), crate::VideoMultiviewMode::None);
|
||||
assert_eq!(&info.offset(), &[0, 320 * 240, 320 * 240 + 160 * 120]);
|
||||
assert_eq!(&info.stride(), &[320, 160, 160]);
|
||||
|
||||
let offsets = [0, 640 * 240 + 16, 640 * 240 + 16 + 320 * 120 + 16];
|
||||
let strides = [640, 320, 320];
|
||||
let info = VideoInfo::builder(::VideoFormat::I420, 320, 240)
|
||||
let info = VideoInfo::builder(crate::VideoFormat::I420, 320, 240)
|
||||
.offset(&offsets)
|
||||
.stride(&strides)
|
||||
.size(640 * 240 + 16 + 320 * 120 + 16 + 320 * 120 + 16)
|
||||
.multiview_mode(::VideoMultiviewMode::SideBySide)
|
||||
.multiview_mode(crate::VideoMultiviewMode::SideBySide)
|
||||
.build()
|
||||
.unwrap();
|
||||
assert_eq!(info.format(), ::VideoFormat::I420);
|
||||
assert_eq!(info.format(), crate::VideoFormat::I420);
|
||||
assert_eq!(info.width(), 320);
|
||||
assert_eq!(info.height(), 240);
|
||||
assert_eq!(
|
||||
info.size(),
|
||||
640 * 240 + 16 + 320 * 120 + 16 + 320 * 120 + 16
|
||||
);
|
||||
assert_eq!(info.multiview_mode(), ::VideoMultiviewMode::SideBySide);
|
||||
assert_eq!(info.multiview_mode(), crate::VideoMultiviewMode::SideBySide);
|
||||
assert_eq!(
|
||||
&info.offset(),
|
||||
&[0, 640 * 240 + 16, 640 * 240 + 16 + 320 * 120 + 16]
|
||||
|
@ -1131,12 +1126,15 @@ mod tests {
|
|||
],
|
||||
);
|
||||
let info = VideoInfo::from_caps(&caps).unwrap();
|
||||
assert_eq!(info.format(), ::VideoFormat::I420);
|
||||
assert_eq!(info.format(), crate::VideoFormat::I420);
|
||||
assert_eq!(info.width(), 320);
|
||||
assert_eq!(info.height(), 240);
|
||||
assert_eq!(info.fps(), gst::Fraction::new(30, 1));
|
||||
assert_eq!(info.interlace_mode(), ::VideoInterlaceMode::Progressive);
|
||||
assert_eq!(info.chroma_site(), ::VideoChromaSite::MPEG2);
|
||||
assert_eq!(
|
||||
info.interlace_mode(),
|
||||
crate::VideoInterlaceMode::Progressive
|
||||
);
|
||||
assert_eq!(info.chroma_site(), crate::VideoChromaSite::MPEG2);
|
||||
assert_eq!(info.colorimetry(), "bt709".parse().unwrap());
|
||||
|
||||
let caps2 = info.to_caps().unwrap();
|
||||
|
@ -1152,14 +1150,14 @@ mod tests {
|
|||
fn test_video_align() {
|
||||
gst::init().unwrap();
|
||||
|
||||
let mut info = ::VideoInfo::builder(::VideoFormat::Nv16, 1920, 1080)
|
||||
let mut info = crate::VideoInfo::builder(crate::VideoFormat::Nv16, 1920, 1080)
|
||||
.build()
|
||||
.expect("Failed to create VideoInfo");
|
||||
|
||||
assert_eq!(info.stride(), [1920, 1920]);
|
||||
assert_eq!(info.offset(), [0, 2_073_600]);
|
||||
|
||||
let mut align = ::VideoAlignment::new(0, 0, 0, 8, &[0; VIDEO_MAX_PLANES]);
|
||||
let mut align = crate::VideoAlignment::new(0, 0, 0, 8, &[0; VIDEO_MAX_PLANES]);
|
||||
assert!(info.align(&mut align));
|
||||
|
||||
assert_eq!(info.stride(), [1928, 1928]);
|
||||
|
@@ -1174,8 +1172,8 @@ mod tests {

gst::init().unwrap();

format!("{}", ::VideoColorimetry::from_str("sRGB").unwrap());
format!("{}", ::VideoFieldOrder::TopFieldFirst);
format!("{}", ::VideoInterlaceMode::Progressive);
format!("{}", crate::VideoColorimetry::from_str("sRGB").unwrap());
format!("{}", crate::VideoFieldOrder::TopFieldFirst);
format!("{}", crate::VideoInterlaceMode::Progressive);
}
}
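The FromStr and Display implementations above are what test_display drives. A small usage sketch, assuming the usual crate-root re-exports and, for VideoFieldOrder, the v1_12 feature (the standalone main function is illustrative, not part of this tree):

use std::str::FromStr;

use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    // Parsing colorimetry strings requires an initialized GStreamer.
    gst::init().unwrap();

    // Round-trip a colorimetry shorthand through the FromStr impl above,
    // then print it back through Display.
    let colorimetry = gst_video::VideoColorimetry::from_str("bt709").unwrap();
    println!("colorimetry: {}", colorimetry);

    // VideoFieldOrder and VideoInterlaceMode print through the same pattern.
    println!("field order: {}", gst_video::VideoFieldOrder::TopFieldFirst);
    println!("interlace mode: {}", gst_video::VideoInterlaceMode::Progressive);
}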
@@ -9,15 +9,11 @@
use std::fmt;
use std::ptr;

use glib;
use glib::translate::{from_glib, from_glib_none, FromGlib, ToGlib, ToGlibPtr};
use gst;
use gst::prelude::*;
use gst_sys;
use gst_video_sys;

#[repr(transparent)]
pub struct VideoMeta(gst_video_sys::GstVideoMeta);
pub struct VideoMeta(ffi::GstVideoMeta);

unsafe impl Send for VideoMeta {}
unsafe impl Sync for VideoMeta {}
@ -25,25 +21,28 @@ unsafe impl Sync for VideoMeta {}
|
|||
impl VideoMeta {
|
||||
pub fn add(
|
||||
buffer: &mut gst::BufferRef,
|
||||
flags: ::VideoFrameFlags,
|
||||
format: ::VideoFormat,
|
||||
flags: crate::VideoFrameFlags,
|
||||
format: crate::VideoFormat,
|
||||
width: u32,
|
||||
height: u32,
|
||||
) -> Result<gst::MetaRefMut<Self, gst::meta::Standalone>, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
if format == ::VideoFormat::Unknown || format == ::VideoFormat::Encoded {
|
||||
return Err(glib_bool_error!("Unsupported video format {}", format));
|
||||
if format == crate::VideoFormat::Unknown || format == crate::VideoFormat::Encoded {
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Unsupported video format {}",
|
||||
format
|
||||
));
|
||||
}
|
||||
|
||||
let info = ::VideoInfo::builder(format, width, height).build()?;
|
||||
let info = crate::VideoInfo::builder(format, width, height).build()?;
|
||||
|
||||
if !info.is_valid() {
|
||||
return Err(glib_bool_error!("Invalid video info"));
|
||||
return Err(glib::glib_bool_error!("Invalid video info"));
|
||||
}
|
||||
|
||||
if buffer.get_size() < info.size() {
|
||||
return Err(glib_bool_error!(
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Buffer smaller than required frame size ({} < {})",
|
||||
buffer.get_size(),
|
||||
info.size()
|
||||
|
@ -51,7 +50,7 @@ impl VideoMeta {
|
|||
}
|
||||
|
||||
unsafe {
|
||||
let meta = gst_video_sys::gst_buffer_add_video_meta(
|
||||
let meta = ffi::gst_buffer_add_video_meta(
|
||||
buffer.as_mut_ptr(),
|
||||
flags.to_glib(),
|
||||
format.to_glib(),
|
||||
|
@ -60,7 +59,7 @@ impl VideoMeta {
|
|||
);
|
||||
|
||||
if meta.is_null() {
|
||||
return Err(glib_bool_error!("Failed to add video meta"));
|
||||
return Err(glib::glib_bool_error!("Failed to add video meta"));
|
||||
}
|
||||
|
||||
Ok(Self::from_mut_ptr(buffer, meta))
|
||||
|
@ -69,8 +68,8 @@ impl VideoMeta {
|
|||
|
||||
pub fn add_full<'a>(
|
||||
buffer: &'a mut gst::BufferRef,
|
||||
flags: ::VideoFrameFlags,
|
||||
format: ::VideoFormat,
|
||||
flags: crate::VideoFrameFlags,
|
||||
format: crate::VideoFormat,
|
||||
width: u32,
|
||||
height: u32,
|
||||
offset: &[usize],
|
||||
|
@ -78,22 +77,25 @@ impl VideoMeta {
|
|||
) -> Result<gst::MetaRefMut<'a, Self, gst::meta::Standalone>, glib::BoolError> {
|
||||
skip_assert_initialized!();
|
||||
|
||||
if format == ::VideoFormat::Unknown || format == ::VideoFormat::Encoded {
|
||||
return Err(glib_bool_error!("Unsupported video format {}", format));
|
||||
if format == crate::VideoFormat::Unknown || format == crate::VideoFormat::Encoded {
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Unsupported video format {}",
|
||||
format
|
||||
));
|
||||
}
|
||||
|
||||
let n_planes = offset.len() as u32;
|
||||
let info = ::VideoInfo::builder(format, width, height)
|
||||
let info = crate::VideoInfo::builder(format, width, height)
|
||||
.offset(offset)
|
||||
.stride(stride)
|
||||
.build()?;
|
||||
|
||||
if !info.is_valid() {
|
||||
return Err(glib_bool_error!("Invalid video info"));
|
||||
return Err(glib::glib_bool_error!("Invalid video info"));
|
||||
}
|
||||
|
||||
if buffer.get_size() < info.size() {
|
||||
return Err(glib_bool_error!(
|
||||
return Err(glib::glib_bool_error!(
|
||||
"Buffer smaller than required frame size ({} < {})",
|
||||
buffer.get_size(),
|
||||
info.size()
|
||||
|
@ -101,7 +103,7 @@ impl VideoMeta {
|
|||
}
|
||||
|
||||
unsafe {
|
||||
let meta = gst_video_sys::gst_buffer_add_video_meta_full(
|
||||
let meta = ffi::gst_buffer_add_video_meta_full(
|
||||
buffer.as_mut_ptr(),
|
||||
flags.to_glib(),
|
||||
format.to_glib(),
|
||||
|
@ -113,18 +115,18 @@ impl VideoMeta {
|
|||
);
|
||||
|
||||
if meta.is_null() {
|
||||
return Err(glib_bool_error!("Failed to add video meta"));
|
||||
return Err(glib::glib_bool_error!("Failed to add video meta"));
|
||||
}
|
||||
|
||||
Ok(Self::from_mut_ptr(buffer, meta))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_flags(&self) -> ::VideoFrameFlags {
|
||||
pub fn get_flags(&self) -> crate::VideoFrameFlags {
|
||||
from_glib(self.0.flags)
|
||||
}
|
||||
|
||||
pub fn get_format(&self) -> ::VideoFormat {
|
||||
pub fn get_format(&self) -> crate::VideoFormat {
|
||||
from_glib(self.0.format)
|
||||
}
|
||||
|
||||
|
@ -154,10 +156,10 @@ impl VideoMeta {
|
|||
}
|
||||
|
||||
unsafe impl MetaAPI for VideoMeta {
|
||||
type GstType = gst_video_sys::GstVideoMeta;
|
||||
type GstType = ffi::GstVideoMeta;
|
||||
|
||||
fn get_meta_api() -> glib::Type {
|
||||
unsafe { from_glib(gst_video_sys::gst_video_meta_api_get_type()) }
|
||||
unsafe { from_glib(ffi::gst_video_meta_api_get_type()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -177,7 +179,7 @@ impl fmt::Debug for VideoMeta {
|
|||
}
|
||||
|
||||
#[repr(transparent)]
|
||||
pub struct VideoCropMeta(gst_video_sys::GstVideoCropMeta);
|
||||
pub struct VideoCropMeta(ffi::GstVideoCropMeta);
|
||||
|
||||
unsafe impl Send for VideoCropMeta {}
|
||||
unsafe impl Sync for VideoCropMeta {}
|
||||
|
@ -189,11 +191,11 @@ impl VideoCropMeta {
|
|||
) -> gst::MetaRefMut<Self, gst::meta::Standalone> {
|
||||
skip_assert_initialized!();
|
||||
unsafe {
|
||||
let meta = gst_sys::gst_buffer_add_meta(
|
||||
let meta = gst::ffi::gst_buffer_add_meta(
|
||||
buffer.as_mut_ptr(),
|
||||
gst_video_sys::gst_video_crop_meta_get_info(),
|
||||
ffi::gst_video_crop_meta_get_info(),
|
||||
ptr::null_mut(),
|
||||
) as *mut gst_video_sys::GstVideoCropMeta;
|
||||
) as *mut ffi::GstVideoCropMeta;
|
||||
|
||||
{
|
||||
let meta = &mut *meta;
|
||||
|
@ -220,10 +222,10 @@ impl VideoCropMeta {
|
|||
}
|
||||
|
||||
unsafe impl MetaAPI for VideoCropMeta {
|
||||
type GstType = gst_video_sys::GstVideoCropMeta;
|
||||
type GstType = ffi::GstVideoCropMeta;
|
||||
|
||||
fn get_meta_api() -> glib::Type {
|
||||
unsafe { from_glib(gst_video_sys::gst_video_crop_meta_api_get_type()) }
|
||||
unsafe { from_glib(ffi::gst_video_crop_meta_api_get_type()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -236,7 +238,7 @@ impl fmt::Debug for VideoCropMeta {
|
|||
}
|
||||
|
||||
#[repr(transparent)]
|
||||
pub struct VideoRegionOfInterestMeta(gst_video_sys::GstVideoRegionOfInterestMeta);
|
||||
pub struct VideoRegionOfInterestMeta(ffi::GstVideoRegionOfInterestMeta);
|
||||
|
||||
unsafe impl Send for VideoRegionOfInterestMeta {}
|
||||
unsafe impl Sync for VideoRegionOfInterestMeta {}
|
||||
|
@ -249,7 +251,7 @@ impl VideoRegionOfInterestMeta {
|
|||
) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
|
||||
skip_assert_initialized!();
|
||||
unsafe {
|
||||
let meta = gst_video_sys::gst_buffer_add_video_region_of_interest_meta(
|
||||
let meta = ffi::gst_buffer_add_video_region_of_interest_meta(
|
||||
buffer.as_mut_ptr(),
|
||||
roi_type.to_glib_none().0,
|
||||
rect.0,
|
||||
|
@ -309,7 +311,7 @@ impl VideoRegionOfInterestMeta {
|
|||
#[cfg(feature = "v1_14")]
|
||||
pub fn add_param(&mut self, s: gst::Structure) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_region_of_interest_meta_add_param(&mut self.0, s.into_ptr());
|
||||
ffi::gst_video_region_of_interest_meta_add_param(&mut self.0, s.into_ptr());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -317,7 +319,7 @@ impl VideoRegionOfInterestMeta {
|
|||
#[cfg(feature = "v1_14")]
|
||||
pub struct ParamsIter<'a> {
|
||||
_meta: &'a VideoRegionOfInterestMeta,
|
||||
list: *const glib_sys::GList,
|
||||
list: *const glib::ffi::GList,
|
||||
}
|
||||
|
||||
#[cfg(feature = "v1_14")]
|
||||
|
@ -334,7 +336,7 @@ impl<'a> Iterator for ParamsIter<'a> {
|
|||
assert!(!data.is_null());
|
||||
self.list = (*self.list).next;
|
||||
|
||||
let s = gst::StructureRef::from_glib_borrow(data as *const gst_sys::GstStructure);
|
||||
let s = gst::StructureRef::from_glib_borrow(data as *const gst::ffi::GstStructure);
|
||||
|
||||
Some(s)
|
||||
}
|
||||
|
@ -342,10 +344,10 @@ impl<'a> Iterator for ParamsIter<'a> {
|
|||
}
|
||||
|
||||
unsafe impl MetaAPI for VideoRegionOfInterestMeta {
|
||||
type GstType = gst_video_sys::GstVideoRegionOfInterestMeta;
|
||||
type GstType = ffi::GstVideoRegionOfInterestMeta;
|
||||
|
||||
fn get_meta_api() -> glib::Type {
|
||||
unsafe { from_glib(gst_video_sys::gst_video_region_of_interest_meta_api_get_type()) }
|
||||
unsafe { from_glib(ffi::gst_video_region_of_interest_meta_api_get_type()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -368,7 +370,7 @@ impl fmt::Debug for VideoRegionOfInterestMeta {
|
|||
}
|
||||
|
||||
#[repr(transparent)]
|
||||
pub struct VideoAffineTransformationMeta(gst_video_sys::GstVideoAffineTransformationMeta);
|
||||
pub struct VideoAffineTransformationMeta(ffi::GstVideoAffineTransformationMeta);
|
||||
|
||||
unsafe impl Send for VideoAffineTransformationMeta {}
|
||||
unsafe impl Sync for VideoAffineTransformationMeta {}
|
||||
|
@ -380,11 +382,11 @@ impl VideoAffineTransformationMeta {
|
|||
) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
|
||||
skip_assert_initialized!();
|
||||
unsafe {
|
||||
let meta = gst_sys::gst_buffer_add_meta(
|
||||
let meta = gst::ffi::gst_buffer_add_meta(
|
||||
buffer.as_mut_ptr(),
|
||||
gst_video_sys::gst_video_affine_transformation_meta_get_info(),
|
||||
ffi::gst_video_affine_transformation_meta_get_info(),
|
||||
ptr::null_mut(),
|
||||
) as *mut gst_video_sys::GstVideoAffineTransformationMeta;
|
||||
) as *mut ffi::GstVideoAffineTransformationMeta;
|
||||
|
||||
if let Some(matrix) = matrix {
|
||||
let meta = &mut *meta;
|
||||
|
@ -405,16 +407,16 @@ impl VideoAffineTransformationMeta {
|
|||
|
||||
pub fn apply_matrix(&mut self, matrix: &[f32; 16]) {
|
||||
unsafe {
|
||||
gst_video_sys::gst_video_affine_transformation_meta_apply_matrix(&mut self.0, matrix);
|
||||
ffi::gst_video_affine_transformation_meta_apply_matrix(&mut self.0, matrix);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl MetaAPI for VideoAffineTransformationMeta {
|
||||
type GstType = gst_video_sys::GstVideoAffineTransformationMeta;
|
||||
type GstType = ffi::GstVideoAffineTransformationMeta;
|
||||
|
||||
fn get_meta_api() -> glib::Type {
|
||||
unsafe { from_glib(gst_video_sys::gst_video_affine_transformation_meta_api_get_type()) }
|
||||
unsafe { from_glib(ffi::gst_video_affine_transformation_meta_api_get_type()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -427,7 +429,7 @@ impl fmt::Debug for VideoAffineTransformationMeta {
|
|||
}
|
||||
|
||||
#[repr(transparent)]
|
||||
pub struct VideoOverlayCompositionMeta(gst_video_sys::GstVideoOverlayCompositionMeta);
|
||||
pub struct VideoOverlayCompositionMeta(ffi::GstVideoOverlayCompositionMeta);
|
||||
|
||||
unsafe impl Send for VideoOverlayCompositionMeta {}
|
||||
unsafe impl Sync for VideoOverlayCompositionMeta {}
|
||||
|
@ -435,11 +437,11 @@ unsafe impl Sync for VideoOverlayCompositionMeta {}
|
|||
impl VideoOverlayCompositionMeta {
|
||||
pub fn add<'a>(
|
||||
buffer: &'a mut gst::BufferRef,
|
||||
overlay: &::VideoOverlayComposition,
|
||||
overlay: &crate::VideoOverlayComposition,
|
||||
) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
|
||||
skip_assert_initialized!();
|
||||
unsafe {
|
||||
let meta = gst_video_sys::gst_buffer_add_video_overlay_composition_meta(
|
||||
let meta = ffi::gst_buffer_add_video_overlay_composition_meta(
|
||||
buffer.as_mut_ptr(),
|
||||
overlay.as_mut_ptr(),
|
||||
);
|
||||
|
@ -448,28 +450,29 @@ impl VideoOverlayCompositionMeta {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get_overlay(&self) -> &::VideoOverlayCompositionRef {
|
||||
unsafe { ::VideoOverlayCompositionRef::from_ptr(self.0.overlay) }
|
||||
pub fn get_overlay(&self) -> &crate::VideoOverlayCompositionRef {
|
||||
unsafe { crate::VideoOverlayCompositionRef::from_ptr(self.0.overlay) }
|
||||
}
|
||||
|
||||
pub fn get_overlay_owned(&self) -> ::VideoOverlayComposition {
|
||||
pub fn get_overlay_owned(&self) -> crate::VideoOverlayComposition {
|
||||
unsafe { from_glib_none(self.get_overlay().as_ptr()) }
|
||||
}
|
||||
|
||||
pub fn set_overlay(&mut self, overlay: &::VideoOverlayComposition) {
|
||||
pub fn set_overlay(&mut self, overlay: &crate::VideoOverlayComposition) {
|
||||
#![allow(clippy::cast_ptr_alignment)]
|
||||
unsafe {
|
||||
gst_sys::gst_mini_object_unref(self.0.overlay as *mut _);
|
||||
self.0.overlay = gst_sys::gst_mini_object_ref(overlay.as_mut_ptr() as *mut _) as *mut _;
gst::ffi::gst_mini_object_unref(self.0.overlay as *mut _);
self.0.overlay =
gst::ffi::gst_mini_object_ref(overlay.as_mut_ptr() as *mut _) as *mut _;
}
}
}

unsafe impl MetaAPI for VideoOverlayCompositionMeta {
type GstType = gst_video_sys::GstVideoOverlayCompositionMeta;
type GstType = ffi::GstVideoOverlayCompositionMeta;

fn get_meta_api() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_overlay_composition_meta_api_get_type()) }
unsafe { from_glib(ffi::gst_video_overlay_composition_meta_api_get_type()) }
}
}

@@ -484,7 +487,7 @@ impl fmt::Debug for VideoOverlayCompositionMeta {
#[cfg(any(feature = "v1_16", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
#[repr(transparent)]
pub struct VideoCaptionMeta(gst_video_sys::GstVideoCaptionMeta);
pub struct VideoCaptionMeta(ffi::GstVideoCaptionMeta);

#[cfg(any(feature = "v1_16", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]

@@ -498,13 +501,13 @@ unsafe impl Sync for VideoCaptionMeta {}
impl VideoCaptionMeta {
pub fn add<'a>(
buffer: &'a mut gst::BufferRef,
caption_type: ::VideoCaptionType,
caption_type: crate::VideoCaptionType,
data: &[u8],
) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
skip_assert_initialized!();
assert!(!data.is_empty());
unsafe {
let meta = gst_video_sys::gst_buffer_add_video_caption_meta(
let meta = ffi::gst_buffer_add_video_caption_meta(
buffer.as_mut_ptr(),
caption_type.to_glib(),
data.as_ptr(),

@@ -515,7 +518,7 @@ impl VideoCaptionMeta {
}
}

pub fn get_caption_type(&self) -> ::VideoCaptionType {
pub fn get_caption_type(&self) -> crate::VideoCaptionType {
from_glib(self.0.caption_type)
}

@@ -531,10 +534,10 @@ impl VideoCaptionMeta {
#[cfg(any(feature = "v1_16", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_16")))]
unsafe impl MetaAPI for VideoCaptionMeta {
type GstType = gst_video_sys::GstVideoCaptionMeta;
type GstType = ffi::GstVideoCaptionMeta;

fn get_meta_api() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_caption_meta_api_get_type()) }
unsafe { from_glib(ffi::gst_video_caption_meta_api_get_type()) }
}
}

@@ -552,7 +555,7 @@ impl fmt::Debug for VideoCaptionMeta {
#[cfg(any(feature = "v1_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
#[repr(transparent)]
pub struct VideoAFDMeta(gst_video_sys::GstVideoAFDMeta);
pub struct VideoAFDMeta(ffi::GstVideoAFDMeta);

#[cfg(any(feature = "v1_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]

@@ -567,13 +570,13 @@ impl VideoAFDMeta {
pub fn add(
buffer: &mut gst::BufferRef,
field: u8,
spec: ::VideoAFDSpec,
afd: ::VideoAFDValue,
spec: crate::VideoAFDSpec,
afd: crate::VideoAFDValue,
) -> gst::MetaRefMut<Self, gst::meta::Standalone> {
skip_assert_initialized!();

unsafe {
let meta = gst_video_sys::gst_buffer_add_video_afd_meta(
let meta = ffi::gst_buffer_add_video_afd_meta(
buffer.as_mut_ptr(),
field,
spec.to_glib(),

@@ -588,11 +591,11 @@ impl VideoAFDMeta {
self.0.field
}

pub fn get_spec(&self) -> ::VideoAFDSpec {
pub fn get_spec(&self) -> crate::VideoAFDSpec {
from_glib(self.0.spec)
}

pub fn get_afd(&self) -> ::VideoAFDValue {
pub fn get_afd(&self) -> crate::VideoAFDValue {
from_glib(self.0.afd)
}
}

@@ -600,10 +603,10 @@ impl VideoAFDMeta {
#[cfg(any(feature = "v1_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
unsafe impl MetaAPI for VideoAFDMeta {
type GstType = gst_video_sys::GstVideoAFDMeta;
type GstType = ffi::GstVideoAFDMeta;

fn get_meta_api() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_afd_meta_api_get_type()) }
unsafe { from_glib(ffi::gst_video_afd_meta_api_get_type()) }
}
}

@@ -622,7 +625,7 @@ impl fmt::Debug for VideoAFDMeta {
#[cfg(any(feature = "v1_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
#[repr(transparent)]
pub struct VideoBarMeta(gst_video_sys::GstVideoBarMeta);
pub struct VideoBarMeta(ffi::GstVideoBarMeta);

#[cfg(any(feature = "v1_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]

@@ -644,7 +647,7 @@ impl VideoBarMeta {
skip_assert_initialized!();

unsafe {
let meta = gst_video_sys::gst_buffer_add_video_bar_meta(
let meta = ffi::gst_buffer_add_video_bar_meta(
buffer.as_mut_ptr(),
field,
is_letterbox.to_glib(),

@@ -676,10 +679,10 @@ impl VideoBarMeta {
#[cfg(any(feature = "v1_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_18")))]
unsafe impl MetaAPI for VideoBarMeta {
type GstType = gst_video_sys::GstVideoBarMeta;
type GstType = ffi::GstVideoBarMeta;

fn get_meta_api() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_bar_meta_api_get_type()) }
unsafe { from_glib(ffi::gst_video_bar_meta_api_get_type()) }
}
}

@@ -708,15 +711,15 @@ mod tests {
{
let meta = VideoMeta::add(
buffer.get_mut().unwrap(),
::VideoFrameFlags::empty(),
::VideoFormat::Argb,
crate::VideoFrameFlags::empty(),
crate::VideoFormat::Argb,
320,
240,
)
.unwrap();
assert_eq!(meta.get_id(), 0);
assert_eq!(meta.get_flags(), ::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), ::VideoFormat::Argb);
assert_eq!(meta.get_flags(), crate::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), crate::VideoFormat::Argb);
assert_eq!(meta.get_width(), 320);
assert_eq!(meta.get_height(), 240);
assert_eq!(meta.get_n_planes(), 1);

@@ -727,8 +730,8 @@ mod tests {
{
let meta = buffer.get_meta::<VideoMeta>().unwrap();
assert_eq!(meta.get_id(), 0);
assert_eq!(meta.get_flags(), ::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), ::VideoFormat::Argb);
assert_eq!(meta.get_flags(), crate::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), crate::VideoFormat::Argb);
assert_eq!(meta.get_width(), 320);
assert_eq!(meta.get_height(), 240);
assert_eq!(meta.get_n_planes(), 1);

@@ -745,8 +748,8 @@ mod tests {
{
let meta = VideoMeta::add_full(
buffer.get_mut().unwrap(),
::VideoFrameFlags::empty(),
::VideoFormat::Argb,
crate::VideoFrameFlags::empty(),
crate::VideoFormat::Argb,
320,
240,
&[0],

@@ -754,8 +757,8 @@ mod tests {
)
.unwrap();
assert_eq!(meta.get_id(), 0);
assert_eq!(meta.get_flags(), ::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), ::VideoFormat::Argb);
assert_eq!(meta.get_flags(), crate::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), crate::VideoFormat::Argb);
assert_eq!(meta.get_width(), 320);
assert_eq!(meta.get_height(), 240);
assert_eq!(meta.get_n_planes(), 1);

@@ -766,8 +769,8 @@ mod tests {
{
let meta = buffer.get_meta::<VideoMeta>().unwrap();
assert_eq!(meta.get_id(), 0);
assert_eq!(meta.get_flags(), ::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), ::VideoFormat::Argb);
assert_eq!(meta.get_flags(), crate::VideoFrameFlags::empty());
assert_eq!(meta.get_format(), crate::VideoFormat::Argb);
assert_eq!(meta.get_width(), 320);
assert_eq!(meta.get_height(), 240);
assert_eq!(meta.get_n_planes(), 1);
@@ -6,11 +6,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use crate::VideoOverlay;
use glib::translate::*;
use gst;
use gst_video_sys;
use libc::uintptr_t;
use VideoOverlay;

use glib::IsA;

@@ -21,19 +19,19 @@ pub trait VideoOverlayExtManual: 'static {

impl<O: IsA<VideoOverlay>> VideoOverlayExtManual for O {
unsafe fn set_window_handle(&self, handle: uintptr_t) {
gst_video_sys::gst_video_overlay_set_window_handle(self.as_ref().to_glib_none().0, handle)
ffi::gst_video_overlay_set_window_handle(self.as_ref().to_glib_none().0, handle)
}

unsafe fn got_window_handle(&self, handle: uintptr_t) {
gst_video_sys::gst_video_overlay_got_window_handle(self.as_ref().to_glib_none().0, handle)
ffi::gst_video_overlay_got_window_handle(self.as_ref().to_glib_none().0, handle)
}
}

pub fn is_video_overlay_prepare_window_handle_message(msg: &gst::MessageRef) -> bool {
skip_assert_initialized!();
unsafe {
from_glib(
gst_video_sys::gst_is_video_overlay_prepare_window_handle_message(msg.as_mut_ptr()),
)
from_glib(ffi::gst_is_video_overlay_prepare_window_handle_message(
msg.as_mut_ptr(),
))
}
}
@@ -9,17 +9,13 @@
use std::fmt;
use std::mem;

use gst;
use gst_video_sys;

use glib;
use glib::translate::{from_glib, from_glib_full, from_glib_none, ToGlib, ToGlibPtr};

gst_define_mini_object_wrapper!(
gst::gst_define_mini_object_wrapper!(
VideoOverlayRectangle,
VideoOverlayRectangleRef,
gst_video_sys::GstVideoOverlayRectangle,
|| gst_video_sys::gst_video_overlay_rectangle_get_type()
ffi::GstVideoOverlayRectangle,
|| ffi::gst_video_overlay_rectangle_get_type()
);

impl fmt::Debug for VideoOverlayRectangle {

@@ -45,12 +41,12 @@ impl VideoOverlayRectangle {
render_y: i32,
render_width: u32,
render_height: u32,
flags: ::VideoOverlayFormatFlags,
flags: crate::VideoOverlayFormatFlags,
) -> Self {
assert_initialized_main_thread!();
assert!(buffer.get_meta::<::VideoMeta>().is_some());
assert!(buffer.get_meta::<crate::VideoMeta>().is_some());
unsafe {
from_glib_full(gst_video_sys::gst_video_overlay_rectangle_new_raw(
from_glib_full(ffi::gst_video_overlay_rectangle_new_raw(
buffer.to_glib_none().0,
render_x,
render_y,

@@ -63,26 +59,24 @@ impl VideoOverlayRectangle {
}

impl VideoOverlayRectangleRef {
pub fn get_flags(&self) -> ::VideoOverlayFormatFlags {
pub fn get_flags(&self) -> crate::VideoOverlayFormatFlags {
unsafe {
from_glib(gst_video_sys::gst_video_overlay_rectangle_get_flags(
from_glib(ffi::gst_video_overlay_rectangle_get_flags(
self.as_mut_ptr(),
))
}
}

pub fn get_global_alpha(&self) -> f32 {
unsafe { gst_video_sys::gst_video_overlay_rectangle_get_global_alpha(self.as_mut_ptr()) }
unsafe { ffi::gst_video_overlay_rectangle_get_global_alpha(self.as_mut_ptr()) }
}

pub fn set_global_alpha(&mut self, alpha: f32) {
unsafe {
gst_video_sys::gst_video_overlay_rectangle_set_global_alpha(self.as_mut_ptr(), alpha)
}
unsafe { ffi::gst_video_overlay_rectangle_set_global_alpha(self.as_mut_ptr(), alpha) }
}

pub fn get_seqnum(&self) -> u32 {
unsafe { gst_video_sys::gst_video_overlay_rectangle_get_seqnum(self.as_mut_ptr()) }
unsafe { ffi::gst_video_overlay_rectangle_get_seqnum(self.as_mut_ptr()) }
}

pub fn get_render_rectangle(&self) -> (i32, i32, u32, u32) {

@@ -92,7 +86,7 @@ impl VideoOverlayRectangleRef {
let mut render_width = mem::MaybeUninit::uninit();
let mut render_height = mem::MaybeUninit::uninit();

gst_video_sys::gst_video_overlay_rectangle_get_render_rectangle(
ffi::gst_video_overlay_rectangle_get_render_rectangle(
self.as_mut_ptr(),
render_x.as_mut_ptr(),
render_y.as_mut_ptr(),

@@ -117,7 +111,7 @@ impl VideoOverlayRectangleRef {
render_height: u32,
) {
unsafe {
gst_video_sys::gst_video_overlay_rectangle_set_render_rectangle(
ffi::gst_video_overlay_rectangle_set_render_rectangle(
self.as_mut_ptr(),
render_x,
render_y,

@@ -127,60 +121,54 @@ impl VideoOverlayRectangleRef {
}
}

pub fn get_pixels_unscaled_raw(&self, flags: ::VideoOverlayFormatFlags) -> gst::Buffer {
pub fn get_pixels_unscaled_raw(&self, flags: crate::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(
gst_video_sys::gst_video_overlay_rectangle_get_pixels_unscaled_raw(
self.as_mut_ptr(),
flags.to_glib(),
),
)
}
}

pub fn get_pixels_unscaled_ayuv(&self, flags: ::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(
gst_video_sys::gst_video_overlay_rectangle_get_pixels_unscaled_ayuv(
self.as_mut_ptr(),
flags.to_glib(),
),
)
}
}

pub fn get_pixels_unscaled_argb(&self, flags: ::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(
gst_video_sys::gst_video_overlay_rectangle_get_pixels_unscaled_argb(
self.as_mut_ptr(),
flags.to_glib(),
),
)
}
}

pub fn get_pixels_raw(&self, flags: ::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(gst_video_sys::gst_video_overlay_rectangle_get_pixels_raw(
from_glib_none(ffi::gst_video_overlay_rectangle_get_pixels_unscaled_raw(
self.as_mut_ptr(),
flags.to_glib(),
))
}
}

pub fn get_pixels_ayuv(&self, flags: ::VideoOverlayFormatFlags) -> gst::Buffer {
pub fn get_pixels_unscaled_ayuv(&self, flags: crate::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(gst_video_sys::gst_video_overlay_rectangle_get_pixels_ayuv(
from_glib_none(ffi::gst_video_overlay_rectangle_get_pixels_unscaled_ayuv(
self.as_mut_ptr(),
flags.to_glib(),
))
}
}

pub fn get_pixels_argb(&self, flags: ::VideoOverlayFormatFlags) -> gst::Buffer {
pub fn get_pixels_unscaled_argb(&self, flags: crate::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(gst_video_sys::gst_video_overlay_rectangle_get_pixels_argb(
from_glib_none(ffi::gst_video_overlay_rectangle_get_pixels_unscaled_argb(
self.as_mut_ptr(),
flags.to_glib(),
))
}
}

pub fn get_pixels_raw(&self, flags: crate::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(ffi::gst_video_overlay_rectangle_get_pixels_raw(
self.as_mut_ptr(),
flags.to_glib(),
))
}
}

pub fn get_pixels_ayuv(&self, flags: crate::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(ffi::gst_video_overlay_rectangle_get_pixels_ayuv(
self.as_mut_ptr(),
flags.to_glib(),
))
}
}

pub fn get_pixels_argb(&self, flags: crate::VideoOverlayFormatFlags) -> gst::Buffer {
unsafe {
from_glib_none(ffi::gst_video_overlay_rectangle_get_pixels_argb(
self.as_mut_ptr(),
flags.to_glib(),
))

@@ -188,11 +176,11 @@ impl VideoOverlayRectangleRef {
}
}

gst_define_mini_object_wrapper!(
gst::gst_define_mini_object_wrapper!(
VideoOverlayComposition,
VideoOverlayCompositionRef,
gst_video_sys::GstVideoOverlayComposition,
|| gst_video_sys::gst_video_overlay_composition_get_type()
ffi::GstVideoOverlayComposition,
|| ffi::gst_video_overlay_composition_get_type()
);

impl fmt::Debug for VideoOverlayComposition {

@@ -216,16 +204,19 @@ impl VideoOverlayComposition {
let mut iter = rects.into_iter();

let first = match iter.next() {
None => return Err(glib_bool_error!("Failed to create VideoOverlayComposition")),
None => {
return Err(glib::glib_bool_error!(
"Failed to create VideoOverlayComposition"
))
}
Some(first) => first,
};

let composition = Self::from_glib_full(
gst_video_sys::gst_video_overlay_composition_new(first.as_mut_ptr()),
);
let composition =
Self::from_glib_full(ffi::gst_video_overlay_composition_new(first.as_mut_ptr()));

for rect in iter {
gst_video_sys::gst_video_overlay_composition_add_rectangle(
ffi::gst_video_overlay_composition_add_rectangle(
composition.as_mut_ptr(),
rect.as_mut_ptr(),
);

@@ -238,39 +229,36 @@ impl VideoOverlayComposition {

impl VideoOverlayCompositionRef {
pub fn n_rectangles(&self) -> u32 {
unsafe { gst_video_sys::gst_video_overlay_composition_n_rectangles(self.as_mut_ptr()) }
unsafe { ffi::gst_video_overlay_composition_n_rectangles(self.as_mut_ptr()) }
}

pub fn get_rectangle(&self, idx: u32) -> Result<VideoOverlayRectangle, glib::error::BoolError> {
if idx >= self.n_rectangles() {
return Err(glib_bool_error!("Invalid index"));
return Err(glib::glib_bool_error!("Invalid index"));
}

unsafe {
match from_glib_none(gst_video_sys::gst_video_overlay_composition_get_rectangle(
match from_glib_none(ffi::gst_video_overlay_composition_get_rectangle(
self.as_mut_ptr(),
idx,
)) {
Some(r) => Ok(r),
None => Err(glib_bool_error!("Failed to get rectangle")),
None => Err(glib::glib_bool_error!("Failed to get rectangle")),
}
}
}

pub fn get_seqnum(&self) -> u32 {
unsafe { gst_video_sys::gst_video_overlay_composition_get_seqnum(self.as_mut_ptr()) }
unsafe { ffi::gst_video_overlay_composition_get_seqnum(self.as_mut_ptr()) }
}

pub fn blend(
&self,
frame: &mut ::VideoFrameRef<&mut gst::BufferRef>,
frame: &mut crate::VideoFrameRef<&mut gst::BufferRef>,
) -> Result<(), glib::BoolError> {
unsafe {
glib_result_from_gboolean!(
gst_video_sys::gst_video_overlay_composition_blend(
self.as_mut_ptr(),
frame.as_mut_ptr()
),
glib::glib_result_from_gboolean!(
ffi::gst_video_overlay_composition_blend(self.as_mut_ptr(), frame.as_mut_ptr()),
"Failed to blend overlay composition",
)
}
@@ -7,7 +7,6 @@
// except according to those terms.

use glib::translate::ToGlib;
use gst_video_sys;

#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct VideoRectangle {

@@ -30,26 +29,26 @@ pub fn center_video_rectangle(
scale: bool,
) -> VideoRectangle {
skip_assert_initialized!();
let mut result = gst_video_sys::GstVideoRectangle {
let mut result = ffi::GstVideoRectangle {
x: 0,
y: 0,
w: 0,
h: 0,
};
let src_rect = gst_video_sys::GstVideoRectangle {
let src_rect = ffi::GstVideoRectangle {
x: src.x,
y: src.y,
w: src.w,
h: src.h,
};
let dst_rect = gst_video_sys::GstVideoRectangle {
let dst_rect = ffi::GstVideoRectangle {
x: dst.x,
y: dst.y,
w: dst.w,
h: dst.h,
};
unsafe {
gst_video_sys::gst_video_sink_center_rect(src_rect, dst_rect, &mut result, scale.to_glib());
ffi::gst_video_sink_center_rect(src_rect, dst_rect, &mut result, scale.to_glib());
}
VideoRectangle::new(result.x, result.y, result.w, result.h)
}
@@ -6,15 +6,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use glib;
use glib::prelude::*;
use glib::translate::*;
use glib::value;
use glib_sys;
use gobject_sys;
use gst;
use gst::prelude::*;
use gst_video_sys;
use std::cmp;
use std::convert::{TryFrom, TryInto};
use std::fmt;

@@ -24,20 +19,20 @@ use std::ptr;
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
use std::str;

use VideoTimeCodeFlags;
use crate::VideoTimeCodeFlags;
#[cfg(any(feature = "v1_12", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v1_12")))]
use VideoTimeCodeInterval;
use crate::VideoTimeCodeInterval;

pub struct VideoTimeCode(gst_video_sys::GstVideoTimeCode);
pub struct ValidVideoTimeCode(gst_video_sys::GstVideoTimeCode);
pub struct VideoTimeCode(ffi::GstVideoTimeCode);
pub struct ValidVideoTimeCode(ffi::GstVideoTimeCode);

impl VideoTimeCode {
pub fn new_empty() -> VideoTimeCode {
assert_initialized_main_thread!();
unsafe {
let mut v = mem::MaybeUninit::zeroed();
gst_video_sys::gst_video_time_code_clear(v.as_mut_ptr());
ffi::gst_video_time_code_clear(v.as_mut_ptr());
VideoTimeCode(v.assume_init())
}
}

@@ -56,7 +51,7 @@ impl VideoTimeCode {
assert_initialized_main_thread!();
unsafe {
let mut v = mem::MaybeUninit::zeroed();
gst_video_sys::gst_video_time_code_init(
ffi::gst_video_time_code_init(
v.as_mut_ptr(),
*fps.numer() as u32,
*fps.denom() as u32,

@@ -85,7 +80,7 @@ impl VideoTimeCode {
assert!(*fps.denom() > 0);
unsafe {
let mut v = mem::MaybeUninit::zeroed();
let res = gst_video_sys::gst_video_time_code_init_from_date_time_full(
let res = ffi::gst_video_time_code_init_from_date_time_full(
v.as_mut_ptr(),
*fps.numer() as u32,
*fps.denom() as u32,

@@ -94,8 +89,8 @@ impl VideoTimeCode {
field_count,
);

if res == glib_sys::GFALSE {
Err(glib_bool_error!("Failed to init video time code"))
if res == glib::ffi::GFALSE {
Err(glib::glib_bool_error!("Failed to init video time code"))
} else {
Ok(VideoTimeCode(v.assume_init()))
}

@@ -103,11 +98,7 @@ impl VideoTimeCode {
}

pub fn is_valid(&self) -> bool {
unsafe {
from_glib(gst_video_sys::gst_video_time_code_is_valid(
self.to_glib_none().0,
))
}
unsafe { from_glib(ffi::gst_video_time_code_is_valid(self.to_glib_none().0)) }
}

pub fn set_fps(&mut self, fps: gst::Fraction) {

@@ -181,7 +172,9 @@ impl ValidVideoTimeCode {
);
match tc.try_into() {
Ok(v) => Ok(v),
Err(_) => Err(glib_bool_error!("Failed to create new ValidVideoTimeCode")),
Err(_) => Err(glib::glib_bool_error!(
"Failed to create new ValidVideoTimeCode"
)),
}
}

@@ -200,7 +193,7 @@ impl ValidVideoTimeCode {
pub fn add_frames(&mut self, frames: i64) {
skip_assert_initialized!();
unsafe {
gst_video_sys::gst_video_time_code_add_frames(self.to_glib_none_mut().0, frames);
ffi::gst_video_time_code_add_frames(self.to_glib_none_mut().0, frames);
}
}

@@ -211,43 +204,39 @@ impl ValidVideoTimeCode {
tc_inter: &VideoTimeCodeInterval,
) -> Result<ValidVideoTimeCode, glib::error::BoolError> {
unsafe {
match from_glib_full(gst_video_sys::gst_video_time_code_add_interval(
match from_glib_full(ffi::gst_video_time_code_add_interval(
self.to_glib_none().0,
tc_inter.to_glib_none().0,
)) {
Some(i) => Ok(i),
None => Err(glib_bool_error!("Failed to add interval")),
None => Err(glib::glib_bool_error!("Failed to add interval")),
}
}
}

fn compare(&self, tc2: &ValidVideoTimeCode) -> i32 {
unsafe {
gst_video_sys::gst_video_time_code_compare(self.to_glib_none().0, tc2.to_glib_none().0)
}
unsafe { ffi::gst_video_time_code_compare(self.to_glib_none().0, tc2.to_glib_none().0) }
}

pub fn frames_since_daily_jam(&self) -> u64 {
unsafe { gst_video_sys::gst_video_time_code_frames_since_daily_jam(self.to_glib_none().0) }
unsafe { ffi::gst_video_time_code_frames_since_daily_jam(self.to_glib_none().0) }
}

pub fn increment_frame(&mut self) {
unsafe {
gst_video_sys::gst_video_time_code_increment_frame(self.to_glib_none_mut().0);
ffi::gst_video_time_code_increment_frame(self.to_glib_none_mut().0);
}
}

pub fn nsec_since_daily_jam(&self) -> u64 {
unsafe { gst_video_sys::gst_video_time_code_nsec_since_daily_jam(self.to_glib_none().0) }
unsafe { ffi::gst_video_time_code_nsec_since_daily_jam(self.to_glib_none().0) }
}

pub fn to_date_time(&self) -> Result<glib::DateTime, glib::error::BoolError> {
unsafe {
match from_glib_full(gst_video_sys::gst_video_time_code_to_date_time(
self.to_glib_none().0,
)) {
match from_glib_full(ffi::gst_video_time_code_to_date_time(self.to_glib_none().0)) {
Some(d) => Ok(d),
None => Err(glib_bool_error!(
None => Err(glib::glib_bool_error!(
"Failed to convert VideoTimeCode to date time"
)),
}
@@ -293,7 +282,7 @@ macro_rules! generic_impl {
pub fn set_latest_daily_jam(&mut self, latest_daily_jam: Option<&glib::DateTime>) {
unsafe {
if !self.0.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_unref(self.0.config.latest_daily_jam);
glib::ffi::g_date_time_unref(self.0.config.latest_daily_jam);
}

self.0.config.latest_daily_jam = latest_daily_jam.to_glib_full()

@@ -306,7 +295,7 @@ macro_rules! generic_impl {
unsafe {
let v = self.0;
if !v.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_ref(v.config.latest_daily_jam);
glib::ffi::g_date_time_ref(v.config.latest_daily_jam);
}

$name(v)

@@ -318,7 +307,7 @@ macro_rules! generic_impl {
fn drop(&mut self) {
unsafe {
if !self.0.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_unref(self.0.config.latest_daily_jam);
glib::ffi::g_date_time_unref(self.0.config.latest_daily_jam);
}
}
}

@@ -343,7 +332,7 @@ macro_rules! generic_impl {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = unsafe {
glib::GString::from_glib_full(gst_video_sys::gst_video_time_code_to_string(
glib::GString::from_glib_full(ffi::gst_video_time_code_to_string(
self.to_glib_none().0,
))
};

@@ -356,45 +345,43 @@ macro_rules! generic_impl {

#[doc(hidden)]
impl GlibPtrDefault for $name {
type GlibType = *mut gst_video_sys::GstVideoTimeCode;
type GlibType = *mut ffi::GstVideoTimeCode;
}

#[doc(hidden)]
impl<'a> ToGlibPtr<'a, *const gst_video_sys::GstVideoTimeCode> for $name {
impl<'a> ToGlibPtr<'a, *const ffi::GstVideoTimeCode> for $name {
type Storage = &'a Self;

#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *const gst_video_sys::GstVideoTimeCode, Self> {
fn to_glib_none(&'a self) -> Stash<'a, *const ffi::GstVideoTimeCode, Self> {
Stash(&self.0 as *const _, self)
}

#[inline]
fn to_glib_full(&self) -> *const gst_video_sys::GstVideoTimeCode {
unsafe { gst_video_sys::gst_video_time_code_copy(&self.0 as *const _) }
fn to_glib_full(&self) -> *const ffi::GstVideoTimeCode {
unsafe { ffi::gst_video_time_code_copy(&self.0 as *const _) }
}
}

#[doc(hidden)]
impl<'a> ToGlibPtrMut<'a, *mut gst_video_sys::GstVideoTimeCode> for $name {
impl<'a> ToGlibPtrMut<'a, *mut ffi::GstVideoTimeCode> for $name {
type Storage = &'a mut Self;

#[inline]
fn to_glib_none_mut(
&'a mut self,
) -> StashMut<'a, *mut gst_video_sys::GstVideoTimeCode, Self> {
fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut ffi::GstVideoTimeCode, Self> {
let ptr = &mut self.0 as *mut _;
StashMut(ptr, self)
}
}

#[doc(hidden)]
impl FromGlibPtrNone<*mut gst_video_sys::GstVideoTimeCode> for $name {
impl FromGlibPtrNone<*mut ffi::GstVideoTimeCode> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut gst_video_sys::GstVideoTimeCode) -> Self {
unsafe fn from_glib_none(ptr: *mut ffi::GstVideoTimeCode) -> Self {
assert!(!ptr.is_null());
let v = ptr::read(ptr);
if !v.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_ref(v.config.latest_daily_jam);
glib::ffi::g_date_time_ref(v.config.latest_daily_jam);
}

$name(v)

@@ -402,13 +389,13 @@ macro_rules! generic_impl {
}

#[doc(hidden)]
impl FromGlibPtrNone<*const gst_video_sys::GstVideoTimeCode> for $name {
impl FromGlibPtrNone<*const ffi::GstVideoTimeCode> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *const gst_video_sys::GstVideoTimeCode) -> Self {
unsafe fn from_glib_none(ptr: *const ffi::GstVideoTimeCode) -> Self {
assert!(!ptr.is_null());
let v = ptr::read(ptr);
if !v.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_ref(v.config.latest_daily_jam);
glib::ffi::g_date_time_ref(v.config.latest_daily_jam);
}

$name(v)

@@ -416,26 +403,24 @@ macro_rules! generic_impl {
}

#[doc(hidden)]
impl FromGlibPtrFull<*mut gst_video_sys::GstVideoTimeCode> for $name {
impl FromGlibPtrFull<*mut ffi::GstVideoTimeCode> for $name {
#[inline]
unsafe fn from_glib_full(ptr: *mut gst_video_sys::GstVideoTimeCode) -> Self {
unsafe fn from_glib_full(ptr: *mut ffi::GstVideoTimeCode) -> Self {
assert!(!ptr.is_null());
let v = ptr::read(ptr);
if !v.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_ref(v.config.latest_daily_jam);
glib::ffi::g_date_time_ref(v.config.latest_daily_jam);
}
gst_video_sys::gst_video_time_code_free(ptr);
ffi::gst_video_time_code_free(ptr);

$name(v)
}
}

#[doc(hidden)]
impl FromGlibPtrBorrow<*mut gst_video_sys::GstVideoTimeCode> for $name {
impl FromGlibPtrBorrow<*mut ffi::GstVideoTimeCode> for $name {
#[inline]
unsafe fn from_glib_borrow(
ptr: *mut gst_video_sys::GstVideoTimeCode,
) -> Borrowed<Self> {
unsafe fn from_glib_borrow(ptr: *mut ffi::GstVideoTimeCode) -> Borrowed<Self> {
assert!(!ptr.is_null());
let v = ptr::read(ptr);

@@ -445,26 +430,26 @@ macro_rules! generic_impl {

impl StaticType for $name {
fn static_type() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_time_code_get_type()) }
unsafe { from_glib(ffi::gst_video_time_code_get_type()) }
}
}

#[doc(hidden)]
impl<'a> value::FromValueOptional<'a> for $name {
unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> {
Option::<$name>::from_glib_none(gobject_sys::g_value_get_boxed(
Option::<$name>::from_glib_none(glib::gobject_ffi::g_value_get_boxed(
value.to_glib_none().0,
) as *mut gst_video_sys::GstVideoTimeCode)
) as *mut ffi::GstVideoTimeCode)
}
}

#[doc(hidden)]
impl value::SetValue for $name {
unsafe fn set_value(value: &mut glib::Value, this: &Self) {
gobject_sys::g_value_set_boxed(
glib::gobject_ffi::g_value_set_boxed(
value.to_glib_none_mut().0,
ToGlibPtr::<*const gst_video_sys::GstVideoTimeCode>::to_glib_none(this).0
as glib_sys::gpointer,
ToGlibPtr::<*const ffi::GstVideoTimeCode>::to_glib_none(this).0
as glib::ffi::gpointer,
)
}
}

@@ -472,10 +457,10 @@ macro_rules! generic_impl {
#[doc(hidden)]
impl value::SetValueOptional for $name {
unsafe fn set_value_optional(value: &mut glib::Value, this: Option<&Self>) {
gobject_sys::g_value_set_boxed(
glib::gobject_ffi::g_value_set_boxed(
value.to_glib_none_mut().0,
ToGlibPtr::<*const gst_video_sys::GstVideoTimeCode>::to_glib_none(&this).0
as glib_sys::gpointer,
ToGlibPtr::<*const ffi::GstVideoTimeCode>::to_glib_none(&this).0
as glib::ffi::gpointer,
)
}
}

@@ -493,10 +478,10 @@ impl str::FromStr for VideoTimeCode {
fn from_str(s: &str) -> Result<Self, glib::error::BoolError> {
assert_initialized_main_thread!();
unsafe {
Option::<VideoTimeCode>::from_glib_full(
gst_video_sys::gst_video_time_code_new_from_string(s.to_glib_none().0),
)
.ok_or_else(|| glib_bool_error!("Failed to create VideoTimeCode from string"))
Option::<VideoTimeCode>::from_glib_full(ffi::gst_video_time_code_new_from_string(
s.to_glib_none().0,
))
.ok_or_else(|| glib::glib_bool_error!("Failed to create VideoTimeCode from string"))
}
}
}

@@ -532,7 +517,7 @@ impl From<ValidVideoTimeCode> for VideoTimeCode {
}

#[repr(transparent)]
pub struct VideoTimeCodeMeta(gst_video_sys::GstVideoTimeCodeMeta);
pub struct VideoTimeCodeMeta(ffi::GstVideoTimeCodeMeta);

unsafe impl Send for VideoTimeCodeMeta {}
unsafe impl Sync for VideoTimeCodeMeta {}

@@ -544,7 +529,7 @@ impl VideoTimeCodeMeta {
) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
skip_assert_initialized!();
unsafe {
let meta = gst_video_sys::gst_buffer_add_video_time_code_meta(
let meta = ffi::gst_buffer_add_video_time_code_meta(
buffer.as_mut_ptr(),
tc.to_glib_none().0 as *mut _,
);

@@ -560,20 +545,20 @@ impl VideoTimeCodeMeta {
pub fn set_tc(&mut self, tc: ValidVideoTimeCode) {
#![allow(clippy::cast_ptr_alignment)]
unsafe {
gst_video_sys::gst_video_time_code_clear(&mut self.0.tc);
ffi::gst_video_time_code_clear(&mut self.0.tc);
self.0.tc = tc.0;
if !self.0.tc.config.latest_daily_jam.is_null() {
glib_sys::g_date_time_ref(self.0.tc.config.latest_daily_jam);
glib::ffi::g_date_time_ref(self.0.tc.config.latest_daily_jam);
}
}
}
}

unsafe impl MetaAPI for VideoTimeCodeMeta {
type GstType = gst_video_sys::GstVideoTimeCodeMeta;
type GstType = ffi::GstVideoTimeCodeMeta;

fn get_meta_api() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_time_code_meta_api_get_type()) }
unsafe { from_glib(ffi::gst_video_time_code_meta_api_get_type()) }
}
}
@@ -6,13 +6,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use glib;
use glib::prelude::*;
use glib::translate::*;
use glib::value;
use glib_sys;
use gobject_sys;
use gst_video_sys;
use std::cmp;
use std::fmt;
use std::mem;

@@ -20,20 +16,14 @@ use std::ptr;
use std::str;

#[derive(Clone)]
pub struct VideoTimeCodeInterval(gst_video_sys::GstVideoTimeCodeInterval);
pub struct VideoTimeCodeInterval(ffi::GstVideoTimeCodeInterval);

impl VideoTimeCodeInterval {
pub fn new(hours: u32, minutes: u32, seconds: u32, frames: u32) -> Self {
assert_initialized_main_thread!();
unsafe {
let mut v = mem::MaybeUninit::zeroed();
gst_video_sys::gst_video_time_code_interval_init(
v.as_mut_ptr(),
hours,
minutes,
seconds,
frames,
);
ffi::gst_video_time_code_interval_init(v.as_mut_ptr(), hours, minutes, seconds, frames);
VideoTimeCodeInterval(v.assume_init())
}
}

@@ -134,82 +124,80 @@ impl str::FromStr for VideoTimeCodeInterval {
assert_initialized_main_thread!();
unsafe {
Option::<VideoTimeCodeInterval>::from_glib_full(
gst_video_sys::gst_video_time_code_interval_new_from_string(s.to_glib_none().0),
ffi::gst_video_time_code_interval_new_from_string(s.to_glib_none().0),
)
.ok_or_else(|| glib_bool_error!("Failed to create VideoTimeCodeInterval from string"))
.ok_or_else(|| {
glib::glib_bool_error!("Failed to create VideoTimeCodeInterval from string")
})
}
}
}

#[doc(hidden)]
impl GlibPtrDefault for VideoTimeCodeInterval {
type GlibType = *mut gst_video_sys::GstVideoTimeCodeInterval;
type GlibType = *mut ffi::GstVideoTimeCodeInterval;
}

#[doc(hidden)]
impl<'a> ToGlibPtr<'a, *const gst_video_sys::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
impl<'a> ToGlibPtr<'a, *const ffi::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
type Storage = &'a Self;

#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *const gst_video_sys::GstVideoTimeCodeInterval, Self> {
fn to_glib_none(&'a self) -> Stash<'a, *const ffi::GstVideoTimeCodeInterval, Self> {
Stash(&self.0 as *const _, self)
}

#[inline]
fn to_glib_full(&self) -> *const gst_video_sys::GstVideoTimeCodeInterval {
unsafe { gst_video_sys::gst_video_time_code_interval_copy(&self.0 as *const _) }
fn to_glib_full(&self) -> *const ffi::GstVideoTimeCodeInterval {
unsafe { ffi::gst_video_time_code_interval_copy(&self.0 as *const _) }
}
}

#[doc(hidden)]
impl<'a> ToGlibPtrMut<'a, *mut gst_video_sys::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
impl<'a> ToGlibPtrMut<'a, *mut ffi::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
type Storage = &'a mut Self;

#[inline]
fn to_glib_none_mut(
&'a mut self,
) -> StashMut<'a, *mut gst_video_sys::GstVideoTimeCodeInterval, Self> {
fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut ffi::GstVideoTimeCodeInterval, Self> {
let ptr = &mut self.0 as *mut _;
StashMut(ptr, self)
}
}

#[doc(hidden)]
impl FromGlibPtrNone<*mut gst_video_sys::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
impl FromGlibPtrNone<*mut ffi::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
#[inline]
unsafe fn from_glib_none(ptr: *mut gst_video_sys::GstVideoTimeCodeInterval) -> Self {
unsafe fn from_glib_none(ptr: *mut ffi::GstVideoTimeCodeInterval) -> Self {
assert!(!ptr.is_null());
VideoTimeCodeInterval(ptr::read(ptr))
}
}

#[doc(hidden)]
impl FromGlibPtrNone<*const gst_video_sys::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
impl FromGlibPtrNone<*const ffi::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
#[inline]
unsafe fn from_glib_none(ptr: *const gst_video_sys::GstVideoTimeCodeInterval) -> Self {
unsafe fn from_glib_none(ptr: *const ffi::GstVideoTimeCodeInterval) -> Self {
assert!(!ptr.is_null());
VideoTimeCodeInterval(ptr::read(ptr))
}
}

#[doc(hidden)]
impl FromGlibPtrFull<*mut gst_video_sys::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
impl FromGlibPtrFull<*mut ffi::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
#[inline]
unsafe fn from_glib_full(ptr: *mut gst_video_sys::GstVideoTimeCodeInterval) -> Self {
unsafe fn from_glib_full(ptr: *mut ffi::GstVideoTimeCodeInterval) -> Self {
assert!(!ptr.is_null());
let res = VideoTimeCodeInterval(ptr::read(ptr));
gst_video_sys::gst_video_time_code_interval_free(ptr);
ffi::gst_video_time_code_interval_free(ptr);

res
}
}

#[doc(hidden)]
impl FromGlibPtrBorrow<*mut gst_video_sys::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
impl FromGlibPtrBorrow<*mut ffi::GstVideoTimeCodeInterval> for VideoTimeCodeInterval {
#[inline]
unsafe fn from_glib_borrow(
ptr: *mut gst_video_sys::GstVideoTimeCodeInterval,
) -> Borrowed<Self> {
unsafe fn from_glib_borrow(ptr: *mut ffi::GstVideoTimeCodeInterval) -> Borrowed<Self> {
assert!(!ptr.is_null());
Borrowed::new(VideoTimeCodeInterval(ptr::read(ptr)))
}
@@ -217,27 +205,27 @@ impl FromGlibPtrBorrow<*mut gst_video_sys::GstVideoTimeCodeInterval> for VideoTi

impl StaticType for VideoTimeCodeInterval {
fn static_type() -> glib::Type {
unsafe { from_glib(gst_video_sys::gst_video_time_code_interval_get_type()) }
unsafe { from_glib(ffi::gst_video_time_code_interval_get_type()) }
}
}

#[doc(hidden)]
impl<'a> value::FromValueOptional<'a> for VideoTimeCodeInterval {
unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> {
Option::<VideoTimeCodeInterval>::from_glib_full(gobject_sys::g_value_dup_boxed(
Option::<VideoTimeCodeInterval>::from_glib_full(glib::gobject_ffi::g_value_dup_boxed(
value.to_glib_none().0,
)
as *mut gst_video_sys::GstVideoTimeCodeInterval)
as *mut ffi::GstVideoTimeCodeInterval)
}
}

#[doc(hidden)]
impl value::SetValue for VideoTimeCodeInterval {
unsafe fn set_value(value: &mut glib::Value, this: &Self) {
gobject_sys::g_value_set_boxed(
glib::gobject_ffi::g_value_set_boxed(
value.to_glib_none_mut().0,
ToGlibPtr::<*const gst_video_sys::GstVideoTimeCodeInterval>::to_glib_none(this).0
as glib_sys::gpointer,
ToGlibPtr::<*const ffi::GstVideoTimeCodeInterval>::to_glib_none(this).0
as glib::ffi::gpointer,
)
}
}

@@ -245,10 +233,10 @@ impl value::SetValue for VideoTimeCodeInterval {
#[doc(hidden)]
impl value::SetValueOptional for VideoTimeCodeInterval {
unsafe fn set_value_optional(value: &mut glib::Value, this: Option<&Self>) {
gobject_sys::g_value_set_boxed(
glib::gobject_ffi::g_value_set_boxed(
value.to_glib_none_mut().0,
ToGlibPtr::<*const gst_video_sys::GstVideoTimeCodeInterval>::to_glib_none(&this).0
as glib_sys::gpointer,
ToGlibPtr::<*const ffi::GstVideoTimeCodeInterval>::to_glib_none(&this).0
as glib::ffi::gpointer,
)
}
}