// Take a look at the license at the top of the repository in the LICENSE file.

use std::{
    fmt,
    marker::PhantomData,
    mem,
    ops::{Deref, DerefMut},
    ptr, slice,
};

use glib::translate::{from_glib, from_glib_full, from_glib_none, IntoGlibPtr, ToGlibPtr};

use crate::{AllocationParams, Allocator, MemoryFlags};

mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});

pub struct MemoryMap<'a, T> {
    memory: &'a MemoryRef,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

pub struct MappedMemory<T> {
    memory: Option<Memory>,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}

impl fmt::Debug for Memory {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        MemoryRef::fmt(self, f)
    }
}

impl fmt::Debug for MemoryRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Memory")
            .field("ptr", &self.as_ptr())
            .field("allocator", &self.allocator())
            .field("parent", &self.parent())
            .field("maxsize", &self.maxsize())
            .field("align", &self.align())
            .field("offset", &self.offset())
            .field("size", &self.size())
            .field("flags", &self.flags())
            .finish()
    }
}

pub enum Readable {}
pub enum Writable {}

impl Memory {
    unsafe extern "C" fn drop_box<T>(vec: glib::ffi::gpointer) {
        let slice: Box<T> = Box::from_raw(vec as *mut T);
        drop(slice);
    }

    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
        }
    }

    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            let b = Box::new(slice);
            let (size, data) = {
                let slice = (*b).as_ref();
                (slice.len(), slice.as_ptr())
            };
            let user_data = Box::into_raw(b);
            from_glib_full(ffi::gst_memory_new_wrapped(
                ffi::GST_MEMORY_FLAG_READONLY,
                data as glib::ffi::gpointer,
                size,
                0,
                size,
                user_data as glib::ffi::gpointer,
                Some(Self::drop_box::<T>),
            ))
        }
    }

    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        unsafe {
            let mut b = Box::new(slice);
            let (size, data) = {
                let slice = (*b).as_mut();
                (slice.len(), slice.as_mut_ptr())
            };
            let user_data = Box::into_raw(b);
            from_glib_full(ffi::gst_memory_new_wrapped(
                0,
                data as glib::ffi::gpointer,
                size,
                0,
                size,
                user_data as glib::ffi::gpointer,
                Some(Self::drop_box::<T>),
            ))
        }
    }

    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::zeroed();
            let res: bool = from_glib(ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    memory: Some(self),
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::zeroed();
            let res: bool = from_glib(ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    memory: Some(self),
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }
}
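
// Typical creation-and-consume flow (a minimal sketch; see the runnable tests
// at the bottom of this file for compiled variants):
//
//     let mem = Memory::from_slice(vec![1u8, 2, 3, 4]);
//     let map = mem.into_mapped_memory_readable().unwrap();
//     assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
//     let mem = map.into_memory(); // unmaps and hands the `Memory` back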

impl MemoryRef {
    #[doc(alias = "get_allocator")]
    pub fn allocator(&self) -> Option<Allocator> {
        unsafe { from_glib_none(self.0.allocator) }
    }

    #[doc(alias = "get_parent")]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    #[doc(alias = "get_maxsize")]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    #[doc(alias = "get_align")]
    pub fn align(&self) -> usize {
        self.0.align
    }

    #[doc(alias = "get_offset")]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    #[doc(alias = "get_size")]
    pub fn size(&self) -> usize {
        self.0.size
    }

    #[doc(alias = "get_flags")]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    pub fn copy_part(&self, offset: isize, size: Option<usize>) -> Memory {
        let pos_sz = match size {
            Some(val) => val as isize,
            None => 0,
        };
        assert!(offset + pos_sz < (self.maxsize() as isize));
        unsafe {
            from_glib_full(ffi::gst_memory_copy(
                self.as_mut_ptr(),
                offset,
                match size {
                    Some(val) => val as isize,
                    None => -1,
                },
            ))
        }
    }

    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_mut_ptr(),
                mem_type.to_glib_none().0,
            ))
        }
    }

    pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::zeroed();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::zeroed();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    memory: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, offset: isize, size: Option<usize>) -> Memory {
        let pos_sz = match size {
            Some(val) => val as isize,
            None => 0,
        };
        assert!(offset + pos_sz < (self.maxsize() as isize));
        unsafe {
            from_glib_full(ffi::gst_memory_share(
                self.as_ptr() as *mut _,
                offset,
                match size {
                    Some(val) => val as isize,
                    None => -1,
                },
            ))
        }
    }

    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, offset: isize, size: usize) {
        assert!(offset + (size as isize) < (self.maxsize() as isize));
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size) }
    }

    pub fn dump(&self, size: Option<usize>) -> Dump {
        Dump { memory: self, size }
    }
}
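
// Borrowed mappings: `map_readable()`/`map_writable()` on a `MemoryRef` return a
// `MemoryMap` that unmaps on drop, in contrast to the consuming
// `into_mapped_memory_*()` constructors above. A minimal sketch:
//
//     let mem = Memory::from_mut_slice(vec![0u8; 4]);
//     {
//         let map = mem.map_readable().unwrap(); // borrows `mem`
//         assert_eq!(map.size(), 4);
//     } // unmapped here
//     println!("{}", mem.dump(None)); // hex dump of the whole memory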

impl<'a, T> MemoryMap<'a, T> {
    #[doc(alias = "get_size")]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    #[doc(alias = "get_memory")]
    pub fn memory(&self) -> &MemoryRef {
        self.memory
    }

    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data as *const u8, self.map_info.size) }
    }
}

impl<'a> MemoryMap<'a, Writable> {
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        if self.map_info.size == 0 {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data as *mut u8, self.map_info.size) }
    }
}

impl<'a, T> AsRef<[u8]> for MemoryMap<'a, T> {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl<'a> AsMut<[u8]> for MemoryMap<'a, Writable> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<'a, T> Deref for MemoryMap<'a, T> {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl<'a> DerefMut for MemoryMap<'a, Writable> {
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<'a, T> fmt::Debug for MemoryMap<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MemoryMap").field(&self.memory()).finish()
    }
}

impl<'a, T> PartialEq for MemoryMap<'a, T> {
    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<'a, T> Eq for MemoryMap<'a, T> {}

impl<'a, T> Drop for MemoryMap<'a, T> {
    fn drop(&mut self) {
        unsafe {
            ffi::gst_memory_unmap(self.memory.as_mut_ptr(), &mut self.map_info);
        }
    }
}

unsafe impl<'a, T> Send for MemoryMap<'a, T> {}
unsafe impl<'a, T> Sync for MemoryMap<'a, T> {}

impl<T> MappedMemory<T> {
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data as *const u8, self.map_info.size) }
    }

    #[doc(alias = "get_size")]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    #[doc(alias = "get_memory")]
    pub fn memory(&self) -> &MemoryRef {
        self.memory.as_ref().unwrap().as_ref()
    }

    pub fn into_memory(mut self) -> Memory {
        let memory = self.memory.take().unwrap();
        unsafe {
            ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut self.map_info);
        }

        memory
    }
}

impl MappedMemory<Writable> {
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        if self.map_info.size == 0 {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data as *mut u8, self.map_info.size) }
    }
}

impl<T> AsRef<[u8]> for MappedMemory<T> {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl AsMut<[u8]> for MappedMemory<Writable> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> Deref for MappedMemory<T> {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}

impl DerefMut for MappedMemory<Writable> {
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}

impl<T> Drop for MappedMemory<T> {
    fn drop(&mut self) {
        if let Some(ref memory) = self.memory {
            unsafe {
                ffi::gst_memory_unmap(memory.as_mut_ptr(), &mut self.map_info);
            }
        }
    }
}

impl<T> fmt::Debug for MappedMemory<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedMemory").field(&self.memory()).finish()
    }
}

impl<T> PartialEq for MappedMemory<T> {
    fn eq(&self, other: &MappedMemory<T>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T> Eq for MappedMemory<T> {}

unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}

pub struct Dump<'a> {
    memory: &'a MemoryRef,
    size: Option<usize>,
}

impl<'a> Dump<'a> {
    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
        use pretty_hex::*;

        let map = self.memory.map_readable().expect("Failed to map memory");
        let data = map.as_slice();
        let size = self.size.unwrap_or_else(|| self.memory.size());
        let data = &data[0..size];

        if debug {
            write!(f, "{:?}", data.hex_dump())
        } else {
            write!(f, "{}", data.hex_dump())
        }
    }
}

impl<'a> fmt::Display for Dump<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}

impl<'a> fmt::Debug for Dump<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}

pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    fn check_memory_type(mem: &MemoryRef) -> bool;
}

#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}

pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);

unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}

impl AsRef<MemoryRef> for MemoryRef {
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}

impl AsMut<MemoryRef> for MemoryRef {
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}

impl AsRef<Memory> for Memory {
    fn as_ref(&self) -> &Memory {
        self
    }
}

unsafe impl MemoryType for Memory {
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}

impl Memory {
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}

impl MemoryRef {
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
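
// Downcasting: concrete memory types generated by `memory_object_wrapper!`
// below can be recovered from a generic `Memory`. Sketch with a hypothetical
// `MyMemory` wrapper (placeholder name, not defined in this crate):
//
//     if let Ok(my_mem) = mem.downcast_memory::<MyMemory>() {
//         /* use the MyMemory-specific API */
//     }
//     // or, without taking ownership:
//     let my_ref: Option<&MyMemoryRef> = mem.downcast_memory_ref::<MyMemory>();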

#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        unsafe impl $crate::memory::MemoryType for $name {
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        impl $crate::glib::types::StaticType for $name {
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as glib::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

        // Can't have SetValue/SetValueOptional impls as otherwise one could use it to get
        // immutable references from a mutable reference without borrowing via the value
    };
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
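
// Hypothetical invocation sketch for a crate wrapping a custom memory type
// (all `My*` names are placeholders): the check closure decides whether a
// plain `GstMemory` pointer really is of the wrapped type.
//
//     memory_object_wrapper!(
//         MyMemory,
//         MyMemoryRef,
//         ffi::GstMyMemory,
//         |mem: &gst::MemoryRef| mem.is_type("MyMemory"),
//         gst::Memory,
//         gst::MemoryRef
//     );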

#[cfg(test)]
mod tests {
    #[test]
    fn test_dump() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        println!("{}", mem.dump(Some(mem.size())));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        println!("{:?}", mem.dump(Some(2)));

        let mem = crate::Memory::from_slice(vec![0; 64]);
        dbg!(mem.dump(None));
    }
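
    // Illustrative round trip through the mapping API defined above: wrap a
    // slice, map it, and read or modify the bytes.
    #[test]
    fn test_map() {
        crate::init().unwrap();

        // Read-only wrapped slice, borrowed readable mapping.
        let mem = crate::Memory::from_slice(vec![1u8, 2, 3, 4]);
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        // Writable wrapper: map it writable (consuming), modify in place,
        // then take the memory back out of the mapping.
        let mem = crate::Memory::from_mut_slice(vec![0u8; 4]);
        let mut map = mem.into_mapped_memory_writable().unwrap();
        map.as_mut_slice().copy_from_slice(&[5, 6, 7, 8]);
        let mem = map.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[5, 6, 7, 8]);
    }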

    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        let v = None::<&crate::Memory>.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None)));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let v = mem.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(matches!(v.get::<crate::Memory>(), Ok(_)));
    }
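
    // Sharing vs. copying: `share()` creates a sub-memory backed by the same
    // bytes, while `copy_part()` duplicates the requested range.
    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1u8, 2, 3, 4]);

        let sub = mem.share(1, Some(2));
        let map = sub.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[2, 3]);
        drop(map);

        let copy = mem.copy_part(0, Some(2));
        let map = copy.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2]);
    }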
}