TagList: handle scope in serde
These changes break compatibility with the previous serde representations of `TagList` and `Toc`. The `TagList` was previously serialized as a plain sequence; it is now serialized as a struct so that the `scope` can be carried alongside the tags.
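Concretely, the new representation carries the scope next to the tags. Below is a minimal round-trip sketch through RON, based on the tests in this diff; the crate names (`gstreamer`, `ron`) and the assumption that the crate is built with its serde support enabled are illustrative, not part of this commit.

    extern crate gstreamer as gst;
    extern crate ron;

    fn main() {
        gst::init().unwrap();

        let mut tags = gst::TagList::new();
        {
            let tags = tags.get_mut().unwrap();
            // The scope is now part of the serialized form.
            tags.set_scope(gst::TagScope::Global);
            tags.add::<gst::tags::Title>(&"a title", gst::TagMergeMode::Append);
        }

        // Old representation: a bare sequence of (tag name, values) pairs, e.g.
        //     [("title", ["a title"])]
        // New representation: a struct that also carries the scope, e.g.
        //     (scope: Global, tags: [("title", ["a title"])])
        let ser = ron::ser::to_string(&tags).unwrap();
        let de: gst::TagList = ron::de::from_str(ser.as_str()).unwrap();
        assert_eq!(de.get_scope(), gst::TagScope::Global);
    }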
parent bec3d84627
commit 9a01bd6202
2 changed files with 206 additions and 131 deletions
@@ -14,7 +14,7 @@ use glib::{SendValue, ToValue};
 use serde::de;
 use serde::de::{Deserialize, DeserializeSeed, Deserializer, SeqAccess, Visitor};
 use serde::ser;
-use serde::ser::{Serialize, SerializeSeq, SerializeTuple, Serializer};
+use serde::ser::{Serialize, SerializeSeq, SerializeStruct, SerializeTuple, Serializer};
 
 use std::cell::RefCell;
 use std::fmt;
@@ -25,6 +25,7 @@ use value_serde::{DATE_TIME_OTHER_TYPE_ID, SAMPLE_OTHER_TYPE_ID};
 use DateTime;
 use Sample;
 use TagMergeMode;
+use TagScope;
 
 macro_rules! ser_tag (
     ($value:ident, $seq:ident, $t:ty) => (
@@ -92,12 +93,13 @@ impl<'a> Serialize for TagsSer<'a> {
     }
 }
 
-impl Serialize for TagListRef {
+struct TagListSer<'a>(&'a TagListRef);
+impl<'a> Serialize for TagListSer<'a> {
     fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
-        let tag_count = self.n_tags();
+        let tag_count = self.0.n_tags();
         if tag_count > 0 {
             let mut seq = serializer.serialize_seq(Some(tag_count as usize))?;
-            let tag_list_iter = self.iter_generic();
+            let tag_list_iter = self.0.iter_generic();
             for (tag_name, tag_iter) in tag_list_iter {
                 seq.serialize_element(&TagsSer::new(tag_name, tag_iter))?;
             }
@@ -111,6 +113,15 @@ impl Serialize for TagListRef {
     }
 }
 
+impl Serialize for TagListRef {
+    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
+        let mut tag_list = serializer.serialize_struct("TagList", 3)?;
+        tag_list.serialize_field("scope", &self.get_scope())?;
+        tag_list.serialize_field("tags", &TagListSer(self))?;
+        tag_list.end()
+    }
+}
+
 impl Serialize for TagList {
     fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
         self.as_ref().serialize(serializer)
@@ -218,9 +229,11 @@ impl<'de, 'a> DeserializeSeed<'de> for TagValuesTuple<'a> {
     }
 }
 
-struct TagListVisitor;
-impl<'de> Visitor<'de> for TagListVisitor {
-    type Value = TagList;
+struct TagsDe(TagList);
+
+struct TagsVisitor;
+impl<'de> Visitor<'de> for TagsVisitor {
+    type Value = TagsDe;
 
     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         formatter.write_str("a sequence of `Tag`s")
@@ -234,13 +247,34 @@ impl<'de> Visitor<'de> for TagListVisitor {
                 // tags are added in the dedicated deserializers
             }
         }
-        Ok(tag_list)
+        Ok(TagsDe(tag_list))
+    }
+}
+
+impl<'de> Deserialize<'de> for TagsDe {
+    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+        deserializer.deserialize_seq(TagsVisitor)
+    }
+}
+
+#[derive(Deserialize)]
+struct TagListDe {
+    scope: TagScope,
+    tags: TagsDe,
+}
+
+impl From<TagListDe> for TagList {
+    fn from(tag_list_de: TagListDe) -> Self {
+        let mut tag_list = tag_list_de.tags.0;
+        tag_list.get_mut().unwrap().set_scope(tag_list_de.scope);
+
+        tag_list
     }
 }
 
 impl<'de> Deserialize<'de> for TagList {
     fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
-        deserializer.deserialize_seq(TagListVisitor)
+        TagListDe::deserialize(deserializer).map(|tag_list_de| tag_list_de.into())
     }
 }
 
@@ -253,6 +287,7 @@ mod tests {
     use GenericFormattedValue;
     use Sample;
     use TagMergeMode;
+    use TagScope;
 
     #[test]
     fn test_serialize() {
@@ -287,57 +322,60 @@ mod tests {
         let res = ron::ser::to_string_pretty(&tags, pretty_config);
         assert_eq!(
             Ok(concat!(
-                "[",
-                " (\"title\", [",
-                " \"a title\",",
-                " \"another title\",",
-                " ]),",
-                " (\"duration\", [",
-                " 120000000000,",
-                " ]),",
-                " (\"bitrate\", [",
-                " 96000,",
-                " ]),",
-                " (\"replaygain-track-gain\", [",
-                " 1,",
-                " ]),",
-                " (\"datetime\", [",
-                " YMD(2018, 5, 28),",
-                " ]),",
-                " (\"image\", [",
-                " (",
-                " buffer: Some((",
-                " pts: None,",
-                " dts: None,",
-                " duration: None,",
-                " offset: 0,",
-                " offset_end: 0,",
-                " flags: (",
-                " bits: 0,",
-                " ),",
-                " buffer: \"AQIDBA==\",",
-                " )),",
-                " buffer_list: None,",
-                " caps: None,",
-                " segment: Some((",
-                " flags: (",
-                " bits: 0,",
-                " ),",
-                " rate: 1,",
-                " applied_rate: 1,",
-                " format: Time,",
-                " base: 0,",
-                " offset: 0,",
-                " start: 0,",
-                " stop: -1,",
-                " time: 0,",
-                " position: 0,",
-                " duration: -1,",
-                " )),",
-                " info: None,",
-                " ),",
-                " ]),",
-                "]",
+                "(",
+                " scope: Stream,",
+                " tags: [",
+                " (\"title\", [",
+                " \"a title\",",
+                " \"another title\",",
+                " ]),",
+                " (\"duration\", [",
+                " 120000000000,",
+                " ]),",
+                " (\"bitrate\", [",
+                " 96000,",
+                " ]),",
+                " (\"replaygain-track-gain\", [",
+                " 1,",
+                " ]),",
+                " (\"datetime\", [",
+                " YMD(2018, 5, 28),",
+                " ]),",
+                " (\"image\", [",
+                " (",
+                " buffer: Some((",
+                " pts: None,",
+                " dts: None,",
+                " duration: None,",
+                " offset: 0,",
+                " offset_end: 0,",
+                " flags: (",
+                " bits: 0,",
+                " ),",
+                " buffer: \"AQIDBA==\",",
+                " )),",
+                " buffer_list: None,",
+                " caps: None,",
+                " segment: Some((",
+                " flags: (",
+                " bits: 0,",
+                " ),",
+                " rate: 1,",
+                " applied_rate: 1,",
+                " format: Time,",
+                " base: 0,",
+                " offset: 0,",
+                " start: 0,",
+                " stop: -1,",
+                " time: 0,",
+                " position: 0,",
+                " duration: -1,",
+                " )),",
+                " info: None,",
+                " ),",
+                " ]),",
+                " ],",
+                ")",
             )
             .to_owned()),
             res,
@@ -351,39 +389,44 @@ mod tests {
         ::init().unwrap();
 
         let tag_list_ron = r#"
-            [
-            ("title", [
-            "a title",
-            "another title",
-            ]),
-            ("duration", [120000000000]),
-            ("bitrate", [96000]),
-            ("replaygain-track-gain", [1]),
-            ("datetime", [
-            YMD(2018, 5, 28),
-            ]),
-            ("image", [
-            (
-            buffer: Some((
-            pts: None,
-            dts: None,
-            duration: None,
-            offset: 0,
-            offset_end: 0,
-            flags: (
-            bits: 0,
-            ),
-            buffer: "AQIDBA==",
-            )),
-            buffer_list: None,
-            caps: None,
-            segment: None,
-            info: None,
-            ),
-            ])
-            ]
+            (
+            scope: Global,
+            tags: [
+            ("title", [
+            "a title",
+            "another title",
+            ]),
+            ("duration", [120000000000]),
+            ("bitrate", [96000]),
+            ("replaygain-track-gain", [1]),
+            ("datetime", [
+            YMD(2018, 5, 28),
+            ]),
+            ("image", [
+            (
+            buffer: Some((
+            pts: None,
+            dts: None,
+            duration: None,
+            offset: 0,
+            offset_end: 0,
+            flags: (
+            bits: 0,
+            ),
+            buffer: "AQIDBA==",
+            )),
+            buffer_list: None,
+            caps: None,
+            segment: None,
+            info: None,
+            ),
+            ])
+            ],
+            )
         "#;
         let tags: TagList = ron::de::from_str(tag_list_ron).unwrap();
+        assert_eq!(tags.get_scope(), TagScope::Global);
+
         assert_eq!(tags.get_index::<Title>(0).unwrap().get(), Some("a title"));
         assert_eq!(
             tags.get_index::<Title>(1).unwrap().get(),
@@ -407,16 +450,21 @@ mod tests {
         }
 
         let tag_json = r#"
-            [
-            ["title", ["a title", "another title"]],
-            ["duration", [120000000000]],
-            ["bitrate", [96000]],
-            ["replaygain-track-gain", [1.0]],
-            ["datetime",[{"YMD":[2018,5,28]}]],
-            ["image",[{"buffer":{"pts":null,"dts":null,"duration":null,"offset":0,"offset_end":0,"flags":{"bits":0},"buffer":[1,2,3,4]},"buffer_list":null,"caps":null,"segment":null,"info":null}]]
-            ]
+            {
+            "scope":"Global",
+            "tags":[
+            ["title", ["a title", "another title"]],
+            ["duration", [120000000000]],
+            ["bitrate", [96000]],
+            ["replaygain-track-gain", [1.0]],
+            ["datetime",[{"YMD":[2018,5,28]}]],
+            ["image",[{"buffer":{"pts":null,"dts":null,"duration":null,"offset":0,"offset_end":0,"flags":{"bits":0},"buffer":[1,2,3,4]},"buffer_list":null,"caps":null,"segment":null,"info":null}]]
+            ]
+            }
         "#;
         let tags: TagList = serde_json::from_str(tag_json).unwrap();
+        assert_eq!(tags.get_scope(), TagScope::Global);
+
         assert_eq!(tags.get_index::<Title>(0).unwrap().get(), Some("a title"));
         assert_eq!(
             tags.get_index::<Title>(1).unwrap().get(),
@@ -444,6 +492,7 @@ mod tests {
         assert_eq!(tags.to_string(), "taglist;");
         {
             let tags = tags.get_mut().unwrap();
+            tags.set_scope(TagScope::Global);
             tags.add::<Title>(&"a title", TagMergeMode::Append); // String
             tags.add::<Title>(&"another title", TagMergeMode::Append); // String
             tags.add::<Duration>(&(::SECOND * 120).into(), TagMergeMode::Append); // u64
|
||||||
let tags_ser = ron::ser::to_string(&tags).unwrap();
|
let tags_ser = ron::ser::to_string(&tags).unwrap();
|
||||||
|
|
||||||
let tags_de: TagList = ron::de::from_str(tags_ser.as_str()).unwrap();
|
let tags_de: TagList = ron::de::from_str(tags_ser.as_str()).unwrap();
|
||||||
|
assert_eq!(tags_de.get_scope(), TagScope::Global);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tags_de.get_index::<Title>(0).unwrap().get(),
|
tags_de.get_index::<Title>(0).unwrap().get(),
|
||||||
tags.get_index::<Title>(0).unwrap().get(),
|
tags.get_index::<Title>(0).unwrap().get(),
|
||||||
|
|
|
@@ -203,11 +203,14 @@ mod tests {
             Ok(concat!(
                 "(",
                 " scope: Global,",
-                " tags: Some([",
-                " (\"title\", [",
-                " \"toc\",",
-                " ]),",
-                " ]),",
+                " tags: Some((",
+                " scope: Stream,",
+                " tags: [",
+                " (\"title\", [",
+                " \"toc\",",
+                " ]),",
+                " ],",
+                " )),",
                 " entries: [",
                 " (",
                 " entry_type: Edition,",
@@ -227,11 +230,14 @@ mod tests {
                 " entry_type: Chapter,",
                 " uid: \"chapter1.1\",",
                 " start_stop: Some((0, 4)),",
-                " tags: Some([",
-                " (\"title\", [",
-                " \"chapter 1.1\",",
-                " ]),",
-                " ]),",
+                " tags: Some((",
+                " scope: Stream,",
+                " tags: [",
+                " (\"title\", [",
+                " \"chapter 1.1\",",
+                " ]),",
+                " ],",
+                " )),",
                 " loop: Some((None, 0)),",
                 " sub_entries: [",
                 " ],",
|
||||||
" entry_type: Chapter,",
|
" entry_type: Chapter,",
|
||||||
" uid: \"chapter1.2\",",
|
" uid: \"chapter1.2\",",
|
||||||
" start_stop: Some((4, 10)),",
|
" start_stop: Some((4, 10)),",
|
||||||
" tags: Some([",
|
" tags: Some((",
|
||||||
" (\"title\", [",
|
" scope: Stream,",
|
||||||
" \"chapter 1.2\",",
|
" tags: [",
|
||||||
" ]),",
|
" (\"title\", [",
|
||||||
" ]),",
|
" \"chapter 1.2\",",
|
||||||
|
" ]),",
|
||||||
|
" ],",
|
||||||
|
" )),",
|
||||||
" loop: Some((None, 0)),",
|
" loop: Some((None, 0)),",
|
||||||
" sub_entries: [",
|
" sub_entries: [",
|
||||||
" ],",
|
" ],",
|
||||||
|
@ -255,11 +264,14 @@ mod tests {
|
||||||
" entry_type: Chapter,",
|
" entry_type: Chapter,",
|
||||||
" uid: \"chapter2\",",
|
" uid: \"chapter2\",",
|
||||||
" start_stop: Some((10, 15)),",
|
" start_stop: Some((10, 15)),",
|
||||||
" tags: Some([",
|
" tags: Some((",
|
||||||
" (\"title\", [",
|
" scope: Stream,",
|
||||||
" \"chapter 2\",",
|
" tags: [",
|
||||||
" ]),",
|
" (\"title\", [",
|
||||||
" ]),",
|
" \"chapter 2\",",
|
||||||
|
" ]),",
|
||||||
|
" ],",
|
||||||
|
" )),",
|
||||||
" loop: Some((None, 0)),",
|
" loop: Some((None, 0)),",
|
||||||
" sub_entries: [",
|
" sub_entries: [",
|
||||||
" ],",
|
" ],",
|
||||||
|
@ -283,9 +295,12 @@ mod tests {
|
||||||
let toc_ron = r#"
|
let toc_ron = r#"
|
||||||
(
|
(
|
||||||
scope: Global,
|
scope: Global,
|
||||||
tags: Some([
|
tags: Some((
|
||||||
("title", ["toc"]),
|
scope: Stream,
|
||||||
]),
|
tags: [
|
||||||
|
("title", ["toc"]),
|
||||||
|
],
|
||||||
|
)),
|
||||||
entries: [
|
entries: [
|
||||||
(
|
(
|
||||||
entry_type: Edition,
|
entry_type: Edition,
|
||||||
|
@ -305,9 +320,12 @@ mod tests {
|
||||||
entry_type: Chapter,
|
entry_type: Chapter,
|
||||||
uid: "chapter1.1",
|
uid: "chapter1.1",
|
||||||
start_stop: Some((0, 4)),
|
start_stop: Some((0, 4)),
|
||||||
tags: Some([
|
tags: Some((
|
||||||
("title", ["chapter 1.1"]),
|
scope: Stream,
|
||||||
]),
|
tags: [
|
||||||
|
("title", ["chapter 1.1"]),
|
||||||
|
],
|
||||||
|
)),
|
||||||
loop: Some((None, 0)),
|
loop: Some((None, 0)),
|
||||||
sub_entries: [
|
sub_entries: [
|
||||||
],
|
],
|
||||||
|
@ -316,9 +334,12 @@ mod tests {
|
||||||
entry_type: Chapter,
|
entry_type: Chapter,
|
||||||
uid: "chapter1.2",
|
uid: "chapter1.2",
|
||||||
start_stop: Some((4, 10)),
|
start_stop: Some((4, 10)),
|
||||||
tags: Some([
|
tags: Some((
|
||||||
("title", ["chapter 1.2"]),
|
scope: Stream,
|
||||||
]),
|
tags: [
|
||||||
|
("title", ["chapter 1.2"]),
|
||||||
|
],
|
||||||
|
)),
|
||||||
loop: Some((None, 0)),
|
loop: Some((None, 0)),
|
||||||
sub_entries: [
|
sub_entries: [
|
||||||
],
|
],
|
||||||
|
@ -329,9 +350,12 @@ mod tests {
|
||||||
entry_type: Chapter,
|
entry_type: Chapter,
|
||||||
uid: "chapter2",
|
uid: "chapter2",
|
||||||
start_stop: Some((10, 15)),
|
start_stop: Some((10, 15)),
|
||||||
tags: Some([
|
tags: Some((
|
||||||
("title", ["chapter 2"]),
|
scope: Stream,
|
||||||
]),
|
tags: [
|
||||||
|
("title", ["chapter 2"]),
|
||||||
|
],
|
||||||
|
)),
|
||||||
loop: Some((None, 0)),
|
loop: Some((None, 0)),
|
||||||
sub_entries: [
|
sub_entries: [
|
||||||
],
|
],
|
||||||
|
|