TagList: handle scope in serde

These changes break compatibility for the serde representations of
`TagList` and `Toc`. The `TagList` was previously represented as a
sequence; it is now represented as a struct so that the `scope` can be
serialized alongside the tags.
Authored by François Laignel on 2019-03-19 18:45:26 +01:00, committed by Sebastian Dröge
parent bec3d84627
commit 9a01bd6202
2 changed files with 206 additions and 131 deletions
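
The shape of the change, sketched from the RON test expectations in the diff below (a tag list holding a single title tag; `Stream` is the scope the serialization test expects for a freshly created list):

    old representation (bare sequence):
    [
        ("title", [
            "a title",
        ]),
    ]

    new representation (struct carrying the scope):
    (
        scope: Stream,
        tags: [
            ("title", [
                "a title",
            ]),
        ],
    )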

Changed file 1 of 2

@@ -14,7 +14,7 @@ use glib::{SendValue, ToValue};
 use serde::de;
 use serde::de::{Deserialize, DeserializeSeed, Deserializer, SeqAccess, Visitor};
 use serde::ser;
-use serde::ser::{Serialize, SerializeSeq, SerializeTuple, Serializer};
+use serde::ser::{Serialize, SerializeSeq, SerializeStruct, SerializeTuple, Serializer};
 
 use std::cell::RefCell;
 use std::fmt;
@@ -25,6 +25,7 @@ use value_serde::{DATE_TIME_OTHER_TYPE_ID, SAMPLE_OTHER_TYPE_ID};
 use DateTime;
 use Sample;
 use TagMergeMode;
+use TagScope;
 
 macro_rules! ser_tag (
     ($value:ident, $seq:ident, $t:ty) => (
@@ -92,12 +93,13 @@ impl<'a> Serialize for TagsSer<'a> {
     }
 }
 
-impl Serialize for TagListRef {
+struct TagListSer<'a>(&'a TagListRef);
+impl<'a> Serialize for TagListSer<'a> {
     fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
-        let tag_count = self.n_tags();
+        let tag_count = self.0.n_tags();
         if tag_count > 0 {
             let mut seq = serializer.serialize_seq(Some(tag_count as usize))?;
-            let tag_list_iter = self.iter_generic();
+            let tag_list_iter = self.0.iter_generic();
             for (tag_name, tag_iter) in tag_list_iter {
                 seq.serialize_element(&TagsSer::new(tag_name, tag_iter))?;
             }
@@ -111,6 +113,15 @@ impl Serialize for TagListRef {
     }
 }
 
+impl Serialize for TagListRef {
+    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
+        let mut tag_list = serializer.serialize_struct("TagList", 3)?;
+        tag_list.serialize_field("scope", &self.get_scope())?;
+        tag_list.serialize_field("tags", &TagListSer(self))?;
+        tag_list.end()
+    }
+}
+
 impl Serialize for TagList {
     fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
         self.as_ref().serialize(serializer)
@@ -218,9 +229,11 @@ impl<'de, 'a> DeserializeSeed<'de> for TagValuesTuple<'a> {
     }
 }
 
-struct TagListVisitor;
-impl<'de> Visitor<'de> for TagListVisitor {
-    type Value = TagList;
+struct TagsDe(TagList);
+
+struct TagsVisitor;
+impl<'de> Visitor<'de> for TagsVisitor {
+    type Value = TagsDe;
 
     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         formatter.write_str("a sequence of `Tag`s")
@@ -234,13 +247,34 @@ impl<'de> Visitor<'de> for TagListVisitor {
                 // tags are added in the dedicated deserializers
             }
         }
 
-        Ok(tag_list)
+        Ok(TagsDe(tag_list))
+    }
+}
+
+impl<'de> Deserialize<'de> for TagsDe {
+    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+        deserializer.deserialize_seq(TagsVisitor)
+    }
+}
+
+#[derive(Deserialize)]
+struct TagListDe {
+    scope: TagScope,
+    tags: TagsDe,
+}
+
+impl From<TagListDe> for TagList {
+    fn from(tag_list_de: TagListDe) -> Self {
+        let mut tag_list = tag_list_de.tags.0;
+        tag_list.get_mut().unwrap().set_scope(tag_list_de.scope);
+        tag_list
     }
 }
 
 impl<'de> Deserialize<'de> for TagList {
     fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
-        deserializer.deserialize_seq(TagListVisitor)
+        TagListDe::deserialize(deserializer).map(|tag_list_de| tag_list_de.into())
     }
 }
@@ -253,6 +287,7 @@ mod tests {
     use GenericFormattedValue;
     use Sample;
     use TagMergeMode;
+    use TagScope;
 
     #[test]
     fn test_serialize() {
@@ -287,7 +322,9 @@
         let res = ron::ser::to_string_pretty(&tags, pretty_config);
         assert_eq!(
             Ok(concat!(
-                "[",
+                "(",
+                " scope: Stream,",
+                " tags: [",
                 " (\"title\", [",
                 " \"a title\",",
                 " \"another title\",",
@@ -337,7 +374,8 @@
                 " info: None,",
                 " ),",
                 " ]),",
-                "]",
+                " ],",
+                ")",
             )
            .to_owned()),
             res,
@@ -351,7 +389,9 @@
         ::init().unwrap();
 
         let tag_list_ron = r#"
-            [
+            (
+                scope: Global,
+                tags: [
                     ("title", [
                         "a title",
                         "another title",
@@ -381,9 +421,12 @@
                             info: None,
                         ),
                     ]),
-            ]
+                ],
+            )
         "#;
         let tags: TagList = ron::de::from_str(tag_list_ron).unwrap();
+        assert_eq!(tags.get_scope(), TagScope::Global);
+
         assert_eq!(tags.get_index::<Title>(0).unwrap().get(), Some("a title"));
         assert_eq!(
             tags.get_index::<Title>(1).unwrap().get(),
@@ -407,7 +450,9 @@
         }
 
         let tag_json = r#"
-            [
+            {
+                "scope":"Global",
+                "tags":[
                     ["title", ["a title", "another title"]],
                     ["duration", [120000000000]],
                     ["bitrate", [96000]],
@@ -415,8 +460,11 @@
                     ["datetime",[{"YMD":[2018,5,28]}]],
                     ["image",[{"buffer":{"pts":null,"dts":null,"duration":null,"offset":0,"offset_end":0,"flags":{"bits":0},"buffer":[1,2,3,4]},"buffer_list":null,"caps":null,"segment":null,"info":null}]]
                 ]
+            }
         "#;
         let tags: TagList = serde_json::from_str(tag_json).unwrap();
+        assert_eq!(tags.get_scope(), TagScope::Global);
+
         assert_eq!(tags.get_index::<Title>(0).unwrap().get(), Some("a title"));
         assert_eq!(
             tags.get_index::<Title>(1).unwrap().get(),
@@ -444,6 +492,7 @@
         assert_eq!(tags.to_string(), "taglist;");
         {
             let tags = tags.get_mut().unwrap();
+            tags.set_scope(TagScope::Global);
             tags.add::<Title>(&"a title", TagMergeMode::Append); // String
             tags.add::<Title>(&"another title", TagMergeMode::Append); // String
             tags.add::<Duration>(&(::SECOND * 120).into(), TagMergeMode::Append); // u64
@@ -465,6 +514,8 @@
         let tags_ser = ron::ser::to_string(&tags).unwrap();
         let tags_de: TagList = ron::de::from_str(tags_ser.as_str()).unwrap();
 
+        assert_eq!(tags_de.get_scope(), TagScope::Global);
+
         assert_eq!(
             tags_de.get_index::<Title>(0).unwrap().get(),
             tags.get_index::<Title>(0).unwrap().get(),
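
As a usage illustration (not part of the commit): a minimal round-trip sketch modeled on the tests above, assuming the `gstreamer` crate is aliased as `gst`, `ron` is available as a dependency, and the crate is built with its serde support enabled.

    use gstreamer as gst;

    use gst::tags::Title;
    use gst::{TagList, TagMergeMode, TagScope};

    fn main() {
        gst::init().unwrap();

        // Build a tag list and give it a non-default scope.
        let mut tags = TagList::new();
        {
            let tags = tags.get_mut().unwrap();
            tags.set_scope(TagScope::Global);
            tags.add::<Title>(&"a title", TagMergeMode::Append);
        }

        // With this change the RON form is `(scope: Global, tags: [...])`
        // instead of the former bare sequence, and the scope survives the
        // round trip.
        let ser = ron::ser::to_string(&tags).unwrap();
        let de: TagList = ron::de::from_str(ser.as_str()).unwrap();
        assert_eq!(de.get_scope(), TagScope::Global);
        assert_eq!(de.get_index::<Title>(0).unwrap().get(), Some("a title"));
    }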

Changed file 2 of 2

@@ -203,11 +203,14 @@ mod tests {
             Ok(concat!(
                 "(",
                 " scope: Global,",
-                " tags: Some([",
+                " tags: Some((",
+                " scope: Stream,",
+                " tags: [",
                 " (\"title\", [",
                 " \"toc\",",
                 " ]),",
-                " ]),",
+                " ],",
+                " )),",
                 " entries: [",
                 " (",
                 " entry_type: Edition,",
@@ -227,11 +230,14 @@
                 " entry_type: Chapter,",
                 " uid: \"chapter1.1\",",
                 " start_stop: Some((0, 4)),",
-                " tags: Some([",
+                " tags: Some((",
+                " scope: Stream,",
+                " tags: [",
                 " (\"title\", [",
                 " \"chapter 1.1\",",
                 " ]),",
-                " ]),",
+                " ],",
+                " )),",
                 " loop: Some((None, 0)),",
                 " sub_entries: [",
                 " ],",
@@ -240,11 +246,14 @@
                 " entry_type: Chapter,",
                 " uid: \"chapter1.2\",",
                 " start_stop: Some((4, 10)),",
-                " tags: Some([",
+                " tags: Some((",
+                " scope: Stream,",
+                " tags: [",
                 " (\"title\", [",
                 " \"chapter 1.2\",",
                 " ]),",
-                " ]),",
+                " ],",
+                " )),",
                 " loop: Some((None, 0)),",
                 " sub_entries: [",
                 " ],",
@@ -255,11 +264,14 @@
                 " entry_type: Chapter,",
                 " uid: \"chapter2\",",
                 " start_stop: Some((10, 15)),",
-                " tags: Some([",
+                " tags: Some((",
+                " scope: Stream,",
+                " tags: [",
                 " (\"title\", [",
                 " \"chapter 2\",",
                 " ]),",
-                " ]),",
+                " ],",
+                " )),",
                 " loop: Some((None, 0)),",
                 " sub_entries: [",
                 " ],",
@@ -283,9 +295,12 @@
         let toc_ron = r#"
             (
                 scope: Global,
-                tags: Some([
+                tags: Some((
+                    scope: Stream,
+                    tags: [
                         ("title", ["toc"]),
-                ]),
+                    ],
+                )),
                 entries: [
                     (
                         entry_type: Edition,
@@ -305,9 +320,12 @@
                         entry_type: Chapter,
                         uid: "chapter1.1",
                         start_stop: Some((0, 4)),
-                        tags: Some([
+                        tags: Some((
+                            scope: Stream,
+                            tags: [
                                 ("title", ["chapter 1.1"]),
-                        ]),
+                            ],
+                        )),
                         loop: Some((None, 0)),
                         sub_entries: [
                         ],
@@ -316,9 +334,12 @@
                        entry_type: Chapter,
                        uid: "chapter1.2",
                        start_stop: Some((4, 10)),
-                        tags: Some([
+                        tags: Some((
+                            scope: Stream,
+                            tags: [
                                 ("title", ["chapter 1.2"]),
-                        ]),
+                            ],
+                        )),
                         loop: Some((None, 0)),
                         sub_entries: [
                         ],
@@ -329,9 +350,12 @@
                         entry_type: Chapter,
                         uid: "chapter2",
                         start_stop: Some((10, 15)),
-                        tags: Some([
+                        tags: Some((
+                            scope: Stream,
+                            tags: [
                                 ("title", ["chapter 2"]),
-                        ]),
+                            ],
+                        )),
                         loop: Some((None, 0)),
                         sub_entries: [
                         ],