Mirror of https://git.asonix.dog/asonix/activitystreams.git
Add helpers to get datetimes

parent 07f3d2add3
commit 176903ffd6

3 changed files with 30 additions and 6 deletions
@@ -5,6 +5,7 @@ authors = ["asonix <asonix.dev@gmail.com>"]
 
 [dependencies]
 activitystreams-derive = { version = "0.1", path = "activitystreams-derive" }
+chrono = { version = "0.4", features = ["serde"] }
 failure = "0.1"
 mime = "0.3"
 serde = "1.0"

@@ -1,5 +1,6 @@
 #[macro_use]
 extern crate activitystreams_derive;
+extern crate chrono;
 #[macro_use]
 extern crate failure;
 extern crate mime;

@@ -1,3 +1,4 @@
+use chrono::{offset::Utc, DateTime};
 use mime;
 use serde_json::{self, Value};
 
@@ -38,7 +39,6 @@ pub struct ObjectProperties {
     #[activitystreams(concrete(String))]
     name: Option<serde_json::Value>,
 
-    // TODO: DateTime<Utc>
     #[serde(skip_serializing_if = "Option::is_none")]
     #[activitystreams(concrete(String), functional)]
     end_time: Option<serde_json::Value>,
@@ -67,7 +67,6 @@ pub struct ObjectProperties {
     #[activitystreams(ab(Object, Link))]
     preview: Option<serde_json::Value>,
 
-    // TODO: DateTime<Utc>
     #[serde(skip_serializing_if = "Option::is_none")]
     #[activitystreams(concrete(String), functional)]
     published: Option<serde_json::Value>,
@@ -76,7 +75,6 @@ pub struct ObjectProperties {
     #[activitystreams(concrete(Collection), functional)]
     replies: Option<serde_json::Value>,
 
-    // TODO: DateTime<Utc>
     #[serde(skip_serializing_if = "Option::is_none")]
     #[activitystreams(concrete(String), functional)]
     start_time: Option<serde_json::Value>,
@@ -89,7 +87,6 @@ pub struct ObjectProperties {
     #[activitystreams(ab(Object, Link))]
     tag: Option<serde_json::Value>,
 
-    // TODO: DateTime<Utc>
     #[serde(skip_serializing_if = "Option::is_none")]
     #[activitystreams(concrete(String), functional)]
     updated: Option<serde_json::Value>,
@@ -114,7 +111,6 @@ pub struct ObjectProperties {
     #[activitystreams(ab(Object, Link))]
     bcc: Option<serde_json::Value>,
 
-    // TODO: mime
     #[serde(skip_serializing_if = "Option::is_none")]
     #[activitystreams(concrete(String), functional)]
     media_type: Option<serde_json::Value>,
@@ -130,6 +126,26 @@ impl ObjectProperties {
         self.media_type_string()
             .and_then(|s| s.parse().map_err(|_| Error::Deserialize))
     }
+
+    pub fn end_time(&self) -> Result<DateTime<Utc>> {
+        self.end_time_string()
+            .and_then(|s| s.parse().map_err(|_| Error::Deserialize))
+    }
+
+    pub fn published(&self) -> Result<DateTime<Utc>> {
+        self.published_string()
+            .and_then(|s| s.parse().map_err(|_| Error::Deserialize))
+    }
+
+    pub fn start_time(&self) -> Result<DateTime<Utc>> {
+        self.start_time_string()
+            .and_then(|s| s.parse().map_err(|_| Error::Deserialize))
+    }
+
+    pub fn updated(&self) -> Result<DateTime<Utc>> {
+        self.updated_string()
+            .and_then(|s| s.parse().map_err(|_| Error::Deserialize))
+    }
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, Properties)]
@@ -163,8 +179,14 @@ pub struct TombstoneProperties {
     #[activitystreams(concrete(String))]
     former_type: Option<serde_json::Value>,
 
-    // TODO: DateTime<Utc>
     #[serde(skip_serializing_if = "Option::is_none")]
     #[activitystreams(concrete(String), functional)]
     deleted: Option<serde_json::Value>,
 }
+
+impl TombstoneProperties {
+    pub fn deleted(&self) -> Result<DateTime<Utc>> {
+        self.deleted_string()
+            .and_then(|s| s.parse().map_err(|_| Error::Deserialize))
+    }
+}
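
For context: the new accessors reuse the derive-generated *_string getters and parse the stored value with chrono's FromStr impl for DateTime<Utc> (RFC 3339-style timestamps), mapping any parse failure to Error::Deserialize. A minimal usage sketch follows; it is not part of the commit, the function name and messages are illustrative only, and imports are omitted because the module path of ObjectProperties is not shown in this diff.

// Usage sketch (assumed, not part of the commit): given an ObjectProperties
// deserialized from an ActivityStreams JSON document, read its timestamps as
// chrono DateTime<Utc> values instead of raw strings.
fn log_times(props: &ObjectProperties) {
    // published() returns the crate's Result<DateTime<Utc>>; a value chrono
    // cannot parse surfaces as Error::Deserialize.
    match props.published() {
        Ok(published) => println!("published at {}", published),
        Err(_) => println!("no usable published timestamp"),
    }

    // The same pattern applies to end_time(), start_time(), updated(),
    // and TombstoneProperties::deleted().
    if let Ok(updated) = props.updated() {
        println!("last updated at {}", updated);
    }
}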