Mirror of https://git.joinplu.me/Plume/Plume.git (synced 2024-11-26 05:21:00 +00:00)
Refactor with the help of Clippy (#462)
We add Clippy to our build, also fixing the missing `plume-cli` build in the process. Next, we follow Clippy's advice and fix some of the "simple" issues in our code, such as style and map usage. Finally, we refactor some of the harder spots that call for extracting new types or reworking function call types, especially those that thread through macros; and, of course, functions with ~15 parameters should probably be rethought.
parent 570d7fe2d0 · commit 732f514da7
34 changed files with 393 additions and 297 deletions
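Most of the mechanical edits in the diff below are the same few Clippy suggestions applied throughout the tree. The sketch below is illustrative only (hypothetical functions, not code from this commit); it shows the before/after shape of the three most common fixes: lazily built defaults, lossless integer casts, and copying out of references.

```rust
// Illustrative only: the shape of the lint fixes this commit applies.

fn tags_for_post(lookup: Result<Vec<String>, ()>) -> Vec<String> {
    // Clippy's `or_fun_call`: `unwrap_or(vec![])` builds the Vec even when it
    // is never needed; `unwrap_or_else` builds it lazily.
    lookup.unwrap_or_else(|_| vec![])
}

fn page_bounds(min: i32, max: i32) -> (i64, i64) {
    // Clippy's `cast_lossless`: prefer `i64::from(x)` over `x as i64` when the
    // conversion can never lose information.
    (i64::from(min), i64::from(max - min))
}

fn first_copy(ids: &[i32]) -> Option<i32> {
    // Clippy's `map_clone`: `.map(|x| *x)` is just `.cloned()`, here applied to
    // the `Option<&i32>` returned by `next()`.
    ids.iter().next().cloned()
}

fn main() {
    assert_eq!(tags_for_post(Err(())), Vec::<String>::new());
    assert_eq!(page_bounds(0, 12), (0, 12));
    assert_eq!(first_copy(&[7, 8]), Some(7));
}
```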
.travis.yml (26 changed lines)

@@ -27,17 +27,36 @@ jobs:
   include:
   - stage: build
     name: "Build front"
-    script: (cargo web -h || cargo install cargo-web) && cd plume-front && cargo web check
+    script: (cargo web -h || cargo install cargo-web) && cd plume-front && cargo clippy -- -D warnings && cargo web check
+    before_script: rustup component add clippy
+
   - stage: build
     name: "Build with postgresql"
     env:
      - MIGRATION_DIR=migrations/postgres FEATURES=postgres DATABASE_URL=postgres://postgres@localhost/plume
-    script: cargo build --no-default-features --features="${FEATURES}" --release
+    script: cargo clippy --no-default-features --features="${FEATURES}" --release -- -D warnings
+    before_script: rustup component add clippy
+
+  - stage: build
+    name: "Build CLI with postgresql"
+    env:
+     - MIGRATION_DIR=migrations/postgres FEATURES=postgres DATABASE_URL=postgres://postgres@localhost/plume
+    script: cd plume-cli && cargo clippy --no-default-features --features="${FEATURES}" --release -- -D warnings
+    before_script: rustup component add clippy
   - stage: build
     name: "Build with sqlite"
     env:
      - MIGRATION_DIR=migrations/sqlite FEATURES=sqlite DATABASE_URL=plume.sqlite3
-    script: cargo build --no-default-features --features="${FEATURES}" --release
+    script: cargo clippy --no-default-features --features="${FEATURES}" --release -- -D warnings
+    before_script: rustup component add clippy
+
+  - stage: build
+    name: "Build CLI with sqlite"
+    env:
+     - MIGRATION_DIR=migrations/sqlite FEATURES=sqlite DATABASE_URL=plume.sqlite3
+    script: cd plume-cli && cargo clippy --no-default-features --features="${FEATURES}" --release -- -D warnings
+    before_script: rustup component add clippy
+
   - stage: test and coverage
     name: "Test with potgresql backend"
     env:
@@ -48,6 +67,7 @@ jobs:
      - |
        cargo test --features "${FEATURES}" --no-default-features --all --exclude plume-front &&
        ./script/compute_coverage.sh
+
   - stage: test and coverage
     name: "Test with Sqlite backend"
     env:
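With Clippy wired into every build stage and `-D warnings` passed through, any lint Clippy reports now fails CI. A minimal, hypothetical illustration (not code from this repository) of the kind of code the new stages would reject until cleaned up:

```rust
// Under `cargo clippy -- -D warnings`, the `clippy::needless_return` warning
// on this function is promoted to a hard error.
fn double(x: i32) -> i32 {
    return x * 2;
}

// The warning-free form that the new CI stages require.
fn double_ok(x: i32) -> i32 {
    x * 2
}

fn main() {
    assert_eq!(double(2), double_ok(2));
}
```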
@@ -60,13 +60,7 @@ fn init<'a>(args: &ArgMatches<'a>, conn: &Connection) {
     let path = Path::new(path).join("search_index");

     let can_do = match read_dir(path.clone()) { // try to read the directory specified
-        Ok(mut contents) => {
-            if contents.next().is_none() {
-                true
-            } else {
-                false
-            }
-        },
+        Ok(mut contents) => contents.next().is_none(),
         Err(e) => if e.kind() == ErrorKind::NotFound {
             true
         } else {

@@ -107,5 +101,3 @@ fn unlock<'a>(args: &ArgMatches<'a>) {

     remove_file(path).unwrap();
 }
-
-
|
@ -72,7 +72,7 @@ fn init_widget(
|
|||
}
|
||||
widget.append_child(&document().create_text_node(&content));
|
||||
if disable_return {
|
||||
widget.add_event_listener(no_return);
|
||||
widget.add_event_listener(no_return);
|
||||
}
|
||||
|
||||
parent.append_child(&widget);
|
||||
|
@ -128,7 +128,7 @@ pub fn init() -> Result<(), EditorError> {
|
|||
|
||||
popup.class_list().add("show").unwrap();
|
||||
bg.class_list().add("show").unwrap();
|
||||
}));
|
||||
}));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -233,7 +233,7 @@ fn make_editable(tag: &'static str) -> Element {
|
|||
elt
|
||||
}
|
||||
|
||||
fn placeholder<'a>(elt: HtmlElement, text: &'a str) -> HtmlElement {
|
||||
fn placeholder(elt: HtmlElement, text: &str) -> HtmlElement {
|
||||
elt.dataset().insert("placeholder", text).unwrap();
|
||||
elt.dataset().insert("edited", "false").unwrap();
|
||||
|
||||
|
@ -248,7 +248,7 @@ fn placeholder<'a>(elt: HtmlElement, text: &'a str) -> HtmlElement {
|
|||
|
||||
let ph = document().create_element("span").expect("Couldn't create placeholder");
|
||||
ph.class_list().add("placeholder").expect("Couldn't add class");
|
||||
ph.append_child(&document().create_text_node(&elt.dataset().get("placeholder").unwrap_or(String::new())));
|
||||
ph.append_child(&document().create_text_node(&elt.dataset().get("placeholder").unwrap_or_default()));
|
||||
elt.append_child(&ph);
|
||||
}
|
||||
}));
|
||||
|
|
|
@ -38,37 +38,34 @@ fn main() {
|
|||
/// It should normally be working fine even without this code
|
||||
/// But :focus-within is not yet supported by Webkit/Blink
|
||||
fn menu() {
|
||||
document().get_element_by_id("menu")
|
||||
.map(|button| {
|
||||
document().get_element_by_id("content")
|
||||
.map(|menu| {
|
||||
button.add_event_listener(|_: ClickEvent| {
|
||||
document().get_element_by_id("menu").map(|menu| menu.class_list().add("show"));
|
||||
});
|
||||
menu.add_event_listener(|_: ClickEvent| {
|
||||
document().get_element_by_id("menu").map(|menu| menu.class_list().remove("show"));
|
||||
});
|
||||
})
|
||||
});
|
||||
if let Some(button) = document().get_element_by_id("menu") {
|
||||
if let Some(menu) = document().get_element_by_id("content") {
|
||||
button.add_event_listener(|_: ClickEvent| {
|
||||
document().get_element_by_id("menu").map(|menu| menu.class_list().add("show"));
|
||||
});
|
||||
menu.add_event_listener(|_: ClickEvent| {
|
||||
document().get_element_by_id("menu").map(|menu| menu.class_list().remove("show"));
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear the URL of the search page before submitting request
|
||||
fn search() {
|
||||
document().get_element_by_id("form")
|
||||
.map(|form| {
|
||||
form.add_event_listener(|_: SubmitEvent| {
|
||||
document().query_selector_all("#form input").map(|inputs| {
|
||||
for input in inputs {
|
||||
js! {
|
||||
if (@{&input}.name === "") {
|
||||
@{&input}.name = @{&input}.id
|
||||
}
|
||||
if (@{&input}.name && !@{&input}.value) {
|
||||
@{&input}.name = "";
|
||||
}
|
||||
if let Some(form) = document().get_element_by_id("form") {
|
||||
form.add_event_listener(|_: SubmitEvent| {
|
||||
document().query_selector_all("#form input").map(|inputs| {
|
||||
for input in inputs {
|
||||
js! {
|
||||
if (@{&input}.name === "") {
|
||||
@{&input}.name = @{&input}.id
|
||||
}
|
||||
if (@{&input}.name && !@{&input}.value) {
|
||||
@{&input}.name = "";
|
||||
}
|
||||
}
|
||||
}).ok();
|
||||
});
|
||||
}
|
||||
}).ok();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -90,9 +90,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for ApiToken {
|
|||
|
||||
let mut parsed_header = headers[0].split(' ');
|
||||
let auth_type = parsed_header.next()
|
||||
.map_or_else(|| Outcome::Failure((Status::BadRequest, TokenError::NoType)), |t| Outcome::Success(t))?;
|
||||
.map_or_else(|| Outcome::Failure((Status::BadRequest, TokenError::NoType)), Outcome::Success)?;
|
||||
let val = parsed_header.next()
|
||||
.map_or_else(|| Outcome::Failure((Status::BadRequest, TokenError::NoValue)), |t| Outcome::Success(t))?;
|
||||
.map_or_else(|| Outcome::Failure((Status::BadRequest, TokenError::NoValue)), Outcome::Success)?;
|
||||
|
||||
if auth_type == "Bearer" {
|
||||
let conn = request.guard::<DbConn>().map_failure(|_| (Status::InternalServerError, TokenError::DbError))?;
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#![feature(try_trait)]
|
||||
#![feature(never_type)]
|
||||
|
||||
extern crate activitypub;
|
||||
extern crate ammonia;
|
||||
|
|
|
@ -70,8 +70,8 @@ impl Media {
|
|||
pub fn page_for_user(conn: &Connection, user: &User, (min, max): (i32, i32)) -> Result<Vec<Media>> {
|
||||
medias::table
|
||||
.filter(medias::owner_id.eq(user.id))
|
||||
.offset(min as i64)
|
||||
.limit((max - min) as i64)
|
||||
.offset(i64::from(min))
|
||||
.limit(i64::from(max - min))
|
||||
.load::<Media>(conn)
|
||||
.map_err(Error::from)
|
||||
}
|
||||
|
|
|
@ -127,11 +127,11 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option<i32>)> for P
|
|||
published: Some(p.published),
|
||||
creation_date: Some(p.creation_date.format("%Y-%m-%d").to_string()),
|
||||
license: Some(p.license.clone()),
|
||||
tags: Some(Tag::for_post(conn, p.id).unwrap_or(vec![]).into_iter().map(|t| t.tag).collect()),
|
||||
tags: Some(Tag::for_post(conn, p.id).unwrap_or_else(|_| vec![]).into_iter().map(|t| t.tag).collect()),
|
||||
cover_id: p.cover_id,
|
||||
})
|
||||
.collect()
|
||||
).unwrap_or(vec![])
|
||||
).unwrap_or_else(|_| vec![])
|
||||
}
|
||||
|
||||
fn update(
|
||||
|
@ -146,7 +146,7 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option<i32>)> for P
|
|||
let user_id = user_id.expect("Post as Provider::delete: not authenticated");
|
||||
if let Ok(post) = Post::get(conn, id) {
|
||||
if post.is_author(conn, user_id).unwrap_or(false) {
|
||||
post.delete(&(conn, search)).ok().expect("Post as Provider::delete: delete error");
|
||||
post.delete(&(conn, search)).expect("Post as Provider::delete: delete error");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -168,7 +168,7 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option<i32>)> for P
|
|||
let domain = &Instance::get_local(&conn)
|
||||
.map_err(|_| ApiError::NotFound("posts::update: Error getting local instance".into()))?
|
||||
.public_domain;
|
||||
let (content, mentions, hashtags) = md_to_html(query.source.clone().unwrap_or(String::new()).clone().as_ref(), domain);
|
||||
let (content, mentions, hashtags) = md_to_html(query.source.clone().unwrap_or_default().clone().as_ref(), domain);
|
||||
|
||||
let author = User::get(conn, user_id.expect("<Post as Provider>::create: no user_id error"))
|
||||
.map_err(|_| ApiError::NotFound("Author not found".into()))?;
|
||||
|
@ -185,16 +185,16 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option<i32>)> for P
|
|||
|
||||
let post = Post::insert(conn, NewPost {
|
||||
blog_id: blog,
|
||||
slug: slug,
|
||||
title: title,
|
||||
slug,
|
||||
title,
|
||||
content: SafeString::new(content.as_ref()),
|
||||
published: query.published.unwrap_or(true),
|
||||
license: query.license.unwrap_or(Instance::get_local(conn)
|
||||
license: query.license.unwrap_or_else(|| Instance::get_local(conn)
|
||||
.map(|i| i.default_license)
|
||||
.unwrap_or(String::from("CC-BY-SA"))),
|
||||
.unwrap_or_else(|_| String::from("CC-BY-SA"))),
|
||||
creation_date: date,
|
||||
ap_url: String::new(),
|
||||
subtitle: query.subtitle.unwrap_or(String::new()),
|
||||
subtitle: query.subtitle.unwrap_or_default(),
|
||||
source: query.source.expect("Post API::create: no source error"),
|
||||
cover_id: query.cover_id,
|
||||
}, search).map_err(|_| ApiError::NotFound("Creation error".into()))?;
|
||||
|
@ -207,7 +207,7 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option<i32>)> for P
|
|||
if let Some(tags) = query.tags {
|
||||
for tag in tags {
|
||||
Tag::insert(conn, NewTag {
|
||||
tag: tag,
|
||||
tag,
|
||||
is_hashtag: false,
|
||||
post_id: post.id
|
||||
}).map_err(|_| ApiError::NotFound("Error saving tags".into()))?;
|
||||
|
@ -311,7 +311,7 @@ impl Post {
|
|||
.load(conn)?
|
||||
.iter()
|
||||
.next()
|
||||
.map(|x| *x)
|
||||
.cloned()
|
||||
.ok_or(Error::NotFound)
|
||||
}
|
||||
|
||||
|
@ -908,7 +908,7 @@ impl<'a> FromActivity<LicensedArticle, (&'a Connection, &'a Searcher)> for Post
|
|||
.content_string()?,
|
||||
),
|
||||
published: true,
|
||||
license: license,
|
||||
license,
|
||||
// FIXME: This is wrong: with this logic, we may use the display URL as the AP ID. We need two different fields
|
||||
ap_url: article.object_props.url_string().or_else(|_|
|
||||
article
|
||||
|
|
|
@ -24,7 +24,7 @@ lazy_static! {
|
|||
.url_relative(UrlRelative::Custom(Box::new(url_add_prefix)))
|
||||
.add_tag_attributes(
|
||||
"iframe",
|
||||
[ "width", "height", "src", "frameborder" ].iter().map(|&v| v),
|
||||
[ "width", "height", "src", "frameborder" ].iter().cloned(),
|
||||
)
|
||||
.add_tag_attributes(
|
||||
"video",
|
||||
|
|
|
@ -9,6 +9,7 @@ pub use self::query::PlumeQuery as Query;
|
|||
pub(crate) mod tests {
|
||||
use super::{Query, Searcher};
|
||||
use std::env::temp_dir;
|
||||
use std::str::FromStr;
|
||||
use diesel::Connection;
|
||||
|
||||
use plume_common::activity_pub::inbox::Deletable;
|
||||
|
@ -65,7 +66,7 @@ pub(crate) mod tests {
|
|||
("after:2017-11-05 after:2018-01-01", "after:2018-01-01"),
|
||||
];
|
||||
for (source, res) in vector {
|
||||
assert_eq!(&Query::from_str(source).to_string(), res);
|
||||
assert_eq!(&Query::from_str(source).unwrap().to_string(), res);
|
||||
assert_eq!(Query::new().parse_query(source).to_string(), res);
|
||||
}
|
||||
}
|
||||
|
@ -141,18 +142,18 @@ pub(crate) mod tests {
|
|||
}).unwrap();
|
||||
|
||||
searcher.commit();
|
||||
assert_eq!(searcher.search_document(conn, Query::from_str(&title), (0,1))[0].id, post.id);
|
||||
assert_eq!(searcher.search_document(conn, Query::from_str(&title).unwrap(), (0,1))[0].id, post.id);
|
||||
|
||||
let newtitle = random_hex()[..8].to_owned();
|
||||
post.title = newtitle.clone();
|
||||
post.update(conn, &searcher).unwrap();
|
||||
searcher.commit();
|
||||
assert_eq!(searcher.search_document(conn, Query::from_str(&newtitle), (0,1))[0].id, post.id);
|
||||
assert!(searcher.search_document(conn, Query::from_str(&title), (0,1)).is_empty());
|
||||
assert_eq!(searcher.search_document(conn, Query::from_str(&newtitle).unwrap(), (0,1))[0].id, post.id);
|
||||
assert!(searcher.search_document(conn, Query::from_str(&title).unwrap(), (0,1)).is_empty());
|
||||
|
||||
post.delete(&(conn, &searcher)).unwrap();
|
||||
searcher.commit();
|
||||
assert!(searcher.search_document(conn, Query::from_str(&newtitle), (0,1)).is_empty());
|
||||
assert!(searcher.search_document(conn, Query::from_str(&newtitle).unwrap(), (0,1)).is_empty());
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
|
|
@ -87,9 +87,9 @@ macro_rules! gen_to_string {
|
|||
$(
|
||||
for (occur, val) in &$self.$field {
|
||||
if val.contains(' ') {
|
||||
$result.push_str(&format!("{}{}:\"{}\" ", Self::occur_to_str(&occur), stringify!($field), val));
|
||||
$result.push_str(&format!("{}{}:\"{}\" ", Self::occur_to_str(*occur), stringify!($field), val));
|
||||
} else {
|
||||
$result.push_str(&format!("{}{}:{} ", Self::occur_to_str(&occur), stringify!($field), val));
|
||||
$result.push_str(&format!("{}{}:{} ", Self::occur_to_str(*occur), stringify!($field), val));
|
||||
}
|
||||
}
|
||||
)*
|
||||
|
@ -148,20 +148,6 @@ impl PlumeQuery {
|
|||
Default::default()
|
||||
}
|
||||
|
||||
/// Create a new Query from &str
|
||||
/// Same as doing
|
||||
/// ```rust
|
||||
/// # extern crate plume_models;
|
||||
/// # use plume_models::search::Query;
|
||||
/// let mut q = Query::new();
|
||||
/// q.parse_query("some query");
|
||||
/// ```
|
||||
pub fn from_str(query: &str) -> Self {
|
||||
let mut res: Self = Default::default();
|
||||
|
||||
res.from_str_req(&query.trim());
|
||||
res
|
||||
}
|
||||
|
||||
/// Parse a query string into this Query
|
||||
pub fn parse_query(&mut self, query: &str) -> &mut Self {
|
||||
|
@ -222,35 +208,31 @@ impl PlumeQuery {
|
|||
}
|
||||
|
||||
// split a string into a token and a rest
|
||||
pub fn get_first_token<'a>(mut query: &'a str) -> (&'a str, &'a str) {
|
||||
pub fn get_first_token(mut query: &str) -> (&str, &str) {
|
||||
query = query.trim();
|
||||
if query.is_empty() {
|
||||
("", "")
|
||||
} else {
|
||||
if query.get(0..1).map(|v| v=="\"").unwrap_or(false) {
|
||||
if let Some(index) = query[1..].find('"') {
|
||||
query.split_at(index+2)
|
||||
} else {
|
||||
(query, "")
|
||||
}
|
||||
} else if query.get(0..2).map(|v| v=="+\"" || v=="-\"").unwrap_or(false) {
|
||||
if let Some(index) = query[2..].find('"') {
|
||||
query.split_at(index+3)
|
||||
} else {
|
||||
(query, "")
|
||||
}
|
||||
} else {
|
||||
if let Some(index) = query.find(' ') {
|
||||
query.split_at(index)
|
||||
} else if query.get(0..1).map(|v| v=="\"").unwrap_or(false) {
|
||||
if let Some(index) = query[1..].find('"') {
|
||||
query.split_at(index+2)
|
||||
} else {
|
||||
(query, "")
|
||||
}
|
||||
}
|
||||
} else if query.get(0..2).map(|v| v=="+\"" || v=="-\"").unwrap_or(false) {
|
||||
if let Some(index) = query[2..].find('"') {
|
||||
query.split_at(index+3)
|
||||
} else {
|
||||
(query, "")
|
||||
}
|
||||
} else if let Some(index) = query.find(' ') {
|
||||
query.split_at(index)
|
||||
} else {
|
||||
(query, "")
|
||||
}
|
||||
}
|
||||
|
||||
// map each Occur state to a prefix
|
||||
fn occur_to_str(occur: &Occur) -> &'static str {
|
||||
fn occur_to_str(occur: Occur) -> &'static str {
|
||||
match occur {
|
||||
Occur::Should => "",
|
||||
Occur::Must => "+",
|
||||
|
@ -259,25 +241,28 @@ impl PlumeQuery {
|
|||
}
|
||||
|
||||
// recursive parser for query string
|
||||
// allow this clippy lint for now, until someone figures out how to
|
||||
// refactor this better.
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
fn from_str_req(&mut self, mut query: &str) -> &mut Self {
|
||||
query = query.trim_left();
|
||||
if query.is_empty() {
|
||||
self
|
||||
} else {
|
||||
let occur = if query.get(0..1).map(|v| v=="+").unwrap_or(false) {
|
||||
query = &query[1..];
|
||||
Occur::Must
|
||||
} else if query.get(0..1).map(|v| v=="-").unwrap_or(false) {
|
||||
query = &query[1..];
|
||||
Occur::MustNot
|
||||
} else {
|
||||
Occur::Should
|
||||
};
|
||||
gen_parser!(self, query, occur; normal: title, subtitle, content, tag,
|
||||
instance, author, blog, lang, license;
|
||||
date: after, before);
|
||||
self.from_str_req(query)
|
||||
return self
|
||||
}
|
||||
|
||||
let occur = if query.get(0..1).map(|v| v=="+").unwrap_or(false) {
|
||||
query = &query[1..];
|
||||
Occur::Must
|
||||
} else if query.get(0..1).map(|v| v=="-").unwrap_or(false) {
|
||||
query = &query[1..];
|
||||
Occur::MustNot
|
||||
} else {
|
||||
Occur::Should
|
||||
};
|
||||
gen_parser!(self, query, occur; normal: title, subtitle, content, tag,
|
||||
instance, author, blog, lang, license;
|
||||
date: after, before);
|
||||
self.from_str_req(query)
|
||||
}
|
||||
|
||||
// map a token and it's field to a query
|
||||
|
@ -290,7 +275,7 @@ impl PlumeQuery {
|
|||
let user_term = Term::from_field_text(field, &token[..pos]);
|
||||
let instance_term = Term::from_field_text(Searcher::schema().get_field("instance").unwrap(), &token[pos+1..]);
|
||||
Box::new(BooleanQuery::from(vec![
|
||||
(Occur::Must, Box::new(TermQuery::new(user_term, if field_name=="author" { IndexRecordOption::Basic }
|
||||
(Occur::Must, Box::new(TermQuery::new(user_term, if field_name=="author" { IndexRecordOption::Basic }
|
||||
else { IndexRecordOption::WithFreqsAndPositions }
|
||||
)) as Box<dyn Query + 'static>),
|
||||
(Occur::Must, Box::new(TermQuery::new(instance_term, IndexRecordOption::Basic))),
|
||||
|
@ -320,15 +305,34 @@ impl PlumeQuery {
|
|||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for PlumeQuery {
|
||||
|
||||
type Err = !;
|
||||
|
||||
/// Create a new Query from &str
|
||||
/// Same as doing
|
||||
/// ```rust
|
||||
/// # extern crate plume_models;
|
||||
/// # use plume_models::search::Query;
|
||||
/// let mut q = Query::new();
|
||||
/// q.parse_query("some query");
|
||||
/// ```
|
||||
fn from_str(query: &str) -> Result<PlumeQuery, !> {
|
||||
let mut res: PlumeQuery = Default::default();
|
||||
|
||||
res.from_str_req(&query.trim());
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for PlumeQuery {
|
||||
fn to_string(&self) -> String {
|
||||
let mut result = String::new();
|
||||
for (occur, val) in &self.text {
|
||||
if val.contains(' ') {
|
||||
result.push_str(&format!("{}\"{}\" ", Self::occur_to_str(&occur), val));
|
||||
result.push_str(&format!("{}\"{}\" ", Self::occur_to_str(*occur), val));
|
||||
} else {
|
||||
result.push_str(&format!("{}{} ", Self::occur_to_str(&occur), val));
|
||||
result.push_str(&format!("{}{} ", Self::occur_to_str(*occur), val));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -340,4 +344,3 @@ impl ToString for PlumeQuery {
|
|||
result
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -183,7 +183,7 @@ impl Searcher {
|
|||
let res = searcher.search(&query.into_query(), &collector).unwrap();
|
||||
|
||||
res.get(min as usize..).unwrap_or(&[])
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter_map(|(_,doc_add)| {
|
||||
let doc = searcher.doc(*doc_add).ok()?;
|
||||
let id = doc.get_first(post_id)?;
|
||||
|
|
|
@ -171,6 +171,9 @@ impl User {
|
|||
.select(post_authors::post_id)
|
||||
.load(conn)?;
|
||||
for post_id in all_their_posts_ids {
|
||||
// disabling this lint, because otherwise we'd have to turn it on
|
||||
// the head, and make it even harder to follow!
|
||||
#[allow(clippy::op_ref)]
|
||||
let has_other_authors = post_authors::table
|
||||
.filter(post_authors::post_id.eq(post_id))
|
||||
.filter(post_authors::author_id.ne(self.id))
|
||||
|
@ -489,7 +492,7 @@ impl User {
|
|||
Ok(json["items"]
|
||||
.as_array()
|
||||
.unwrap_or(&vec![])
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter_map(|j| serde_json::from_value(j.clone()).ok())
|
||||
.collect::<Vec<T>>())
|
||||
}
|
||||
|
@ -512,7 +515,7 @@ impl User {
|
|||
Ok(json["items"]
|
||||
.as_array()
|
||||
.unwrap_or(&vec![])
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter_map(|j| serde_json::from_value(j.clone()).ok())
|
||||
.collect::<Vec<String>>())
|
||||
}
|
||||
|
@ -746,7 +749,7 @@ impl User {
|
|||
pub fn avatar_url(&self, conn: &Connection) -> String {
|
||||
self.avatar_id.and_then(|id|
|
||||
Media::get(conn, id).and_then(|m| m.url(conn)).ok()
|
||||
).unwrap_or("/static/default-avatar.png".to_string())
|
||||
).unwrap_or_else(|| "/static/default-avatar.png".to_string())
|
||||
}
|
||||
|
||||
pub fn webfinger(&self, conn: &Connection) -> Result<Webfinger> {
|
||||
|
|
|
@@ -1,3 +1,4 @@
+#![warn(clippy::too_many_arguments)]
 use rocket::{response::{self, Responder}, request::{Form, Request}};
 use rocket_contrib::json::Json;
 use serde_json;
src/inbox.rs (11 changed lines)
|
@ -1,3 +1,4 @@
|
|||
#![warn(clippy::too_many_arguments)]
|
||||
use activitypub::{
|
||||
activity::{
|
||||
Announce,
|
||||
|
@ -14,7 +15,7 @@ use failure::Error;
|
|||
use rocket::{
|
||||
data::*,
|
||||
http::Status,
|
||||
Outcome::{self, *},
|
||||
Outcome::*,
|
||||
Request,
|
||||
};
|
||||
use rocket_contrib::json::*;
|
||||
|
@ -129,7 +130,7 @@ pub trait Inbox {
|
|||
_ => Err(InboxError::CantUndo)?,
|
||||
}
|
||||
} else {
|
||||
let link = act.undo_props.object.as_str().expect("Inbox::received: undo don't contain type and isn't Link");
|
||||
let link = act.undo_props.object.as_str().expect("Inbox::received: undo doesn't contain a type and isn't Link");
|
||||
if let Ok(like) = likes::Like::find_by_ap_url(conn, link) {
|
||||
likes::Like::delete_id(&like.ap_url, actor_id.as_ref(), conn).expect("Inbox::received: delete Like error");
|
||||
Ok(())
|
||||
|
@ -146,7 +147,7 @@ pub trait Inbox {
|
|||
}
|
||||
"Update" => {
|
||||
let act: Update = serde_json::from_value(act.clone())?;
|
||||
Post::handle_update(conn, &act.update_props.object_object()?, searcher).expect("Inbox::received: post update error");;
|
||||
Post::handle_update(conn, &act.update_props.object_object()?, searcher).expect("Inbox::received: post update error");
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(InboxError::InvalidType)?,
|
||||
|
@ -168,7 +169,7 @@ impl<'a, T: Deserialize<'a>> FromData<'a> for SignedJson<T> {
|
|||
type Owned = String;
|
||||
type Borrowed = str;
|
||||
|
||||
fn transform(r: &Request, d: Data) -> Transform<Outcome<Self::Owned, (Status, Self::Error), Data>> {
|
||||
fn transform(r: &Request, d: Data) -> Transform<rocket::data::Outcome<Self::Owned, Self::Error>> {
|
||||
let size_limit = r.limits().get("json").unwrap_or(JSON_LIMIT);
|
||||
let mut s = String::with_capacity(512);
|
||||
match d.open().take(size_limit).read_to_string(&mut s) {
|
||||
|
@ -177,7 +178,7 @@ impl<'a, T: Deserialize<'a>> FromData<'a> for SignedJson<T> {
|
|||
}
|
||||
}
|
||||
|
||||
fn from_data(_: &Request, o: Transformed<'a, Self>) -> Outcome<Self, (Status, Self::Error), Data> {
|
||||
fn from_data(_: &Request, o: Transformed<'a, Self>) -> rocket::data::Outcome<Self, Self::Error> {
|
||||
let string = o.borrowed()?;
|
||||
match serde_json::from_str(&string) {
|
||||
Ok(v) => Success(SignedJson(Digest::from_body(&string),Json(v))),
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![warn(clippy::too_many_arguments)]
|
||||
use lettre_email::Email;
|
||||
use std::env;
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![allow(clippy::too_many_arguments)]
|
||||
#![feature(decl_macro, proc_macro_hygiene)]
|
||||
|
||||
extern crate activitypub;
|
||||
|
@ -83,6 +84,8 @@ fn init_pool() -> Option<DbPool> {
|
|||
fn main() {
|
||||
let dbpool = init_pool().expect("main: database pool initialization error");
|
||||
let workpool = ScheduledThreadPool::with_name("worker {}", num_cpus::get());
|
||||
// we want a fast exit here, so
|
||||
#[allow(clippy::match_wild_err_arm)]
|
||||
let searcher = match UnmanagedSearcher::open(&"search_index") {
|
||||
Err(Error::Search(e)) => match e {
|
||||
SearcherError::WriteLockAcquisitionError => panic!(
|
||||
|
|
|
@ -17,19 +17,20 @@ use plume_models::{
|
|||
db_conn::DbConn,
|
||||
instance::Instance,
|
||||
posts::Post,
|
||||
users::User
|
||||
};
|
||||
use routes::{Page, errors::ErrorPage};
|
||||
use routes::{Page, PlumeRocket, errors::ErrorPage};
|
||||
use template_utils::Ructe;
|
||||
use Searcher;
|
||||
|
||||
#[get("/~/<name>?<page>", rank = 2)]
|
||||
pub fn details(intl: I18n, name: String, conn: DbConn, user: Option<User>, page: Option<Page>) -> Result<Ructe, ErrorPage> {
|
||||
pub fn details(name: String, page: Option<Page>, rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
|
||||
let page = page.unwrap_or_default();
|
||||
let conn = rockets.conn;
|
||||
let blog = Blog::find_by_fqn(&*conn, &name)?;
|
||||
let posts = Post::blog_page(&*conn, &blog, page.limits())?;
|
||||
let articles_count = Post::count_for_blog(&*conn, &blog)?;
|
||||
let authors = &blog.list_authors(&*conn)?;
|
||||
let user = rockets.user;
|
||||
let intl = rockets.intl;
|
||||
|
||||
Ok(render!(blogs::details(
|
||||
&(&*conn, &intl.catalog, user.clone()),
|
||||
|
@ -50,7 +51,11 @@ pub fn activity_details(name: String, conn: DbConn, _ap: ApRequest) -> Option<Ac
|
|||
}
|
||||
|
||||
#[get("/blogs/new")]
|
||||
pub fn new(user: User, conn: DbConn, intl: I18n) -> Ructe {
|
||||
pub fn new(rockets: PlumeRocket) -> Ructe {
|
||||
let user = rockets.user.unwrap();
|
||||
let intl = rockets.intl;
|
||||
let conn = rockets.conn;
|
||||
|
||||
render!(blogs::new(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
&NewBlogForm::default(),
|
||||
|
@ -82,8 +87,11 @@ fn valid_slug(title: &str) -> Result<(), ValidationError> {
|
|||
}
|
||||
|
||||
#[post("/blogs/new", data = "<form>")]
|
||||
pub fn create(conn: DbConn, form: LenientForm<NewBlogForm>, user: User, intl: I18n) -> Result<Redirect, Ructe> {
|
||||
pub fn create(form: LenientForm<NewBlogForm>, rockets: PlumeRocket) -> Result<Redirect, Ructe> {
|
||||
let slug = utils::make_actor_id(&form.title);
|
||||
let conn = rockets.conn;
|
||||
let intl = rockets.intl;
|
||||
let user = rockets.user.unwrap();
|
||||
|
||||
let mut errors = match form.validate() {
|
||||
Ok(_) => ValidationErrors::new(),
|
||||
|
@ -122,8 +130,13 @@ pub fn create(conn: DbConn, form: LenientForm<NewBlogForm>, user: User, intl: I1
|
|||
}
|
||||
|
||||
#[post("/~/<name>/delete")]
|
||||
pub fn delete(conn: DbConn, name: String, user: Option<User>, intl: I18n, searcher: Searcher) -> Result<Redirect, Ructe>{
|
||||
pub fn delete(name: String, rockets: PlumeRocket) -> Result<Redirect, Ructe>{
|
||||
let conn = rockets.conn;
|
||||
let blog = Blog::find_by_fqn(&*conn, &name).expect("blog::delete: blog not found");
|
||||
let user = rockets.user;
|
||||
let intl = rockets.intl;
|
||||
let searcher = rockets.searcher;
|
||||
|
||||
if user.clone().and_then(|u| u.is_author_in(&*conn, &blog).ok()).unwrap_or(false) {
|
||||
blog.delete(&conn, &searcher).expect("blog::expect: deletion error");
|
||||
Ok(Redirect::to(uri!(super::instance::index)))
|
||||
|
|
|
@ -35,7 +35,7 @@ pub fn upload(user: User, data: Data, ct: &ContentType, conn: DbConn) -> Result<
|
|||
SaveResult::Full(entries) => {
|
||||
let fields = entries.fields;
|
||||
|
||||
let filename = fields.get("file").and_then(|v| v.into_iter().next())
|
||||
let filename = fields.get("file").and_then(|v| v.iter().next())
|
||||
.ok_or_else(|| status::BadRequest(Some("No file uploaded")))?.headers
|
||||
.filename.clone();
|
||||
// Remove extension if it contains something else than just letters and numbers
|
||||
|
|
|
@@ -1,3 +1,4 @@
+#![warn(clippy::too_many_arguments)]
 use atom_syndication::{ContentBuilder, Entry, EntryBuilder, LinkBuilder, Person, PersonBuilder};
 use rocket::{
     http::{
@@ -8,9 +9,45 @@
     request::{self, FromFormValue, FromRequest, Request},
     response::NamedFile,
 };
+use rocket_i18n::I18n;
 use std::path::{Path, PathBuf};

-use plume_models::{Connection, posts::Post};
+use plume_models::{
+    Connection,
+    users::User,
+    posts::Post,
+    db_conn::DbConn,
+};
+
+use Worker;
+use Searcher;
+
+pub struct PlumeRocket<'a> {
+    conn: DbConn,
+    intl: I18n,
+    user: Option<User>,
+    searcher: Searcher<'a>,
+    worker: Worker<'a>,
+}
+
+impl<'a, 'r> FromRequest<'a, 'r> for PlumeRocket<'a> {
+    type Error = ();
+
+    fn from_request(request: &'a Request<'r>) -> request::Outcome<PlumeRocket<'a>, ()> {
+        let conn = request.guard::<DbConn>()?;
+        let intl = request.guard::<I18n>()?;
+        let user = request.guard::<User>().succeeded();
+        let worker = request.guard::<Worker>()?;
+        let searcher = request.guard::<Searcher>()?;
+        rocket::Outcome::Success(PlumeRocket {
+            conn,
+            intl,
+            user,
+            worker,
+            searcher,
+        })
+    }
+}

 const ITEMS_PER_PAGE: i32 = 12;

@@ -45,7 +82,7 @@ impl Page {
         }
     }

-    pub fn limits(&self) -> (i32, i32) {
+    pub fn limits(self) -> (i32, i32) {
         ((self.0 - 1) * ITEMS_PER_PAGE, self.0 * ITEMS_PER_PAGE)
     }
 }
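This new request-guard type is what lets route handlers with a dozen or so parameters (the commit message's "functions with ~15 parameters") shrink down to two or three. Below is a framework-free sketch of the idea, with placeholder types standing in for the real Rocket guards; it is illustrative only, not code from this commit.

```rust
// Placeholder types; in the real code these are Rocket request guards.
struct DbConn;
struct I18n;
struct User;
struct Searcher;
struct Worker;

// Before: every per-request value is threaded through the handler signature.
fn details_before(_name: String, _conn: DbConn, _intl: I18n, _user: Option<User>,
                  _searcher: Searcher, _worker: Worker) {}

// After: the values are grouped into one context object, built in a single
// place (the `FromRequest` impl above), and handlers unpack what they need.
struct PlumeRocket {
    conn: DbConn,
    intl: I18n,
    user: Option<User>,
    searcher: Searcher,
    worker: Worker,
}

fn details_after(_name: String, rockets: PlumeRocket) {
    // Unpack only the pieces this handler actually uses.
    let PlumeRocket { conn: _conn, intl: _intl, user: _user, searcher: _searcher, worker: _worker } = rockets;
}

fn main() {
    details_before("blog".into(), DbConn, I18n, None, Searcher, Worker);
    details_after("blog".into(), PlumeRocket {
        conn: DbConn,
        intl: I18n,
        user: None,
        searcher: Searcher,
        worker: Worker,
    });
}
```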
|
||||
|
|
|
@ -24,10 +24,8 @@ use plume_models::{
|
|||
tags::*,
|
||||
users::User
|
||||
};
|
||||
use routes::{errors::ErrorPage, comments::NewCommentForm, ContentLen};
|
||||
use routes::{PlumeRocket, errors::ErrorPage, comments::NewCommentForm, ContentLen};
|
||||
use template_utils::Ructe;
|
||||
use Worker;
|
||||
use Searcher;
|
||||
|
||||
#[get("/~/<blog>/<slug>?<responding_to>", rank = 4)]
|
||||
pub fn details(blog: String, slug: String, conn: DbConn, user: Option<User>, responding_to: Option<i32>, intl: I18n) -> Result<Ructe, ErrorPage> {
|
||||
|
@ -103,86 +101,96 @@ pub fn new_auth(blog: String, i18n: I18n) -> Flash<Redirect> {
|
|||
}
|
||||
|
||||
#[get("/~/<blog>/new", rank = 1)]
|
||||
pub fn new(blog: String, user: User, cl: ContentLen, conn: DbConn, intl: I18n) -> Result<Ructe, ErrorPage> {
|
||||
pub fn new(blog: String, cl: ContentLen, rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
|
||||
let conn = rockets.conn;
|
||||
let b = Blog::find_by_fqn(&*conn, &blog)?;
|
||||
let user = rockets.user.unwrap();
|
||||
let intl = rockets.intl;
|
||||
|
||||
if !user.is_author_in(&*conn, &b)? {
|
||||
// TODO actually return 403 error code
|
||||
Ok(render!(errors::not_authorized(
|
||||
return Ok(render!(errors::not_authorized(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "You are not author in this blog.")
|
||||
)))
|
||||
} else {
|
||||
let medias = Media::for_user(&*conn, user.id)?;
|
||||
Ok(render!(posts::new(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "New post"),
|
||||
b,
|
||||
false,
|
||||
&NewPostForm {
|
||||
license: Instance::get_local(&*conn)?.default_license,
|
||||
..NewPostForm::default()
|
||||
},
|
||||
true,
|
||||
None,
|
||||
ValidationErrors::default(),
|
||||
medias,
|
||||
cl.0
|
||||
)))
|
||||
}
|
||||
|
||||
let medias = Media::for_user(&*conn, user.id)?;
|
||||
Ok(render!(posts::new(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "New post"),
|
||||
b,
|
||||
false,
|
||||
&NewPostForm {
|
||||
license: Instance::get_local(&*conn)?.default_license,
|
||||
..NewPostForm::default()
|
||||
},
|
||||
true,
|
||||
None,
|
||||
ValidationErrors::default(),
|
||||
medias,
|
||||
cl.0
|
||||
)))
|
||||
}
|
||||
|
||||
#[get("/~/<blog>/<slug>/edit")]
|
||||
pub fn edit(blog: String, slug: String, user: User, cl: ContentLen, conn: DbConn, intl: I18n) -> Result<Ructe, ErrorPage> {
|
||||
pub fn edit(blog: String, slug: String, cl: ContentLen, rockets: PlumeRocket) -> Result<Ructe, ErrorPage> {
|
||||
let conn = rockets.conn;
|
||||
let intl = rockets.intl;
|
||||
let b = Blog::find_by_fqn(&*conn, &blog)?;
|
||||
let post = Post::find_by_slug(&*conn, &slug, b.id)?;
|
||||
let user = rockets.user.unwrap();
|
||||
|
||||
if !user.is_author_in(&*conn, &b)? {
|
||||
Ok(render!(errors::not_authorized(
|
||||
return Ok(render!(errors::not_authorized(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "You are not author in this blog.")
|
||||
)))
|
||||
} else {
|
||||
let source = if !post.source.is_empty() {
|
||||
post.source.clone()
|
||||
} else {
|
||||
post.content.get().clone() // fallback to HTML if the markdown was not stored
|
||||
};
|
||||
|
||||
let medias = Media::for_user(&*conn, user.id)?;
|
||||
let title = post.title.clone();
|
||||
Ok(render!(posts::new(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "Edit {0}"; &title),
|
||||
b,
|
||||
true,
|
||||
&NewPostForm {
|
||||
title: post.title.clone(),
|
||||
subtitle: post.subtitle.clone(),
|
||||
content: source,
|
||||
tags: Tag::for_post(&*conn, post.id)?
|
||||
.into_iter()
|
||||
.filter_map(|t| if !t.is_hashtag {Some(t.tag)} else {None})
|
||||
.collect::<Vec<String>>()
|
||||
.join(", "),
|
||||
license: post.license.clone(),
|
||||
draft: true,
|
||||
cover: post.cover_id,
|
||||
},
|
||||
!post.published,
|
||||
Some(post),
|
||||
ValidationErrors::default(),
|
||||
medias,
|
||||
cl.0
|
||||
)))
|
||||
}
|
||||
|
||||
|
||||
let source = if !post.source.is_empty() {
|
||||
post.source.clone()
|
||||
} else {
|
||||
post.content.get().clone() // fallback to HTML if the markdown was not stored
|
||||
};
|
||||
|
||||
let medias = Media::for_user(&*conn, user.id)?;
|
||||
let title = post.title.clone();
|
||||
Ok(render!(posts::new(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "Edit {0}"; &title),
|
||||
b,
|
||||
true,
|
||||
&NewPostForm {
|
||||
title: post.title.clone(),
|
||||
subtitle: post.subtitle.clone(),
|
||||
content: source,
|
||||
tags: Tag::for_post(&*conn, post.id)?
|
||||
.into_iter()
|
||||
.filter_map(|t| if !t.is_hashtag {Some(t.tag)} else {None})
|
||||
.collect::<Vec<String>>()
|
||||
.join(", "),
|
||||
license: post.license.clone(),
|
||||
draft: true,
|
||||
cover: post.cover_id,
|
||||
},
|
||||
!post.published,
|
||||
Some(post),
|
||||
ValidationErrors::default(),
|
||||
medias,
|
||||
cl.0
|
||||
)))
|
||||
}
|
||||
|
||||
#[post("/~/<blog>/<slug>/edit", data = "<form>")]
|
||||
pub fn update(blog: String, slug: String, user: User, cl: ContentLen, form: LenientForm<NewPostForm>, worker: Worker, conn: DbConn, intl: I18n, searcher: Searcher)
|
||||
pub fn update(blog: String, slug: String, cl: ContentLen, form: LenientForm<NewPostForm>, rockets: PlumeRocket)
|
||||
-> Result<Redirect, Ructe> {
|
||||
let conn = rockets.conn;
|
||||
let b = Blog::find_by_fqn(&*conn, &blog).expect("post::update: blog error");
|
||||
let mut post = Post::find_by_slug(&*conn, &slug, b.id).expect("post::update: find by slug error");
|
||||
let user = rockets.user.unwrap();
|
||||
let intl = rockets.intl;
|
||||
|
||||
let new_slug = if !post.published {
|
||||
form.title.to_string().to_kebab_case()
|
||||
|
@ -219,6 +227,8 @@ pub fn update(blog: String, slug: String, user: User, cl: ContentLen, form: Leni
|
|||
false
|
||||
};
|
||||
|
||||
let searcher = rockets.searcher;
|
||||
let worker = rockets.worker;
|
||||
post.slug = new_slug.clone();
|
||||
post.title = form.title.clone();
|
||||
post.subtitle = form.subtitle.clone();
|
||||
|
@ -263,7 +273,7 @@ pub fn update(blog: String, slug: String, user: User, cl: ContentLen, form: Leni
|
|||
b,
|
||||
true,
|
||||
&*form,
|
||||
form.draft.clone(),
|
||||
form.draft,
|
||||
Some(post),
|
||||
errors.clone(),
|
||||
medias.clone(),
|
||||
|
@ -296,9 +306,11 @@ pub fn valid_slug(title: &str) -> Result<(), ValidationError> {
|
|||
}
|
||||
|
||||
#[post("/~/<blog_name>/new", data = "<form>")]
|
||||
pub fn create(blog_name: String, form: LenientForm<NewPostForm>, user: User, cl: ContentLen, conn: DbConn, worker: Worker, intl: I18n, searcher: Searcher) -> Result<Redirect, Result<Ructe, ErrorPage>> {
|
||||
pub fn create(blog_name: String, form: LenientForm<NewPostForm>, cl: ContentLen, rockets: PlumeRocket) -> Result<Redirect, Result<Ructe, ErrorPage>> {
|
||||
let conn = rockets.conn;
|
||||
let blog = Blog::find_by_fqn(&*conn, &blog_name).expect("post::create: blog error");;
|
||||
let slug = form.title.to_string().to_kebab_case();
|
||||
let user = rockets.user.unwrap();
|
||||
|
||||
let mut errors = match form.validate() {
|
||||
Ok(_) => ValidationErrors::new(),
|
||||
|
@ -315,73 +327,76 @@ pub fn create(blog_name: String, form: LenientForm<NewPostForm>, user: User, cl:
|
|||
if errors.is_empty() {
|
||||
if !user.is_author_in(&*conn, &blog).expect("post::create: is author in error") {
|
||||
// actually it's not "Ok"…
|
||||
Ok(Redirect::to(uri!(super::blogs::details: name = blog_name, page = _)))
|
||||
} else {
|
||||
let (content, mentions, hashtags) = utils::md_to_html(
|
||||
form.content.to_string().as_ref(),
|
||||
&Instance::get_local(&conn).expect("post::create: local instance error").public_domain
|
||||
);
|
||||
|
||||
let post = Post::insert(&*conn, NewPost {
|
||||
blog_id: blog.id,
|
||||
slug: slug.to_string(),
|
||||
title: form.title.to_string(),
|
||||
content: SafeString::new(&content),
|
||||
published: !form.draft,
|
||||
license: form.license.clone(),
|
||||
ap_url: "".to_string(),
|
||||
creation_date: None,
|
||||
subtitle: form.subtitle.clone(),
|
||||
source: form.content.clone(),
|
||||
cover_id: form.cover,
|
||||
},
|
||||
&searcher,
|
||||
).expect("post::create: post save error");
|
||||
|
||||
PostAuthor::insert(&*conn, NewPostAuthor {
|
||||
post_id: post.id,
|
||||
author_id: user.id
|
||||
}).expect("post::create: author save error");
|
||||
|
||||
let tags = form.tags.split(',')
|
||||
.map(|t| t.trim().to_camel_case())
|
||||
.filter(|t| !t.is_empty())
|
||||
.collect::<HashSet<_>>();
|
||||
for tag in tags {
|
||||
Tag::insert(&*conn, NewTag {
|
||||
tag,
|
||||
is_hashtag: false,
|
||||
post_id: post.id
|
||||
}).expect("post::create: tags save error");
|
||||
}
|
||||
for hashtag in hashtags {
|
||||
Tag::insert(&*conn, NewTag {
|
||||
tag: hashtag.to_camel_case(),
|
||||
is_hashtag: true,
|
||||
post_id: post.id
|
||||
}).expect("post::create: hashtags save error");
|
||||
}
|
||||
|
||||
if post.published {
|
||||
for m in mentions {
|
||||
Mention::from_activity(
|
||||
&*conn,
|
||||
&Mention::build_activity(&*conn, &m).expect("post::create: mention build error"),
|
||||
post.id,
|
||||
true,
|
||||
true
|
||||
).expect("post::create: mention save error");
|
||||
}
|
||||
|
||||
let act = post.create_activity(&*conn).expect("posts::create: activity error");
|
||||
let dest = User::one_by_instance(&*conn).expect("posts::create: dest error");
|
||||
worker.execute(move || broadcast(&user, act, dest));
|
||||
}
|
||||
|
||||
Ok(Redirect::to(uri!(details: blog = blog_name, slug = slug, responding_to = _)))
|
||||
return Ok(Redirect::to(uri!(super::blogs::details: name = blog_name, page = _)))
|
||||
}
|
||||
|
||||
let (content, mentions, hashtags) = utils::md_to_html(
|
||||
form.content.to_string().as_ref(),
|
||||
&Instance::get_local(&conn).expect("post::create: local instance error").public_domain
|
||||
);
|
||||
|
||||
let searcher = rockets.searcher;
|
||||
let post = Post::insert(&*conn, NewPost {
|
||||
blog_id: blog.id,
|
||||
slug: slug.to_string(),
|
||||
title: form.title.to_string(),
|
||||
content: SafeString::new(&content),
|
||||
published: !form.draft,
|
||||
license: form.license.clone(),
|
||||
ap_url: "".to_string(),
|
||||
creation_date: None,
|
||||
subtitle: form.subtitle.clone(),
|
||||
source: form.content.clone(),
|
||||
cover_id: form.cover,
|
||||
},
|
||||
&searcher,
|
||||
).expect("post::create: post save error");
|
||||
|
||||
PostAuthor::insert(&*conn, NewPostAuthor {
|
||||
post_id: post.id,
|
||||
author_id: user.id
|
||||
}).expect("post::create: author save error");
|
||||
|
||||
let tags = form.tags.split(',')
|
||||
.map(|t| t.trim().to_camel_case())
|
||||
.filter(|t| !t.is_empty())
|
||||
.collect::<HashSet<_>>();
|
||||
for tag in tags {
|
||||
Tag::insert(&*conn, NewTag {
|
||||
tag,
|
||||
is_hashtag: false,
|
||||
post_id: post.id
|
||||
}).expect("post::create: tags save error");
|
||||
}
|
||||
for hashtag in hashtags {
|
||||
Tag::insert(&*conn, NewTag {
|
||||
tag: hashtag.to_camel_case(),
|
||||
is_hashtag: true,
|
||||
post_id: post.id
|
||||
}).expect("post::create: hashtags save error");
|
||||
}
|
||||
|
||||
if post.published {
|
||||
for m in mentions {
|
||||
Mention::from_activity(
|
||||
&*conn,
|
||||
&Mention::build_activity(&*conn, &m).expect("post::create: mention build error"),
|
||||
post.id,
|
||||
true,
|
||||
true
|
||||
).expect("post::create: mention save error");
|
||||
}
|
||||
|
||||
let act = post.create_activity(&*conn).expect("posts::create: activity error");
|
||||
let dest = User::one_by_instance(&*conn).expect("posts::create: dest error");
|
||||
let worker = rockets.worker;
|
||||
worker.execute(move || broadcast(&user, act, dest));
|
||||
}
|
||||
|
||||
Ok(Redirect::to(uri!(details: blog = blog_name, slug = slug, responding_to = _)))
|
||||
} else {
|
||||
let medias = Media::for_user(&*conn, user.id).expect("posts::create: medias error");
|
||||
let intl = rockets.intl;
|
||||
Err(Ok(render!(posts::new(
|
||||
&(&*conn, &intl.catalog, Some(user)),
|
||||
i18n!(intl.catalog, "New post"),
|
||||
|
@ -398,22 +413,28 @@ pub fn create(blog_name: String, form: LenientForm<NewPostForm>, user: User, cl:
|
|||
}
|
||||
|
||||
#[post("/~/<blog_name>/<slug>/delete")]
|
||||
pub fn delete(blog_name: String, slug: String, conn: DbConn, user: User, worker: Worker, searcher: Searcher) -> Result<Redirect, ErrorPage> {
|
||||
pub fn delete(blog_name: String, slug: String, rockets: PlumeRocket) -> Result<Redirect, ErrorPage> {
|
||||
let conn = rockets.conn;
|
||||
let user = rockets.user.unwrap();
|
||||
let post = Blog::find_by_fqn(&*conn, &blog_name)
|
||||
.and_then(|blog| Post::find_by_slug(&*conn, &slug, blog.id));
|
||||
|
||||
if let Ok(post) = post {
|
||||
if !post.get_authors(&*conn)?.into_iter().any(|a| a.id == user.id) {
|
||||
Ok(Redirect::to(uri!(details: blog = blog_name.clone(), slug = slug.clone(), responding_to = _)))
|
||||
} else {
|
||||
let dest = User::one_by_instance(&*conn)?;
|
||||
let delete_activity = post.delete(&(&conn, &searcher))?;
|
||||
let user_c = user.clone();
|
||||
worker.execute(move || broadcast(&user_c, delete_activity, dest));
|
||||
worker.execute_after(Duration::from_secs(10*60), move || {user.rotate_keypair(&conn).expect("Failed to rotate keypair");});
|
||||
|
||||
Ok(Redirect::to(uri!(super::blogs::details: name = blog_name, page = _)))
|
||||
return Ok(Redirect::to(uri!(details: blog = blog_name.clone(), slug = slug.clone(), responding_to = _)))
|
||||
}
|
||||
|
||||
let searcher = rockets.searcher;
|
||||
let worker = rockets.worker;
|
||||
|
||||
let dest = User::one_by_instance(&*conn)?;
|
||||
let delete_activity = post.delete(&(&conn, &searcher))?;
|
||||
let user_c = user.clone();
|
||||
|
||||
worker.execute(move || broadcast(&user_c, delete_activity, dest));
|
||||
worker.execute_after(Duration::from_secs(10*60), move || {user.rotate_keypair(&conn).expect("Failed to rotate keypair");});
|
||||
|
||||
Ok(Redirect::to(uri!(super::blogs::details: name = blog_name, page = _)))
|
||||
} else {
|
||||
Ok(Redirect::to(uri!(super::blogs::details: name = blog_name, page = _)))
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ use plume_models::{
|
|||
use routes::Page;
|
||||
use template_utils::Ructe;
|
||||
use Searcher;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Default, FromForm)]
|
||||
pub struct SearchQuery {
|
||||
|
@ -57,7 +58,7 @@ macro_rules! param_to_query {
|
|||
pub fn search(query: Option<Form<SearchQuery>>, conn: DbConn, searcher: Searcher, user: Option<User>, intl: I18n) -> Ructe {
|
||||
let query = query.map(|f| f.into_inner()).unwrap_or_default();
|
||||
let page = query.page.unwrap_or_default();
|
||||
let mut parsed_query = Query::from_str(&query.q.as_ref().map(|q| q.as_str()).unwrap_or_default());
|
||||
let mut parsed_query = Query::from_str(&query.q.as_ref().map(|q| q.as_str()).unwrap_or_default()).unwrap_or_default();
|
||||
|
||||
param_to_query!(query, parsed_query; normal: title, subtitle, content, tag,
|
||||
instance, author, blog, lang, license;
|
||||
|
|
|
@ -161,10 +161,10 @@ pub fn password_reset_request(
|
|||
i18n!(intl.catalog, "Password reset"),
|
||||
i18n!(intl.catalog, "Here is the link to reset your password: {0}"; link)
|
||||
) {
|
||||
match *mail.lock().unwrap() {
|
||||
Some(ref mut mail) => { mail.send(message.into()).map_err(|_| eprintln!("Couldn't send password reset mail")).ok(); }
|
||||
None => {}
|
||||
}
|
||||
if let Some(ref mut mail) = *mail.lock().unwrap() {
|
||||
mail
|
||||
.send(message.into())
|
||||
.map_err(|_| eprintln!("Couldn't send password reset mail")).ok(); }
|
||||
}
|
||||
}
|
||||
render!(session::password_reset_request_ok(
|
||||
|
@ -214,7 +214,7 @@ pub fn password_reset(
|
|||
form.validate()
|
||||
.and_then(|_| {
|
||||
let mut requests = requests.lock().unwrap();
|
||||
let req = requests.iter().find(|x| x.id == token.clone()).ok_or(to_validation(0))?.clone();
|
||||
let req = requests.iter().find(|x| x.id == token.clone()).ok_or_else(|| to_validation(0))?.clone();
|
||||
if req.creation_date.elapsed().as_secs() < 60 * 60 * 2 { // Reset link is only valid for 2 hours
|
||||
requests.retain(|r| *r != req);
|
||||
let user = User::find_by_email(&*conn, &req.mail).map_err(to_validation)?;
|
||||
|
|
|
@ -23,7 +23,7 @@ use plume_models::{
|
|||
blogs::Blog, db_conn::DbConn, follows, headers::Headers, instance::Instance, posts::{LicensedArticle, Post},
|
||||
reshares::Reshare, users::*,
|
||||
};
|
||||
use routes::{Page, errors::ErrorPage};
|
||||
use routes::{Page, PlumeRocket, errors::ErrorPage};
|
||||
use template_utils::Ructe;
|
||||
use Worker;
|
||||
use Searcher;
|
||||
|
@ -39,18 +39,17 @@ pub fn me(user: Option<User>) -> Result<Redirect, Flash<Redirect>> {
|
|||
#[get("/@/<name>", rank = 2)]
|
||||
pub fn details(
|
||||
name: String,
|
||||
conn: DbConn,
|
||||
account: Option<User>,
|
||||
worker: Worker,
|
||||
rockets: PlumeRocket,
|
||||
fetch_articles_conn: DbConn,
|
||||
fetch_followers_conn: DbConn,
|
||||
update_conn: DbConn,
|
||||
intl: I18n,
|
||||
searcher: Searcher,
|
||||
) -> Result<Ructe, ErrorPage> {
|
||||
let conn = rockets.conn;
|
||||
let user = User::find_by_fqn(&*conn, &name)?;
|
||||
let recents = Post::get_recents_for_author(&*conn, &user, 6)?;
|
||||
let reshares = Reshare::get_recents_for_author(&*conn, &user, 6)?;
|
||||
let searcher = rockets.searcher;
|
||||
let worker = rockets.worker;
|
||||
|
||||
if !user.get_instance(&*conn)?.local {
|
||||
// Fetch new articles
|
||||
|
@ -99,6 +98,8 @@ pub fn details(
|
|||
}
|
||||
}
|
||||
|
||||
let account = rockets.user;
|
||||
let intl = rockets.intl;
|
||||
Ok(render!(users::details(
|
||||
&(&*conn, &intl.catalog, account.clone()),
|
||||
user.clone(),
|
||||
|
|
|
@ -27,7 +27,7 @@ impl<'r> Responder<'r> for Ructe {
|
|||
let mut hasher = DefaultHasher::new();
|
||||
hasher.write(&self.0);
|
||||
let etag = format!("{:x}", hasher.finish());
|
||||
if r.headers().get("If-None-Match").any(|s| &s[1..s.len()-1] == etag) {
|
||||
if r.headers().get("If-None-Match").any(|s| s[1..s.len()-1] == etag) {
|
||||
Response::build()
|
||||
.status(Status::NotModified)
|
||||
.header(ETag(EntityTag::strong(etag)))
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
@use template_utils::*;
|
||||
@use routes::*;
|
||||
|
||||
@(ctx: BaseContext, blog: Blog, authors: &Vec<User>, total_articles: i64, page: i32, n_pages: i32, is_author: bool, posts: Vec<Post>)
|
||||
@(ctx: BaseContext, blog: Blog, authors: &[User], total_articles: i64, page: i32, n_pages: i32, is_author: bool, posts: Vec<Post>)
|
||||
|
||||
@:base(ctx, blog.title.clone(), {}, {
|
||||
<a href="@uri!(blogs::details: name = &blog.fqn, page = _)">@blog.title</a>
|
||||
|
@ -36,7 +36,7 @@
|
|||
@i18n!(ctx.1, "Latest articles")
|
||||
<small><a href="@uri!(blogs::atom_feed: name = &blog.fqn)" title="Atom feed">@icon!("rss")</a></small>
|
||||
</h2>
|
||||
@if posts.len() < 1 {
|
||||
@if posts.is_empty() {
|
||||
<p>@i18n!(ctx.1, "No posts to see here yet.")</p>
|
||||
}
|
||||
@if is_author {
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<div class="h-feed">
|
||||
<h1 "p-name">@i18n!(ctx.1, "All the articles of the Fediverse")</h1>
|
||||
|
||||
@if let Some(_) = ctx.2 {
|
||||
@if ctx.2.is_some() {
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false),
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
<div class="h-feed">
|
||||
<h1 class="p-name">@i18n!(ctx.1, "Articles from {}"; instance.name)</h1>
|
||||
|
||||
@if let Some(_) = ctx.2 {
|
||||
@if ctx.2.is_some() {
|
||||
@tabs(&[
|
||||
(&uri!(instance::index).to_string(), i18n!(ctx.1, "Latest articles"), false),
|
||||
(&uri!(instance::feed: _).to_string(), i18n!(ctx.1, "Your feed"), false),
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
|
||||
<section>
|
||||
<figure class="media">
|
||||
@Html(media.html(ctx.0).unwrap_or(SafeString::new("")))
|
||||
@Html(media.html(ctx.0).unwrap_or_else(|_| SafeString::new("")))
|
||||
<figcaption>@media.alt_text</figcaption>
|
||||
</figure>
|
||||
<div>
|
||||
|
@ -22,7 +22,7 @@
|
|||
@i18n!(ctx.1, "Markdown syntax")
|
||||
<small>@i18n!(ctx.1, "Copy it into your articles, to insert this media:")</small>
|
||||
</p>
|
||||
<code>@media.markdown(ctx.0).unwrap_or(SafeString::new(""))</code>
|
||||
<code>@media.markdown(ctx.0).unwrap_or_else(|_| SafeString::new(""))</code>
|
||||
</div>
|
||||
<div>
|
||||
@if media.category() == MediaCategory::Image {
|
||||
|
|
|
@ -5,12 +5,12 @@
|
|||
@(ctx: BaseContext, comment_tree: &CommentTree, in_reply_to: Option<&str>, blog: &str, slug: &str)
|
||||
|
||||
@if let Some(ref comm) = Some(&comment_tree.comment) {
|
||||
@if let Some(author) = comm.get_author(ctx.0).ok() {
|
||||
@if let Ok(author) = comm.get_author(ctx.0) {
|
||||
<div class="comment u-comment h-cite" id="comment-@comm.id">
|
||||
<a class="author u-author h-card" href="@uri!(user::details: name = &author.fqn)">
|
||||
@avatar(ctx.0, &author, Size::Small, true, ctx.1)
|
||||
<span class="display-name p-name">@author.name()</span>
|
||||
<small>@&author.fqn</small>
|
||||
<small>@author.fqn</small>
|
||||
</a>
|
||||
@if let Some(ref ap_url) = comm.ap_url {
|
||||
<a class="u-url" href="@ap_url"></a>
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
@(ctx: BaseContext, articles: Vec<Post>, link: &str, title: String)
|
||||
|
||||
@if articles.len() > 0 {
|
||||
@if !articles.is_empty() {
|
||||
<div class="h-feed">
|
||||
<h2><span class="p-name">@title</span> — <a href="@link">@i18n!(ctx.1, "View all")</a></h2>
|
||||
<div class="cards spaced">
|
||||
|
|
|
@ -23,8 +23,8 @@
|
|||
<a href="@uri!(blogs::details: name = &blog.fqn, page = _)">@blog.title</a>
|
||||
}, {
|
||||
<div class="h-entry">
|
||||
<h1 class="article p-name">@&article.title</h1>
|
||||
<h2 class="article p-summary">@&article.subtitle</h2>
|
||||
<h1 class="article p-name">@article.title</h1>
|
||||
<h2 class="article p-summary">@article.subtitle</h2>
|
||||
<div class="article-info">
|
||||
<span class="author">
|
||||
@Html(i18n!(ctx.1, "Written by {0}"; format!("<a href=\"{}\">{}</a>",
|
||||
|
|
|
@ -27,7 +27,7 @@
|
|||
@input!(ctx.1, subtitle (optional text), "Subtitle", form, errors.clone(), "")
|
||||
|
||||
@if let Some(ValidationErrorsKind::Field(errs)) = errors.clone().errors().get("content") {
|
||||
@format!(r#"<p class="error">{}</p>"#, errs[0].message.clone().unwrap_or(Cow::from("Unknown error")))
|
||||
@format!(r#"<p class="error">{}</p>"#, errs[0].message.clone().unwrap_or_else(|| Cow::from("Unknown error")))
|
||||
}
|
||||
|
||||
<label for="editor-content">@i18n!(ctx.1, "Content")<small>@i18n!(ctx.1, "Markdown syntax is supported")</small></label>
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
<h1 class="grow flex vertical">
|
||||
<span class="p-name">@user.name()</span>
|
||||
<small class="p-nickname">@&user.fqn</small>
|
||||
<small class="p-nickname">@user.fqn</small>
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
|
|