Configure linter and fix its warnings

silverpill 2021-11-13 17:37:31 +00:00
parent e9c5bda55c
commit c473070fd4
30 changed files with 89 additions and 76 deletions

.cargo/config.toml (new file)

@@ -0,0 +1,9 @@
+# https://github.com/rust-lang/cargo/issues/5034#issuecomment-927105016
+[target.'cfg(feature = "cargo-clippy")']
+rustflags = [
+    "-Aclippy::let_and_return",
+    "-Aclippy::map_entry",
+    "-Aclippy::or_fun_call",
+    "-Aclippy::redundant_field_names",
+    "-Aclippy::unused_unit",
+]
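These `rustflags` rely on the `cfg(feature = "cargo-clippy")` workaround described in the linked cargo issue, which is intended to keep the flags from affecting a regular `cargo build`. For comparison, a minimal sketch (not part of this commit) of how the same lints could be allowed with crate-level attributes instead of build flags:

```rust
// Hypothetical alternative: crate-level attributes at the top of src/main.rs
// (or src/lib.rs) silence the same clippy lints without .cargo/config.toml.
#![allow(clippy::let_and_return)]
#![allow(clippy::map_entry)]
#![allow(clippy::or_fun_call)]
#![allow(clippy::redundant_field_names)]
#![allow(clippy::unused_unit)]

fn main() {
    // `cargo clippy` skips the allowed lints for this crate.
    println!("clippy configuration sketch");
}
```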

Changed file (path not shown)

@@ -54,6 +54,12 @@ cargo run
 cargo run --bin mitractl
 ```
+### Run linter
+```
+cargo clippy
+```
 ### Run tests
 ```

Changed file (path not shown)

@@ -98,20 +98,19 @@ fn create_activity(
 ) -> Activity {
     let actor_id = get_actor_url(
         instance_url,
-        &actor_name,
+        actor_name,
     );
     let activity_id = get_object_url(
         instance_url,
         &activity_uuid.unwrap_or(Uuid::new_v4()),
     );
-    let activity = Activity {
+    Activity {
         context: json!(AP_CONTEXT),
         id: activity_id,
         activity_type: activity_type.to_string(),
         actor: actor_id,
         object: serde_json::to_value(object).unwrap(),
-    };
-    activity
+    }
 }
 pub fn create_note(

Changed file (path not shown)

@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 use serde_json::Value;
@@ -29,7 +29,7 @@ pub enum FetchError {
 pub async fn fetch_avatar_and_banner(
     actor: &Actor,
-    media_dir: &PathBuf,
+    media_dir: &Path,
 ) -> Result<(Option<String>, Option<String>), FetchError> {
     let avatar = match &actor.icon {
         Some(icon) => {
@@ -57,7 +57,7 @@ pub async fn fetch_avatar_and_banner(
 pub async fn fetch_profile(
     username: &str,
     instance_host: &str,
-    media_dir: &PathBuf,
+    media_dir: &Path,
 ) -> Result<ProfileCreateData, FetchError> {
     let actor_address = format!("{}@{}", &username, &instance_host);
     let webfinger_account_uri = format!("acct:{}", actor_address);
@@ -80,7 +80,7 @@ pub async fn fetch_profile(
 pub async fn fetch_profile_by_actor_id(
     actor_url: &str,
-    media_dir: &PathBuf,
+    media_dir: &Path,
 ) -> Result<ProfileCreateData, FetchError> {
     let actor_host = url::Url::parse(actor_url)?
         .host_str()
@@ -115,7 +115,7 @@ pub async fn fetch_profile_by_actor_id(
 pub async fn fetch_attachment(
     url: &str,
-    output_dir: &PathBuf,
+    output_dir: &Path,
 ) -> Result<String, FetchError> {
     let response = reqwest::get(url).await?;
     let file_data = response.bytes().await?;
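Most of the changes in this file replace `&PathBuf` parameters with `&Path`, which is what clippy's `ptr_arg` lint suggests. A small self-contained sketch (paths and names are hypothetical) of why the borrowed form is more flexible:

```rust
use std::path::{Path, PathBuf};

// Accepting &Path instead of &PathBuf works for both owned and borrowed
// paths, so callers are not forced to allocate a PathBuf first.
fn media_file_path(media_dir: &Path, file_name: &str) -> PathBuf {
    media_dir.join(file_name)
}

fn main() {
    let owned: PathBuf = PathBuf::from("/tmp/mitra-media");
    // A &PathBuf coerces to &Path, so existing call sites keep working...
    println!("{:?}", media_file_path(&owned, "avatar.png"));
    // ...and borrowed paths can be passed without any allocation.
    println!("{:?}", media_file_path(Path::new("/tmp"), "banner.png"));
}
```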

Changed file (path not shown)

@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 use regex::Regex;
 use serde_json::Value;
@@ -52,7 +52,7 @@ fn parse_actor_id(
     );
     let url_regexp = Regex::new(&url_regexp_str)
         .map_err(|_| ValidationError("error"))?;
-    let url_caps = url_regexp.captures(&actor_id)
+    let url_caps = url_regexp.captures(actor_id)
         .ok_or(ValidationError("invalid actor ID"))?;
     let username = url_caps.name("username")
         .ok_or(ValidationError("invalid actor ID"))?
@@ -71,7 +71,7 @@ fn parse_object_id(
     );
     let url_regexp = Regex::new(&url_regexp_str)
         .map_err(|_| ValidationError("error"))?;
-    let url_caps = url_regexp.captures(&object_id)
+    let url_caps = url_regexp.captures(object_id)
         .ok_or(ValidationError("invalid object ID"))?;
     let object_uuid: Uuid = url_caps.name("uuid")
         .ok_or(ValidationError("invalid object ID"))?
@@ -83,7 +83,7 @@ fn parse_object_id(
 async fn get_or_fetch_profile_by_actor_id(
     db_client: &impl GenericClient,
     actor_id: &str,
-    media_dir: &PathBuf,
+    media_dir: &Path,
 ) -> Result<DbActorProfile, HttpError> {
     let profile = match get_profile_by_actor_id(db_client, actor_id).await {
         Ok(profile) => profile,
@@ -116,6 +116,7 @@ pub async fn process_note(
     // Fetch ancestors by going through inReplyTo references
     // TODO: fetch replies too
+    #[allow(clippy::while_let_loop)]
     loop {
         let object_id = match maybe_parent_object_id {
             Some(parent_object_id) => {
@@ -237,7 +238,7 @@ pub async fn receive_activity(
         .and_then(|val| val.as_str())
         .unwrap_or("Unknown")
         .to_owned();
-    let db_client = &mut **get_database_client(&db_pool).await?;
+    let db_client = &mut **get_database_client(db_pool).await?;
     match (activity_type.as_str(), object_type.as_str()) {
         (ACCEPT, FOLLOW) => {
             let object: Object = serde_json::from_value(activity.object)
@@ -313,7 +314,7 @@ pub async fn receive_activity(
             // Send activity
             let recipients = vec![source_actor];
-            deliver_activity(&config, &target_user, new_activity, recipients);
+            deliver_activity(config, &target_user, new_activity, recipients);
         },
         (UNDO, FOLLOW) => {
             let object: Object = serde_json::from_value(activity.object)
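The `#[allow(clippy::while_let_loop)]` attribute added above keeps the explicit `loop`/`match` structure in `process_note`. For reference, a standalone sketch (unrelated to the fetching logic here) of the two shapes the lint distinguishes:

```rust
fn main() {
    // The shape clippy::while_let_loop flags: a loop whose body starts with
    // a match that breaks on the None arm...
    let mut queue = vec![1, 2, 3];
    loop {
        let item = match queue.pop() {
            Some(item) => item,
            None => break,
        };
        println!("processing {}", item);
    }

    // ...and the `while let` form the lint would suggest instead.
    let mut queue = vec![1, 2, 3];
    while let Some(item) = queue.pop() {
        println!("processing {}", item);
    }
}
```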

Changed file (path not shown)

@@ -1,5 +1,4 @@
 use clap::Clap;
-use tokio;
 use uuid::Uuid;
 use mitra::config;
@@ -82,7 +81,7 @@ async fn main() {
        },
        SubCommand::ListInviteCodes(_) => {
            let invite_codes = get_invite_codes(db_client).await.unwrap();
-           if invite_codes.len() == 0 {
+           if invite_codes.is_empty() {
                println!("no invite codes found");
                return;
            }

Changed file (path not shown)

@@ -3,15 +3,14 @@ pub mod migrate;
 pub type Pool = deadpool_postgres::Pool;
 pub fn create_pool(database_url: &str) -> Pool {
-    let pool = deadpool_postgres::Pool::new(
+    deadpool_postgres::Pool::new(
         deadpool_postgres::Manager::new(
             database_url.parse().expect("invalid database URL"),
             tokio_postgres::NoTls,
         ),
         // https://wiki.postgresql.org/wiki/Number_Of_Database_Connections
         num_cpus::get() * 2,
-    );
-    pool
+    )
 }
 use tokio_postgres::error::{Error as PgError, SqlState};

Changed file (path not shown)

@@ -1,5 +1,5 @@
 use std::fs;
-use std::path::PathBuf;
+use std::path::Path;
 pub const COLLECTIBLE: &str = "Collectible";
 pub const MANAGER: &str = "Manager";
@@ -17,7 +17,7 @@ pub enum ArtifactError {
 }
 pub fn load_abi(
-    contract_dir: &PathBuf,
+    contract_dir: &Path,
     contract_name: &str,
 ) -> Result<Vec<u8>, ArtifactError> {
     let contract_artifact_path = contract_dir.join(format!("{}.json", contract_name));

Changed file (path not shown)

@@ -74,7 +74,7 @@ pub async fn process_events(
     db_pool: &Pool,
     token_waitlist_map: &mut HashMap<Uuid, DateTime<Utc>>,
 ) -> Result<(), EthereumError> {
-    let db_client = &**get_database_client(&db_pool).await?;
+    let db_client = &**get_database_client(db_pool).await?;
     // Create/update token waitlist map
     let token_waitlist = get_token_waitlist(db_client).await?;
@@ -196,7 +196,7 @@ pub fn create_mint_signature(
     let contract_address = parse_address(&contract_config.address)?;
     let user_address = parse_address(user_address)?;
     let chain_id: U256 = contract_config.chain_id.into();
-    let chain_id_token = Token::Uint(chain_id.into());
+    let chain_id_token = Token::Uint(chain_id);
     let chain_id_bin = encode(&[chain_id_token]);
     let message = [
         &chain_id_bin,

Changed file (path not shown)

@@ -42,7 +42,7 @@ pub fn sign_message(
     signing_key: &str,
     message: &[u8],
 ) -> Result<SignatureData, SignatureError> {
-    let key = SecretKey::from_str(&signing_key)?;
+    let key = SecretKey::from_str(signing_key)?;
     let message_hash = keccak256(message);
     let eip_191_message = [
         "\x19Ethereum Signed Message:\n32".as_bytes(),

Changed file (path not shown)

@@ -58,7 +58,7 @@ fn parse_http_signature(
     );
     let signature_header_regexp = Regex::new(signature_header_regexp_raw).unwrap();
     let signature_header_caps = signature_header_regexp
-        .captures(&signature_header)
+        .captures(signature_header)
         .ok_or(VerificationError::HeaderError("invalid signature header"))?;
     let key_id = signature_header_caps.name("key_id")
         .ok_or(VerificationError::ParseError("keyId parameter is missing"))?
@@ -78,7 +78,7 @@ fn parse_http_signature(
         request_method.as_str().to_lowercase(),
         request_uri,
     );
-    for header in headers_parameter.split(" ") {
+    for header in headers_parameter.split(' ') {
         if header == "(request-target)" {
             continue;
         }

Changed file (path not shown)

@@ -12,7 +12,7 @@ pub struct ParseError;
 pub fn parse_ipfs_url(url: &str) -> Result<String, ParseError> {
     let regexp = Regex::new(r"ipfs://(?P<cid>\w+)").unwrap();
-    let caps = regexp.captures(&url).ok_or(ParseError)?;
+    let caps = regexp.captures(url).ok_or(ParseError)?;
     let cid = caps.name("cid")
         .ok_or(ParseError)?
         .as_str().to_string();

Changed file (path not shown)

@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
@@ -49,9 +49,9 @@ pub struct Account {
 impl Account {
     pub fn from_profile(profile: DbActorProfile, instance_url: &str) -> Self {
         let avatar_url = profile.avatar_file_name.as_ref()
-            .map(|name| get_file_url(instance_url, &name));
+            .map(|name| get_file_url(instance_url, name));
         let header_url = profile.banner_file_name.as_ref()
-            .map(|name| get_file_url(instance_url, &name));
+            .map(|name| get_file_url(instance_url, name));
         let fields = profile.extra_fields.unpack().into_iter()
             .map(|field| AccountField { name: field.name, value: field.value })
             .collect();
@@ -126,17 +126,17 @@ pub struct AccountUpdateData {
 fn process_b64_image_field_value(
     form_value: Option<String>,
     db_value: Option<String>,
-    output_dir: &PathBuf,
+    output_dir: &Path,
 ) -> Result<Option<String>, FileError> {
     let maybe_file_name = match form_value {
         Some(b64_data) => {
-            if b64_data == "" {
+            if b64_data.is_empty() {
                // Remove file
                None
            } else {
                // Decode and save file
                let (file_name, _) = save_validated_b64_file(
-                    &b64_data, &output_dir, "image/",
+                    &b64_data, output_dir, "image/",
                )?;
                Some(file_name)
            }
@@ -152,7 +152,7 @@ impl AccountUpdateData {
         self,
         current_avatar: &Option<String>,
         current_banner: &Option<String>,
-        media_dir: &PathBuf,
+        media_dir: &Path,
     ) -> Result<ProfileUpdateData, FileError> {
         let avatar = process_b64_image_field_value(
             self.avatar, current_avatar.clone(), media_dir,

Changed file (path not shown)

@@ -49,21 +49,21 @@ pub async fn create_account(
     if !config.registrations_open {
         let invite_code = user_data.invite_code.as_ref()
             .ok_or(ValidationError("invite code is required"))?;
-        if !is_valid_invite_code(db_client, &invite_code).await? {
-            Err(ValidationError("invalid invite code"))?;
+        if !is_valid_invite_code(db_client, invite_code).await? {
+            return Err(ValidationError("invalid invite code").into());
         }
     }
     if config.ethereum_contract.is_some() {
         let is_allowed = is_allowed_user(&config, &user_data.wallet_address).await
             .map_err(|_| HttpError::InternalError)?;
         if !is_allowed {
-            Err(ValidationError("not allowed to sign up"))?;
+            return Err(ValidationError("not allowed to sign up").into());
         }
     }
     // Hash password and generate private key
     let password_hash = hash_password(&user_data.password)
         .map_err(|_| HttpError::InternalError)?;
-    let private_key = match web::block(move || generate_private_key()).await {
+    let private_key = match web::block(generate_private_key).await {
         Ok(private_key) => private_key,
         Err(_) => return Err(HttpError::InternalError),
     };

Changed file (path not shown)

@@ -27,7 +27,7 @@ impl From<&Config> for InstanceInfo {
            short_description: config.instance_short_description.clone(),
            description: config.instance_description.clone(),
            version: config.version.clone(),
-           registrations: config.registrations_open.clone(),
+           registrations: config.registrations_open,
            login_message: config.login_message.clone(),
            ethereum_explorer_url: config.ethereum_explorer_url.clone(),
            nft_contract_name: config.ethereum_contract.as_ref()

Changed file (path not shown)

@@ -18,8 +18,8 @@ async fn token_view(
     db_pool: web::Data<Pool>,
     request_data: web::Json<TokenRequest>,
 ) -> Result<HttpResponse, HttpError> {
-    if request_data.grant_type != "password".to_string() {
-        Err(ValidationError("unsupported grant type"))?;
+    if &request_data.grant_type != "password" {
+        return Err(ValidationError("unsupported grant type").into());
     }
     let db_client = &**get_database_client(&db_pool).await?;
     let user = get_user_by_wallet_address(
@@ -32,7 +32,7 @@ async fn token_view(
     ).map_err(|_| HttpError::InternalError)?;
     if !password_correct {
         // Invalid signature/password
-        Err(ValidationError("incorrect password"))?;
+        return Err(ValidationError("incorrect password").into());
     }
     let access_token = generate_access_token();
     let created_at = Utc::now();

Changed file (path not shown)

@@ -22,7 +22,7 @@ fn parse_profile_query(query: &str) ->
         .ok_or(ValidationError("invalid search query"))?
         .as_str().to_string();
     let maybe_instance = acct_caps.name("instance")
-        .and_then(|val| Some(val.as_str().to_string()));
+        .map(|val| val.as_str().to_string());
     Ok((username, maybe_instance))
 }
@@ -39,7 +39,7 @@ async fn search_profiles(
        },
    };
    let mut profiles = search_profile(db_client, &username, instance.as_ref()).await?;
-   if profiles.len() == 0 && instance.is_some() {
+   if profiles.is_empty() && instance.is_some() {
        let instance_host = instance.unwrap();
        let media_dir = config.media_dir();
        match fetch_profile(&username, &instance_host, &media_dir).await {

Changed file (path not shown)

@@ -18,7 +18,7 @@ async fn search_view(
 ) -> Result<HttpResponse, HttpError> {
     let db_client = &mut **get_database_client(&db_pool).await?;
     get_current_user(db_client, auth.token()).await?;
-    let results = search(&config, db_client, &query_params.q.trim()).await?;
+    let results = search(&config, db_client, query_params.q.trim()).await?;
     Ok(HttpResponse::Ok().json(results))
 }

Changed file (path not shown)

@@ -182,7 +182,7 @@ async fn favourite(
        let activity = create_activity_like(
            &config.instance_url(),
            &current_user.profile,
-           &object_id,
+           object_id,
        );
        deliver_activity(&config, &current_user, activity, vec![remote_actor]);
    }
@@ -238,7 +238,7 @@ async fn make_permanent(
        let image_path = config.media_dir().join(&attachment.file_name);
        let image_data = std::fs::read(image_path)
            .map_err(|_| HttpError::InternalError)?;
-       let image_cid = ipfs_store::add(&ipfs_api_url, image_data).await
+       let image_cid = ipfs_store::add(ipfs_api_url, image_data).await
            .map_err(|_| HttpError::InternalError)?;
        set_attachment_ipfs_cid(db_client, &attachment.id, &image_cid).await?;
        image_cid
@@ -259,7 +259,7 @@ async fn make_permanent(
     let post_metadata_json = serde_json::to_string(&post_metadata)
         .map_err(|_| HttpError::InternalError)?
         .as_bytes().to_vec();
-    let post_metadata_cid = ipfs_store::add(&ipfs_api_url, post_metadata_json).await
+    let post_metadata_cid = ipfs_store::add(ipfs_api_url, post_metadata_json).await
         .map_err(|_| HttpError::InternalError)?;
     // Update post
@@ -284,14 +284,14 @@ async fn get_signature(
     let post = get_post_by_id(db_client, &status_id).await?;
     if post.author.id != current_user.id {
         // Users can only tokenize their own posts
-        Err(HttpError::NotFoundError("post"))?;
+        return Err(HttpError::NotFoundError("post"));
     }
     let ipfs_cid = post.ipfs_cid
         // Post metadata is not immutable
         .ok_or(HttpError::ValidationError("post is not immutable".into()))?;
     let token_uri = get_ipfs_url(&ipfs_cid);
     let signature = create_mint_signature(
-        &contract_config,
+        contract_config,
         &current_user.wallet_address,
         &token_uri,
     ).map_err(|_| HttpError::InternalError)?;

Changed file (path not shown)

@@ -13,7 +13,7 @@ pub struct DeletionQueue {
 impl DeletionQueue {
     pub async fn process(self, config: &Config) -> () {
         remove_files(self.files, &config.media_dir());
-        if self.ipfs_objects.len() > 0 {
+        if !self.ipfs_objects.is_empty() {
            match &config.ipfs_api_url {
                Some(ipfs_api_url) => {
                    ipfs_store::remove(ipfs_api_url, self.ipfs_objects).await

Changed file (path not shown)

@@ -93,7 +93,7 @@ pub async fn get_notifications(
         &[&recipient_id],
     ).await?;
     let mut notifications: Vec<Notification> = rows.iter()
-        .map(|row| Notification::try_from(row))
+        .map(Notification::try_from)
         .collect::<Result<_, _>>()?;
     let posts = notifications.iter_mut()
         .filter_map(|item| item.post.as_mut())
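The `.map(Notification::try_from)` change here (and the matching ones in the other query modules below) follows clippy's `redundant_closure` lint: a closure that only forwards its argument can be replaced by the function itself. A tiny standalone sketch with hypothetical stand-in types:

```rust
use std::convert::TryFrom;

// Stand-ins for a database row and the type built from it.
struct Row(i64);
struct Notification;

impl<'a> TryFrom<&'a Row> for Notification {
    type Error = &'static str;

    fn try_from(row: &'a Row) -> Result<Self, Self::Error> {
        if row.0 >= 0 { Ok(Notification) } else { Err("negative id") }
    }
}

fn main() {
    let rows = vec![Row(1), Row(2)];

    // The closure form that clippy::redundant_closure flags...
    let with_closure: Result<Vec<Notification>, _> = rows.iter()
        .map(|row| Notification::try_from(row))
        .collect();

    // ...and the equivalent function reference used in the diff above.
    let with_fn_ref: Result<Vec<Notification>, _> = rows.iter()
        .map(Notification::try_from)
        .collect();

    println!("{} / {}", with_closure.is_ok(), with_fn_ref.is_ok());
}
```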

Changed file (path not shown)

@@ -53,7 +53,7 @@ pub fn replace_mentions(
 ) -> String {
     let mention_re = Regex::new(MENTION_RE).unwrap();
     let result = mention_re.replace_all(text, |caps: &Captures| {
-        let acct = pattern_to_acct(&caps, instance_host);
+        let acct = pattern_to_acct(caps, instance_host);
        match mention_map.get(&acct) {
            Some(profile) => {
                // Replace with a link

Changed file (path not shown)

@@ -63,7 +63,7 @@ pub async fn get_home_timeline(
         &[&current_user_id],
     ).await?;
     let posts: Vec<Post> = rows.iter()
-        .map(|row| Post::try_from(row))
+        .map(Post::try_from)
         .collect::<Result<_, _>>()?;
     Ok(posts)
 }
@@ -98,7 +98,7 @@ pub async fn get_posts_by_author(
         &[&account_id],
     ).await?;
     let posts: Vec<Post> = rows.iter()
-        .map(|row| Post::try_from(row))
+        .map(Post::try_from)
         .collect::<Result<_, _>>()?;
     Ok(posts)
 }
@@ -257,9 +257,9 @@ pub async fn get_thread(
         &[&post_id],
     ).await?;
     let posts: Vec<Post> = rows.iter()
-        .map(|row| Post::try_from(row))
+        .map(Post::try_from)
         .collect::<Result<_, _>>()?;
-    if posts.len() == 0 {
+    if posts.is_empty() {
         return Err(DatabaseError::NotFound("post"));
     }
     Ok(posts)

Changed file (path not shown)

@@ -125,7 +125,7 @@ impl PostCreateData {
     pub fn validate(&mut self) -> Result<(), ValidationError> {
         let content_safe = clean_html(&self.content);
         let content_trimmed = content_safe.trim();
-        if content_trimmed == "" {
+        if content_trimmed.is_empty() {
            return Err(ValidationError("post can not be empty"));
        }
        self.content = content_trimmed.to_string();

Changed file (path not shown)

@@ -148,7 +148,7 @@ impl ProfileUpdateData {
                field.value = clean_html(&field.value);
                field
            })
-           .filter(|field| field.name.len() > 0)
+           .filter(|field| !field.name.is_empty())
            .collect();
        // Validate extra fields
        if self.extra_fields.len() >= 10 {

Changed file (path not shown)

@@ -45,7 +45,7 @@ pub async fn get_relationships(
         &[&source_id, &target_ids],
     ).await?;
     let relationships = rows.iter()
-        .map(|row| Relationship::try_from(row))
+        .map(Relationship::try_from)
         .collect::<Result<_, _>>()?;
     Ok(relationships)
 }

Changed file (path not shown)

@@ -123,7 +123,7 @@ pub async fn create_user(
            &[&invite_code],
        ).await?;
        if updated_count == 0 {
-           Err(DatabaseError::NotFound("invite code"))?;
+           return Err(DatabaseError::NotFound("invite code"));
        }
    }
    // Create profile

Changed file (path not shown)

@@ -34,7 +34,7 @@ pub fn serialize_private_key(
 pub fn deserialize_private_key(
     private_key_pem: &str,
 ) -> Result<RsaPrivateKey, rsa::pkcs8::Error> {
-    RsaPrivateKey::from_pkcs8_pem(&private_key_pem)
+    RsaPrivateKey::from_pkcs8_pem(private_key_pem)
 }
 pub fn get_public_key_pem(
@@ -47,7 +47,7 @@ pub fn get_public_key_pem(
 pub fn deserialize_public_key(
     public_key_pem: &str,
 ) -> Result<RsaPublicKey, rsa::pkcs8::Error> {
-    RsaPublicKey::from_public_key_pem(&public_key_pem.trim())
+    RsaPublicKey::from_public_key_pem(public_key_pem.trim())
 }
 pub fn sign_message(

Changed file (path not shown)

@@ -1,6 +1,6 @@
 use std::fs::{remove_file, File};
 use std::io::prelude::*;
-use std::path::PathBuf;
+use std::path::Path;
 use mime_guess::get_mime_extensions_str;
 use mime_sniffer::MimeTypeSniffer;
@@ -18,7 +18,7 @@ pub enum FileError {
     InvalidMediaType,
 }
-pub fn save_file(data: Vec<u8>, output_dir: &PathBuf) -> Result<String, FileError> {
+pub fn save_file(data: Vec<u8>, output_dir: &Path) -> Result<String, FileError> {
     let digest = Sha256::digest(&data);
     let mut file_name = hex::encode(digest);
     let maybe_extension = data.sniff_mime_type()
@@ -34,13 +34,13 @@ pub fn save_file(data: Vec<u8>, output_dir: &PathBuf) -> Result<String, FileError> {
     Ok(file_name)
 }
-fn sniff_media_type(data: &Vec<u8>) -> Option<String> {
+fn sniff_media_type(data: &[u8]) -> Option<String> {
     data.sniff_mime_type().map(|val| val.to_string())
 }
 pub fn save_b64_file(
     b64data: &str,
-    output_dir: &PathBuf,
+    output_dir: &Path,
 ) -> Result<(String, Option<String>), FileError> {
     let data = base64::decode(b64data)?;
     let media_type = sniff_media_type(&data);
@@ -50,7 +50,7 @@ pub fn save_b64_file(
 pub fn save_validated_b64_file(
     b64data: &str,
-    output_dir: &PathBuf,
+    output_dir: &Path,
     media_type_prefix: &str,
 ) -> Result<(String, String), FileError> {
     let data = base64::decode(b64data)?;
@@ -67,7 +67,7 @@ pub fn get_file_url(instance_url: &str, file_name: &str) -> String {
     format!("{}/media/{}", instance_url, file_name)
 }
-pub fn remove_files(files: Vec<String>, from_dir: &PathBuf) -> () {
+pub fn remove_files(files: Vec<String>, from_dir: &Path) -> () {
     for file_name in files {
         let file_path = from_dir.join(&file_name);
         let file_path_str = file_path.to_string_lossy();

Changed file (path not shown)

@@ -6,7 +6,7 @@ use crate::activitypub::views::get_actor_url;
 use crate::activitypub::constants::ACTIVITY_CONTENT_TYPE;
 use crate::config::{Config, Instance};
 use crate::database::{Pool, get_database_client};
-use crate::errors::HttpError;
+use crate::errors::{HttpError, ValidationError};
 use crate::models::users::queries::is_registered_user;
 use super::types::{
     JRD_CONTENT_TYPE,
@@ -24,22 +24,22 @@ async fn get_user_info(
     // https://datatracker.ietf.org/doc/html/rfc7565#section-7
     let uri_regexp = Regex::new(r"acct:(?P<user>\w+)@(?P<instance>.+)").unwrap();
     let uri_caps = uri_regexp.captures(&query_params.resource)
-        .ok_or(HttpError::ValidationError("invalid query target".into()))?;
+        .ok_or(ValidationError("invalid query target"))?;
     let username = uri_caps.name("user")
-        .ok_or(HttpError::ValidationError("invalid query target".into()))?
+        .ok_or(ValidationError("invalid query target"))?
         .as_str();
     let instance_host = uri_caps.name("instance")
-        .ok_or(HttpError::ValidationError("invalid query target".into()))?
+        .ok_or(ValidationError("invalid query target"))?
         .as_str();
     if instance_host != instance.host() {
         // Wrong instance
         return Err(HttpError::NotFoundError("user"));
     }
-    if !is_registered_user(db_client, &username).await? {
+    if !is_registered_user(db_client, username).await? {
         return Err(HttpError::NotFoundError("user"));
     }
-    let actor_url = get_actor_url(&instance.url(), &username);
+    let actor_url = get_actor_url(&instance.url(), username);
     let link = Link {
         rel: "self".to_string(),
         link_type: Some(ACTIVITY_CONTENT_TYPE.to_string()),