Add read-only mode

This commit is contained in:
asonix 2023-07-17 14:24:49 -05:00
parent dd1d509bb1
commit c4c920191f
6 changed files with 87 additions and 11 deletions

View file

@@ -1,6 +1,7 @@
[server]
address = "0.0.0.0:8080"
worker_id = "pict-rs-1"
read_only = false
[client]
pool_size = 100

View file

@@ -71,12 +71,14 @@ impl Args {
media_video_codec,
media_video_audio_codec,
media_filters,
read_only,
store,
}) => {
let server = Server {
address,
api_key,
worker_id,
read_only,
};
let client = Client {
@@ -315,6 +317,8 @@ struct Server {
worker_id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
api_key: Option<String>,
#[serde(skip_serializing_if = "std::ops::Not::not")]
read_only: bool,
}
#[derive(Debug, Default, serde::Serialize)]
@@ -455,10 +459,10 @@ impl Animation {
#[derive(Debug, Default, serde::Serialize)]
#[serde(rename_all = "snake_case")]
struct Video {
#[serde(skip_serializing_if = "Option::is_none")]
enable: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
allow_audio: Option<bool>,
#[serde(skip_serializing_if = "std::ops::Not::not")]
enable: bool,
#[serde(skip_serializing_if = "std::ops::Not::not")]
allow_audio: bool,
#[serde(skip_serializing_if = "Option::is_none")]
max_width: Option<u16>,
#[serde(skip_serializing_if = "Option::is_none")]
@@ -477,8 +481,8 @@ struct Video {
impl Video {
fn set(self) -> Option<Self> {
let any_set = self.enable.is_some()
|| self.allow_audio.is_some()
let any_set = self.enable
|| self.allow_audio
|| self.max_width.is_some()
|| self.max_height.is_some()
|| self.max_area.is_some()
@@ -627,9 +631,10 @@ struct Run {
/// Whether to enable video uploads
#[arg(long)]
media_video_enable: Option<bool>,
media_video_enable: bool,
/// Whether to enable audio in video uploads
media_video_allow_audio: Option<bool>,
#[arg(long)]
media_video_allow_audio: bool,
/// The maximum width, in pixels, for uploaded videos
#[arg(long)]
media_video_max_width: Option<u16>,
@@ -652,6 +657,10 @@ struct Run {
#[arg(long)]
media_video_audio_codec: Option<AudioCodec>,
/// Don't permit ingesting media
#[arg(long)]
read_only: bool,
#[command(subcommand)]
store: Option<RunStore>,
}

View file

@@ -22,6 +22,7 @@ pub(crate) struct Defaults {
struct ServerDefaults {
address: SocketAddr,
worker_id: String,
read_only: bool,
}
#[derive(Clone, Debug, serde::Serialize)]
@@ -156,6 +157,7 @@ impl Default for ServerDefaults {
ServerDefaults {
address: "0.0.0.0:8080".parse().expect("Valid address string"),
worker_id: String::from("pict-rs-1"),
read_only: false,
}
}
}

View file

@@ -97,6 +97,8 @@ pub(crate) struct Server {
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) api_key: Option<String>,
pub(crate) read_only: bool,
}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]

View file

@@ -82,6 +82,9 @@ pub(crate) enum UploadError {
#[error("Error in exiftool")]
Exiftool(#[from] crate::exiftool::ExifError),
#[error("pict-rs is in read-only mode")]
ReadOnly,
#[error("Requested file extension cannot be served by source file")]
InvalidProcessExtension,
@@ -178,7 +181,8 @@ impl ResponseError for Error {
| UploadError::Repo(crate::repo::RepoError::AlreadyClaimed)
| UploadError::Validation(_)
| UploadError::UnsupportedProcessExtension
| UploadError::InvalidProcessExtension,
| UploadError::InvalidProcessExtension
| UploadError::ReadOnly,
) => StatusCode::BAD_REQUEST,
Some(UploadError::Magick(e)) if e.is_client_error() => StatusCode::BAD_REQUEST,
Some(UploadError::Ffmpeg(e)) if e.is_client_error() => StatusCode::BAD_REQUEST,

View file

@@ -128,6 +128,10 @@ async fn ensure_details<R: FullRepo, S: Store + 'static>(
tracing::debug!("details exist");
Ok(details)
} else {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
tracing::debug!("generating new details from {:?}", identifier);
let new_details = Details::from_store(store, &identifier).await?;
tracing::debug!("storing details for {:?}", identifier);
@@ -172,6 +176,10 @@ impl<R: FullRepo, S: Store + 'static> FormData for Upload<R, S> {
Box::pin(
async move {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
ingest::ingest(&**repo, &**store, stream, None, &CONFIG.media).await
}
.instrument(span),
@@ -220,6 +228,10 @@ impl<R: FullRepo, S: Store + 'static> FormData for Import<R, S> {
Box::pin(
async move {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
ingest::ingest(
&**repo,
&**store,
@@ -341,8 +353,14 @@ impl<R: FullRepo, S: Store + 'static> FormData for BackgroundedUpload<R, S> {
let stream = stream.map_err(Error::from);
Box::pin(
async move { Backgrounded::proxy(repo, store, stream).await }
.instrument(span),
async move {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
Backgrounded::proxy(repo, store, stream).await
}
.instrument(span),
)
})),
)
@@ -457,6 +475,10 @@ async fn download<R: FullRepo + 'static, S: Store + 'static>(
store: web::Data<S>,
query: web::Query<UrlQuery>,
) -> Result<HttpResponse, Error> {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
let res = client.get(&query.url).send().await?;
if !res.status().is_success() {
@@ -531,6 +553,10 @@ async fn delete<R: FullRepo>(
repo: web::Data<R>,
path_entries: web::Path<(String, String)>,
) -> Result<HttpResponse, Error> {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
let (token, alias) = path_entries.into_inner();
let token = DeleteToken::from_existing(&token);
@@ -666,6 +692,10 @@ async fn process<R: FullRepo, S: Store + 'static>(
tracing::debug!("details exist");
details
} else {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
tracing::debug!("generating new details from {:?}", identifier);
let new_details = Details::from_store(&store, &identifier).await?;
tracing::debug!("storing details for {:?}", identifier);
@@ -683,6 +713,10 @@ async fn process<R: FullRepo, S: Store + 'static>(
return ranged_file_resp(&store, identifier, range, details, not_found).await;
}
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
let original_details = ensure_details(&repo, &store, &alias).await?;
let (details, bytes) = generate::generate(
@@ -768,6 +802,10 @@ async fn process_head<R: FullRepo, S: Store + 'static>(
tracing::debug!("details exist");
details
} else {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
tracing::debug!("generating new details from {:?}", identifier);
let new_details = Details::from_store(&store, &identifier).await?;
tracing::debug!("storing details for {:?}", identifier);
@@ -811,6 +849,10 @@ async fn process_backgrounded<R: FullRepo, S: Store>(
return Ok(HttpResponse::Accepted().finish());
}
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
queue_generate(&repo, target_format, source, process_path, process_args).await?;
Ok(HttpResponse::Accepted().finish())
@@ -1029,6 +1071,10 @@ fn srv_head(
#[tracing::instrument(name = "Spawning variant cleanup", skip(repo))]
async fn clean_variants<R: FullRepo>(repo: web::Data<R>) -> Result<HttpResponse, Error> {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
queue::cleanup_all_variants(&repo).await?;
Ok(HttpResponse::NoContent().finish())
}
@@ -1043,6 +1089,10 @@ async fn set_not_found<R: FullRepo>(
json: web::Json<AliasQuery>,
repo: web::Data<R>,
) -> Result<HttpResponse, Error> {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
let alias = json.into_inner().alias;
if repo.hash(&alias).await?.is_none() {
@@ -1063,6 +1113,10 @@ async fn purge<R: FullRepo>(
query: web::Query<AliasQuery>,
repo: web::Data<R>,
) -> Result<HttpResponse, Error> {
if CONFIG.server.read_only {
return Err(UploadError::ReadOnly.into());
}
let alias = query.into_inner().alias;
let aliases = repo.aliases_from_alias(&alias).await?;
@@ -1481,6 +1535,10 @@ pub async fn run() -> color_eyre::Result<()> {
}
}
if CONFIG.server.read_only {
tracing::warn!("Launching in READ ONLY mode");
}
match CONFIG.store.clone() {
config::Store::Filesystem(config::Filesystem { path }) => {
repo.migrate_identifiers().await?;