https://git.asonix.dog/asonix/pict-rs

Add read-only mode

commit c4c920191f
parent dd1d509bb1
asonix 2023-07-17 14:24:49 -05:00

6 changed files with 87 additions and 11 deletions
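Read-only mode can be switched on either in the config file or at launch. A minimal sketch of both, based on the default added in this diff; the "--read-only" spelling assumes clap's usual kebab-case rendering of the read_only field, and the "run" subcommand matches the Run args struct changed below:

    # pict-rs.toml
    [server]
    address = "0.0.0.0:8080"
    worker_id = "pict-rs-1"
    read_only = true

    # or, equivalently, at launch:
    # $ pict-rs run --read-only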

File 1 of 6:

@@ -1,6 +1,7 @@
 [server]
 address = "0.0.0.0:8080"
 worker_id = "pict-rs-1"
+read_only = false

 [client]
 pool_size = 100

File 2 of 6:

@@ -71,12 +71,14 @@ impl Args {
                 media_video_codec,
                 media_video_audio_codec,
                 media_filters,
+                read_only,
                 store,
             }) => {
                 let server = Server {
                     address,
                     api_key,
                     worker_id,
+                    read_only,
                 };

                 let client = Client {
@@ -315,6 +317,8 @@ struct Server {
     worker_id: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     api_key: Option<String>,
+    #[serde(skip_serializing_if = "std::ops::Not::not")]
+    read_only: bool,
 }

 #[derive(Debug, Default, serde::Serialize)]
@@ -455,10 +459,10 @@ impl Animation {
 #[derive(Debug, Default, serde::Serialize)]
 #[serde(rename_all = "snake_case")]
 struct Video {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    enable: Option<bool>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    allow_audio: Option<bool>,
+    #[serde(skip_serializing_if = "std::ops::Not::not")]
+    enable: bool,
+    #[serde(skip_serializing_if = "std::ops::Not::not")]
+    allow_audio: bool,
     #[serde(skip_serializing_if = "Option::is_none")]
     max_width: Option<u16>,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -477,8 +481,8 @@ struct Video {
 impl Video {
     fn set(self) -> Option<Self> {
-        let any_set = self.enable.is_some()
-            || self.allow_audio.is_some()
+        let any_set = self.enable
+            || self.allow_audio
             || self.max_width.is_some()
             || self.max_height.is_some()
             || self.max_area.is_some()
@@ -627,9 +631,10 @@ struct Run {
     /// Whether to enable video uploads
     #[arg(long)]
-    media_video_enable: Option<bool>,
+    media_video_enable: bool,

     /// Whether to enable audio in video uploads
-    media_video_allow_audio: Option<bool>,
+    #[arg(long)]
+    media_video_allow_audio: bool,
     /// The maximum width, in pixels, for uploaded videos
     #[arg(long)]
     media_video_max_width: Option<u16>,
@@ -652,6 +657,10 @@ struct Run {
     #[arg(long)]
     media_video_audio_codec: Option<AudioCodec>,

+    /// Don't permit ingesting media
+    #[arg(long)]
+    read_only: bool,
+
     #[command(subcommand)]
     store: Option<RunStore>,
 }
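The skip_serializing_if = "std::ops::Not::not" attribute used for the new boolean fields is a standard serde idiom: serde passes the field by reference, Not is implemented for &bool, and Not::not(&false) returns true, so a false value is omitted entirely when the config is serialized. A self-contained sketch of the behavior (illustrative names only; assumes serde and serde_json as dependencies):

    use serde::Serialize;

    #[derive(Serialize)]
    struct Flags {
        // Skipped whenever the value is false: Not::not(&false) == true.
        #[serde(skip_serializing_if = "std::ops::Not::not")]
        read_only: bool,
    }

    fn main() {
        // A true flag is serialized; a false flag disappears from the output.
        assert_eq!(
            serde_json::to_string(&Flags { read_only: true }).unwrap(),
            r#"{"read_only":true}"#
        );
        assert_eq!(
            serde_json::to_string(&Flags { read_only: false }).unwrap(),
            "{}"
        );
    }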

File 3 of 6:

@@ -22,6 +22,7 @@ pub(crate) struct Defaults {
 struct ServerDefaults {
     address: SocketAddr,
     worker_id: String,
+    read_only: bool,
 }

 #[derive(Clone, Debug, serde::Serialize)]
@@ -156,6 +157,7 @@ impl Default for ServerDefaults {
         ServerDefaults {
             address: "0.0.0.0:8080".parse().expect("Valid address string"),
             worker_id: String::from("pict-rs-1"),
+            read_only: false,
         }
     }
 }

File 4 of 6:

@@ -97,6 +97,8 @@ pub(crate) struct Server {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) api_key: Option<String>,
+
+    pub(crate) read_only: bool,
 }

 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]

File 5 of 6:

@@ -82,6 +82,9 @@ pub(crate) enum UploadError {
     #[error("Error in exiftool")]
     Exiftool(#[from] crate::exiftool::ExifError),

+    #[error("pict-rs is in read-only mode")]
+    ReadOnly,
+
     #[error("Requested file extension cannot be served by source file")]
     InvalidProcessExtension,
@@ -178,7 +181,8 @@ impl ResponseError for Error {
             | UploadError::Repo(crate::repo::RepoError::AlreadyClaimed)
             | UploadError::Validation(_)
             | UploadError::UnsupportedProcessExtension
-            | UploadError::InvalidProcessExtension,
+            | UploadError::InvalidProcessExtension
+            | UploadError::ReadOnly,
         ) => StatusCode::BAD_REQUEST,
         Some(UploadError::Magick(e)) if e.is_client_error() => StatusCode::BAD_REQUEST,
         Some(UploadError::Ffmpeg(e)) if e.is_client_error() => StatusCode::BAD_REQUEST,
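With this mapping, a write attempt against a read-only instance fails fast with 400 Bad Request carrying the "pict-rs is in read-only mode" message. A quick way to observe it from a shell; the host, port, and POST /image upload path here are assumptions based on pict-rs's documented API, not part of this diff:

    # assumes a read-only pict-rs instance listening on localhost:8080
    $ curl -i -X POST -F 'images[]=@kitten.png' http://localhost:8080/image
    # expected: HTTP/1.1 400 Bad Request, with the ReadOnly error message in the body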

File 6 of 6:

@@ -128,6 +128,10 @@ async fn ensure_details<R: FullRepo, S: Store + 'static>(
         tracing::debug!("details exist");
         Ok(details)
     } else {
+        if CONFIG.server.read_only {
+            return Err(UploadError::ReadOnly.into());
+        }
+
         tracing::debug!("generating new details from {:?}", identifier);
         let new_details = Details::from_store(store, &identifier).await?;
         tracing::debug!("storing details for {:?}", identifier);
@@ -172,6 +176,10 @@ impl<R: FullRepo, S: Store + 'static> FormData for Upload<R, S> {
         Box::pin(
             async move {
+                if CONFIG.server.read_only {
+                    return Err(UploadError::ReadOnly.into());
+                }
+
                 ingest::ingest(&**repo, &**store, stream, None, &CONFIG.media).await
             }
             .instrument(span),
@@ -220,6 +228,10 @@ impl<R: FullRepo, S: Store + 'static> FormData for Import<R, S> {
         Box::pin(
             async move {
+                if CONFIG.server.read_only {
+                    return Err(UploadError::ReadOnly.into());
+                }
+
                 ingest::ingest(
                     &**repo,
                     &**store,
@@ -341,8 +353,14 @@ impl<R: FullRepo, S: Store + 'static> FormData for BackgroundedUpload<R, S> {
         let stream = stream.map_err(Error::from);

         Box::pin(
-            async move { Backgrounded::proxy(repo, store, stream).await }
-                .instrument(span),
+            async move {
+                if CONFIG.server.read_only {
+                    return Err(UploadError::ReadOnly.into());
+                }
+
+                Backgrounded::proxy(repo, store, stream).await
+            }
+            .instrument(span),
         )
     })),
 )
@@ -457,6 +475,10 @@ async fn download<R: FullRepo + 'static, S: Store + 'static>(
     store: web::Data<S>,
     query: web::Query<UrlQuery>,
 ) -> Result<HttpResponse, Error> {
+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     let res = client.get(&query.url).send().await?;

     if !res.status().is_success() {
@@ -531,6 +553,10 @@ async fn delete<R: FullRepo>(
     repo: web::Data<R>,
     path_entries: web::Path<(String, String)>,
 ) -> Result<HttpResponse, Error> {
+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     let (token, alias) = path_entries.into_inner();

     let token = DeleteToken::from_existing(&token);
@@ -666,6 +692,10 @@ async fn process<R: FullRepo, S: Store + 'static>(
         tracing::debug!("details exist");
         details
     } else {
+        if CONFIG.server.read_only {
+            return Err(UploadError::ReadOnly.into());
+        }
+
         tracing::debug!("generating new details from {:?}", identifier);
         let new_details = Details::from_store(&store, &identifier).await?;
         tracing::debug!("storing details for {:?}", identifier);
@@ -683,6 +713,10 @@ async fn process<R: FullRepo, S: Store + 'static>(
         return ranged_file_resp(&store, identifier, range, details, not_found).await;
     }

+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     let original_details = ensure_details(&repo, &store, &alias).await?;

     let (details, bytes) = generate::generate(
@@ -768,6 +802,10 @@ async fn process_head<R: FullRepo, S: Store + 'static>(
         tracing::debug!("details exist");
         details
     } else {
+        if CONFIG.server.read_only {
+            return Err(UploadError::ReadOnly.into());
+        }
+
         tracing::debug!("generating new details from {:?}", identifier);
         let new_details = Details::from_store(&store, &identifier).await?;
         tracing::debug!("storing details for {:?}", identifier);
@@ -811,6 +849,10 @@ async fn process_backgrounded<R: FullRepo, S: Store>(
         return Ok(HttpResponse::Accepted().finish());
     }

+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     queue_generate(&repo, target_format, source, process_path, process_args).await?;

     Ok(HttpResponse::Accepted().finish())
@@ -1029,6 +1071,10 @@ fn srv_head(
 #[tracing::instrument(name = "Spawning variant cleanup", skip(repo))]
 async fn clean_variants<R: FullRepo>(repo: web::Data<R>) -> Result<HttpResponse, Error> {
+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     queue::cleanup_all_variants(&repo).await?;
     Ok(HttpResponse::NoContent().finish())
 }
@@ -1043,6 +1089,10 @@ async fn set_not_found<R: FullRepo>(
     json: web::Json<AliasQuery>,
     repo: web::Data<R>,
 ) -> Result<HttpResponse, Error> {
+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     let alias = json.into_inner().alias;

     if repo.hash(&alias).await?.is_none() {
@@ -1063,6 +1113,10 @@ async fn purge<R: FullRepo>(
     query: web::Query<AliasQuery>,
     repo: web::Data<R>,
 ) -> Result<HttpResponse, Error> {
+    if CONFIG.server.read_only {
+        return Err(UploadError::ReadOnly.into());
+    }
+
     let alias = query.into_inner().alias;

     let aliases = repo.aliases_from_alias(&alias).await?;
@@ -1481,6 +1535,10 @@ pub async fn run() -> color_eyre::Result<()> {
         }
     }

+    if CONFIG.server.read_only {
+        tracing::warn!("Launching in READ ONLY mode");
+    }
+
     match CONFIG.store.clone() {
         config::Store::Filesystem(config::Filesystem { path }) => {
             repo.migrate_identifiers().await?;
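The same four-line guard now opens every write path in this file. A hypothetical refactor, not part of this commit, could centralize it; a sketch reusing the commit's own CONFIG, Error, and UploadError names:

    // Hypothetical helper, not in this commit: one shared guard for write paths.
    fn enforce_writable() -> Result<(), Error> {
        if CONFIG.server.read_only {
            return Err(UploadError::ReadOnly.into());
        }
        Ok(())
    }

    // Each write handler would then begin with:
    //     enforce_writable()?;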