
Dry download client logic

Aode (lion) 2022-04-08 13:16:22 -05:00
parent 5f19b63166
commit 7436f15267


@@ -250,22 +250,7 @@ async fn download<R: FullRepo + 'static, S: Store + 'static>(
     store: web::Data<S>,
     query: web::Query<UrlQuery>,
 ) -> Result<HttpResponse, Error> {
-    if query.backgrounded {
-        do_download_backgrounded(client, repo, store, &query.url, query.ephemeral).await
-    } else {
-        do_download_inline(client, repo, store, &query.url, query.ephemeral).await
-    }
-}
-
-#[instrument(name = "Downloading file inline", skip(client, repo))]
-async fn do_download_inline<R: FullRepo + 'static, S: Store + 'static>(
-    client: web::Data<Client>,
-    repo: web::Data<R>,
-    store: web::Data<S>,
-    url: &str,
-    is_cached: bool,
-) -> Result<HttpResponse, Error> {
-    let res = client.get(url).send().await?;
+    let res = client.get(&query.url).send().await?;
 
     if !res.status().is_success() {
         return Err(UploadError::Download(res.status()).into());
@@ -275,6 +260,20 @@ async fn do_download_inline<R: FullRepo + 'static, S: Store + 'static>(
         .map_err(Error::from)
         .limit((CONFIG.media.max_file_size * MEGABYTES) as u64);
 
+    if query.backgrounded {
+        do_download_backgrounded(stream, repo, store, query.ephemeral).await
+    } else {
+        do_download_inline(stream, repo, store, query.ephemeral).await
+    }
+}
+
+#[instrument(name = "Downloading file inline", skip(stream))]
+async fn do_download_inline<R: FullRepo + 'static, S: Store + 'static>(
+    stream: impl Stream<Item = Result<web::Bytes, Error>>,
+    repo: web::Data<R>,
+    store: web::Data<S>,
+    is_cached: bool,
+) -> Result<HttpResponse, Error> {
     let mut session = ingest::ingest(&**repo, &**store, stream, None, true, is_cached).await?;
 
     let alias = session.alias().expect("alias should exist").to_owned();
@@ -294,24 +293,13 @@ async fn do_download_inline<R: FullRepo + 'static, S: Store + 'static>(
     })))
 }
 
-#[instrument(name = "Downloading file in background", skip(client))]
+#[instrument(name = "Downloading file in background", skip(stream))]
 async fn do_download_backgrounded<R: FullRepo + 'static, S: Store + 'static>(
-    client: web::Data<Client>,
+    stream: impl Stream<Item = Result<web::Bytes, Error>>,
     repo: web::Data<R>,
     store: web::Data<S>,
-    url: &str,
     is_cached: bool,
 ) -> Result<HttpResponse, Error> {
-    let res = client.get(url).send().await?;
-
-    if !res.status().is_success() {
-        return Err(UploadError::Download(res.status()).into());
-    }
-
-    let stream = res
-        .map_err(Error::from)
-        .limit((CONFIG.media.max_file_size * MEGABYTES) as u64);
-
     let backgrounded = Backgrounded::proxy((**repo).clone(), (**store).clone(), stream).await?;
 
     let upload_id = backgrounded.upload_id().expect("Upload ID exists");
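Taken together, the change hoists the duplicated HTTP fetch and size-limit handling out of do_download_inline and do_download_backgrounded and into the shared download handler, which is what the "dry" in the commit title refers to; the two helpers now receive a prepared byte stream instead of a client and a URL. Below is a rough sketch of how the handler reads after this commit, assembled from the hunks above. It is not a verbatim excerpt: the parameter list and items such as Client, UrlQuery, UploadError, CONFIG, and MEGABYTES come from the surrounding pict-rs source and are assumed here, and the `let stream = res` binding is inferred from the removed code in the last hunk.

async fn download<R: FullRepo + 'static, S: Store + 'static>(
    client: web::Data<Client>,
    repo: web::Data<R>,
    store: web::Data<S>,
    query: web::Query<UrlQuery>,
) -> Result<HttpResponse, Error> {
    // Fetch the remote file once, up front.
    let res = client.get(&query.url).send().await?;

    if !res.status().is_success() {
        return Err(UploadError::Download(res.status()).into());
    }

    // Turn the response body into a size-limited byte stream
    // (the .limit(...) call is visible as context in the second hunk).
    let stream = res
        .map_err(Error::from)
        .limit((CONFIG.media.max_file_size * MEGABYTES) as u64);

    // Dispatch the prepared stream to whichever helper was requested.
    if query.backgrounded {
        do_download_backgrounded(stream, repo, store, query.ephemeral).await
    } else {
        do_download_inline(stream, repo, store, query.ephemeral).await
    }
}

The bodies of do_download_inline (ingest::ingest) and do_download_backgrounded (Backgrounded::proxy) are otherwise unchanged; only the fetching and stream construction moved.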