Mirror of https://git.asonix.dog/asonix/pict-rs (synced 2024-12-22 19:31:35 +00:00)

Merge pull request 'asonix/postgres-repo' (#39) from asonix/postgres-repo into main
Reviewed-on: https://git.asonix.dog/asonix/pict-rs/pulls/39
Commit: 509a52ec6b

64 changed files with 4297 additions and 1374 deletions
389 Cargo.lock (generated)

New packages: barrel 0.7.0, deadpool 0.9.5, deadpool-runtime 0.1.2, diesel 2.1.1, diesel-async 0.4.1, diesel-derive-enum 2.1.0, diesel_derives 2.1.1, diesel_table_macro_syntax 0.1.0, fallible-iterator 0.2.0, phf 0.11.2, phf_shared 0.11.2, postgres 0.19.7, postgres-protocol 0.6.6, postgres-types 0.2.6, refinery 0.8.10, refinery-core 0.8.10, refinery-macros 0.8.10, retain_mut 0.1.9, same-file 1.0.6, scoped-futures 0.1.3, siphasher 0.3.11, stringprep 0.1.3, tokio-postgres 0.7.10, walkdir 2.3.3, whoami 1.4.1, winapi-util 0.1.5.

Removed packages: tracing-futures 0.2.5.

Version bumps: syn 2.0.29 -> 2.0.31 (all "syn 2.0.29" dependency entries updated to match), memchr 2.6.2 -> 2.6.3, object 0.32.0 -> 0.32.1, regex 1.9.4 -> 1.9.5, regex-automata 0.3.7 -> 0.3.8, thiserror and thiserror-impl 1.0.47 -> 1.0.48, tracing-opentelemetry 0.20.0 -> 0.21.0 (now also depends on opentelemetry_sdk and smallvec).

Source changes: tracing-actix-web 0.7.6 is now pulled from git+https://github.com/asonix/tracing-actix-web (branch asonix/tracing-opentelemetry-021) instead of the crates.io registry.

pict-rs 0.5.0-alpha.17 itself gains dependencies on barrel, deadpool, diesel, diesel-async, diesel-derive-enum, refinery, and tokio-postgres, and drops tracing-futures.
14 Cargo.toml

@ -20,12 +20,17 @@ actix-server = "2.0.0"
 actix-web = { version = "4.0.0", default-features = false }
 anyhow = "1.0"
 async-trait = "0.1.51"
+barrel = { version = "0.7.0", features = ["pg"] }
 base64 = "0.21.0"
 clap = { version = "4.0.2", features = ["derive"] }
 color-eyre = "0.6"
 config = "0.13.0"
 console-subscriber = "0.1"
 dashmap = "5.1.0"
+deadpool = { version = "0.9.5", features = ["rt_tokio_1"] }
+diesel = { version = "2.1.1", features = ["postgres_backend", "serde_json", "time", "uuid"] }
+diesel-async = { version = "0.4.1", features = ["postgres", "deadpool"] }
+diesel-derive-enum = { version = "2.1.0", features = ["postgres"] }
 flume = "0.11.0"
 futures-core = "0.3"
 hex = "0.4.3"
@ -39,6 +44,7 @@ opentelemetry = { version = "0.20", features = ["rt-tokio"] }
 opentelemetry-otlp = "0.13"
 pin-project-lite = "0.2.7"
 quick-xml = { version = "0.30.0", features = ["serialize"] }
+refinery = { version = "0.8.10", features = ["tokio-postgres", "postgres"] }
 reqwest = { version = "0.11.18", default-features = false, features = ["json", "rustls-tls", "stream"] }
 reqwest-middleware = "0.2.2"
 reqwest-tracing = { version = "0.4.5" }
@ -54,6 +60,7 @@ storage-path-generator = "0.1.0"
 thiserror = "1.0"
 time = { version = "0.3.0", features = ["serde", "serde-well-known"] }
 tokio = { version = "1", features = ["full", "tracing"] }
+tokio-postgres = { version = "0.7.10", features = ["with-uuid-1", "with-time-0_3", "with-serde_json-1"] }
 tokio-uring = { version = "0.4", optional = true, features = ["bytes"] }
 tokio-util = { version = "0.7", default-features = false, features = [
   "codec",
@ -62,9 +69,8 @@ tokio-util = { version = "0.7", default-features = false, features = [
 toml = "0.7.0"
 tracing = "0.1.15"
 tracing-error = "0.2.0"
-tracing-futures = "0.2.4"
 tracing-log = "0.1.2"
-tracing-opentelemetry = "0.20"
+tracing-opentelemetry = "0.21"
 tracing-subscriber = { version = "0.3.0", features = [
   "ansi",
   "env-filter",
@ -79,4 +85,6 @@ uuid = { version = "1", features = ["serde", "std", "v4", "v7"] }
 [dependencies.tracing-actix-web]
 version = "0.7.5"
 default-features = false
-features = ["opentelemetry_0_20"]
+features = ["emit_event_on_error", "opentelemetry_0_20"]
+git = "https://github.com/asonix/tracing-actix-web"
+branch = "asonix/tracing-opentelemetry-021"
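The diesel-async + deadpool pairing added above provides the async connection pooling behind the new postgres repo. A minimal sketch of building such a pool with these crates; the function name, pool size, and URL here are illustrative, not pict-rs' actual setup:

```rust
use diesel_async::pooled_connection::deadpool::Pool;
use diesel_async::pooled_connection::AsyncDieselConnectionManager;
use diesel_async::AsyncPgConnection;

// Hypothetical helper: build a deadpool-backed diesel-async pool from a postgres URL.
async fn build_pool(url: &str) -> Result<Pool<AsyncPgConnection>, Box<dyn std::error::Error>> {
    let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new(url);
    let pool = Pool::builder(manager).max_size(8).build()?;

    // Checking out a connection is async; the guard derefs to an AsyncPgConnection.
    let _conn = pool.get().await?;

    Ok(pool)
}
```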
16 dev.toml

@ -11,8 +11,12 @@ targets = 'warn,tracing_actix_web=info,actix_server=info,actix_web=info'
 buffer_capacity = 102400
 
 [tracing.opentelemetry]
+url = 'http://127.0.0.1:4317'
 service_name = 'pict-rs'
-targets = 'info'
+targets = 'info,pict_rs=debug'
 
+[metrics]
+prometheus_address = "127.0.0.1:8070"
+
 [old_repo]
 path = 'data/sled-repo-local'
@ -59,10 +63,12 @@ crf_2160 = 15
 crf_max = 12
 
 [repo]
-type = 'sled'
-path = 'data/sled-repo-local'
-cache_capacity = 67108864
-export_path = "data/exports-local"
+type = 'postgres'
+url = 'postgres://pictrs:1234@localhost:5432/pictrs'
+
+# [repo]
+# type = 'sled'
+# path = 'data/sled-repo-local'
 
 [store]
 type = 'filesystem'
@ -13,7 +13,7 @@ services:
 #      - "6669:6669"
 #    environment:
 #      - PICTRS__TRACING__CONSOLE__ADDRESS=0.0.0.0:6669
-#      - PICTRS__TRACING__OPENTELEMETRY__URL=http://otel:4137
+#      - PICTRS__TRACING__OPENTELEMETRY__URL=http://jaeger:4317
 #      - RUST_BACKTRACE=1
 #    stdin_open: true
 #    tty: true
@ -27,7 +27,7 @@ services:
 #      - "8081:8081"
 #    environment:
 #      - PICTRS_PROXY_UPSTREAM=http://pictrs:8080
-#      - PICTRS_PROXY_OPENTELEMETRY_URL=http://otel:4137
+#      - PICTRS_PROXY_OPENTELEMETRY_URL=http://jaeger:4317
 
   minio:
     image: quay.io/minio/minio
@ -39,7 +39,7 @@ services:
       - ./storage/minio:/mnt
 
   garage:
-    image: dxflrs/garage:v0.8.1
+    image: dxflrs/garage:v0.8.3
    ports:
      - "3900:3900"
      - "3901:3901"
@ -47,26 +47,35 @@ services:
      - "3903:3903"
      - "3904:3904"
    environment:
-      - RUST_LOG=debug
+      - RUST_LOG=info
    volumes:
      - ./storage/garage:/mnt
      - ./garage.toml:/etc/garage.toml
 
-  otel:
-    image: otel/opentelemetry-collector:latest
-    command: --config otel-local-config.yaml
-    volumes:
-      - type: bind
-        source: ./otel.yml
-        target: /otel-local-config.yaml
-    restart: always
-    depends_on:
-      - jaeger
+  postgres:
+    image: postgres:15-alpine
+    ports:
+      - "5432:5432"
+    environment:
+      - PGDATA=/var/lib/postgresql/data
+      - POSTGRES_DB=pictrs
+      - POSTGRES_USER=pictrs
+      - POSTGRES_PASSWORD=1234
+    volumes:
+      - ./storage/postgres:/var/lib/postgresql/data
 
   jaeger:
-    image: jaegertracing/all-in-one:1
+    image: jaegertracing/all-in-one:1.48
    ports:
+      - "6831:6831/udp"
+      - "6832:6832/udp"
+      - "5778:5778"
+      - "4317:4317"
+      - "4138:4138"
      - "14250:14250"
+      - "14268:14268"
+      - "14269:14269"
+      - "9411:9411"
      # To view traces, visit http://localhost:16686
      - "16686:16686"
    restart: always
@ -88,13 +88,13 @@ methods:
 
 ```sql
 CREATE TABLE aliases (
-    alias VARCHAR(30) PRIMARY KEY,
+    alias VARCHAR(50) PRIMARY KEY,
     hash BYTEA NOT NULL REFERENCES hashes(hash) ON DELETE CASCADE,
     delete_token VARCHAR(30) NOT NULL
 );
 
 
-CREATE INDEX alias_hashes_index ON aliases (hash);
+CREATE INDEX aliases_hash_index ON aliases (hash);
 ```
 
 
@ -155,7 +155,7 @@ methods:
 CREATE TYPE job_status AS ENUM ('new', 'running');
 
 
-CREATE TABLE queue (
+CREATE TABLE job_queue (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
     queue VARCHAR(30) NOT NULL,
     job JSONB NOT NULL,
@ -165,20 +165,20 @@ CREATE TABLE queue (
 );
 
 
-CREATE INDEX queue_status_index ON queue INCLUDE status;
-CREATE INDEX heartbeat_index ON queue
+CREATE INDEX queue_status_index ON queue INCLUDE queue, status;
+CREATE INDEX heartbeat_index ON queue INCLUDE heartbeat;
 ```
 
 claiming a job can be
 ```sql
-UPDATE queue SET status = 'new', heartbeat = NULL
+UPDATE job_queue SET status = 'new', heartbeat = NULL
 WHERE
     heartbeat IS NOT NULL AND heartbeat < NOW - INTERVAL '2 MINUTES';
 
-UPDATE queue SET status = 'running', heartbeat = CURRENT_TIMESTAMP
+UPDATE job_queue SET status = 'running', heartbeat = CURRENT_TIMESTAMP
 WHERE id = (
     SELECT id
-    FROM queue
+    FROM job_queue
     WHERE status = 'new' AND queue = '$QUEUE'
     ORDER BY queue_time ASC
     FOR UPDATE SKIP LOCKED
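As a side note on the queue schema above, a claim like this could be driven from Rust with tokio-postgres (one of the crates this PR adds). The connection string and the LIMIT/RETURNING tail are illustrative and abbreviated, not copied from pict-rs' queue code:

```rust
use tokio_postgres::NoTls;

// Hypothetical sketch: reset stale jobs, then claim one row using SKIP LOCKED.
async fn claim_one(queue: &str) -> Result<Option<uuid::Uuid>, tokio_postgres::Error> {
    let (client, connection) =
        tokio_postgres::connect("postgres://pictrs:1234@localhost:5432/pictrs", NoTls).await?;

    // The connection object must be polled for the client to make progress.
    tokio::spawn(async move {
        let _ = connection.await;
    });

    client
        .execute(
            "UPDATE job_queue SET status = 'new', heartbeat = NULL \
             WHERE heartbeat IS NOT NULL AND heartbeat < NOW() - INTERVAL '2 MINUTES'",
            &[],
        )
        .await?;

    let row = client
        .query_opt(
            "UPDATE job_queue SET status = 'running', heartbeat = CURRENT_TIMESTAMP \
             WHERE id = ( \
                 SELECT id FROM job_queue \
                 WHERE status = 'new' AND queue = $1 \
                 ORDER BY queue_time ASC \
                 FOR UPDATE SKIP LOCKED \
                 LIMIT 1 \
             ) RETURNING id",
            &[&queue],
        )
        .await?;

    Ok(row.map(|row| row.get("id")))
}
```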
@ -32,6 +32,7 @@
   cargo
   cargo-outdated
   clippy
+  diesel-cli
   exiftool
   ffmpeg_6-full
   garage
7 scripts/update-schema.sh Executable file

@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+diesel \
+    --database-url 'postgres://pictrs:1234@localhost:5432/pictrs' \
+    print-schema \
+    --custom-type-derives "diesel::query_builder::QueryId" \
+    > src/repo/postgres/schema.rs
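The script regenerates the diesel schema module from the live database. diesel print-schema emits table! invocations plus Rust definitions for custom SQL types, and the --custom-type-derives flag adds QueryId to those generated types. The snippet below is a hand-written illustration of that output shape, with columns guessed from the planning document above rather than copied from the real generated file:

```rust
// Illustrative shape of `diesel print-schema` output only; the generated file will differ.
pub mod sql_types {
    #[derive(diesel::query_builder::QueryId, diesel::sql_types::SqlType)]
    #[diesel(postgres_type(name = "job_status"))]
    pub struct JobStatus;
}

diesel::table! {
    aliases (alias) {
        #[max_length = 50]
        alias -> Varchar,
        hash -> Bytea,
        #[max_length = 30]
        delete_token -> Varchar,
    }
}
```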
@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error::Error,
     repo::{ArcRepo, UploadId},
@ -9,19 +11,13 @@ use futures_core::Stream;
 use mime::APPLICATION_OCTET_STREAM;
 use tracing::{Instrument, Span};
 
-pub(crate) struct Backgrounded<S>
-where
-    S: Store,
-{
+pub(crate) struct Backgrounded {
     repo: ArcRepo,
-    identifier: Option<S::Identifier>,
+    identifier: Option<Arc<str>>,
     upload_id: Option<UploadId>,
 }
 
-impl<S> Backgrounded<S>
-where
-    S: Store,
-{
+impl Backgrounded {
     pub(crate) fn disarm(mut self) {
         let _ = self.identifier.take();
         let _ = self.upload_id.take();
@ -31,12 +27,13 @@ where
         self.upload_id
     }
 
-    pub(crate) fn identifier(&self) -> Option<&S::Identifier> {
+    pub(crate) fn identifier(&self) -> Option<&Arc<str>> {
         self.identifier.as_ref()
     }
 
-    pub(crate) async fn proxy<P>(repo: ArcRepo, store: S, stream: P) -> Result<Self, Error>
+    pub(crate) async fn proxy<S, P>(repo: ArcRepo, store: S, stream: P) -> Result<Self, Error>
     where
+        S: Store,
         P: Stream<Item = Result<Bytes, Error>> + Unpin + 'static,
     {
         let mut this = Self {
@ -50,8 +47,9 @@ where
         Ok(this)
     }
 
-    async fn do_proxy<P>(&mut self, store: S, stream: P) -> Result<(), Error>
+    async fn do_proxy<S, P>(&mut self, store: S, stream: P) -> Result<(), Error>
     where
+        S: Store,
         P: Stream<Item = Result<Bytes, Error>> + Unpin + 'static,
     {
         self.upload_id = Some(self.repo.create_upload().await?);
@ -68,10 +66,7 @@ where
     }
 }
 
-impl<S> Drop for Backgrounded<S>
-where
-    S: Store,
-{
+impl Drop for Backgrounded {
     fn drop(&mut self) {
         let any_items = self.identifier.is_some() || self.upload_id.is_some();
 
@ -87,14 +82,12 @@ where
 
             let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Backgrounded cleanup Identifier", identifier = ?identifier);
 
-            tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-                actix_rt::spawn(
-                    async move {
-                        let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
-                    }
-                    .instrument(cleanup_span),
-                )
-            });
+            crate::sync::spawn(
+                async move {
+                    let _ = crate::queue::cleanup_identifier(&repo, &identifier).await;
+                }
+                .instrument(cleanup_span),
+            );
         }
 
         if let Some(upload_id) = self.upload_id {
@ -102,14 +95,12 @@ where
 
             let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Backgrounded cleanup Upload ID", upload_id = ?upload_id);
 
-            tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-                actix_rt::spawn(
-                    async move {
-                        let _ = repo.claim(upload_id).await;
-                    }
-                    .instrument(cleanup_span),
-                )
-            });
+            crate::sync::spawn(
+                async move {
+                    let _ = repo.claim(upload_id).await;
+                }
+                .instrument(cleanup_span),
+            );
         }
     }
 }
@ -12,8 +12,8 @@ use defaults::Defaults;
 
 pub(crate) use commandline::Operation;
 pub(crate) use file::{
-    Animation, ConfigFile as Configuration, Image, Media, ObjectStorage, OpenTelemetry, Repo, Sled,
-    Store, Tracing, Video,
+    Animation, ConfigFile as Configuration, Image, Media, ObjectStorage, OpenTelemetry, Postgres,
+    Repo, Sled, Store, Tracing, Video,
 };
 pub(crate) use primitives::{Filesystem, LogFormat};
@ -369,8 +369,64 @@ impl Args {
                         from: from.into(),
                         to: to.into(),
                     },
-                    config_file,
                     save_to,
+                    config_file,
+                },
+                MigrateRepoTo::Postgres(MigratePostgresInner { to }) => Output {
+                    config_format: ConfigFormat {
+                        server,
+                        client,
+                        old_repo,
+                        tracing,
+                        metrics,
+                        media,
+                        repo: None,
+                        store: None,
+                    },
+                    operation: Operation::MigrateRepo {
+                        from: from.into(),
+                        to: to.into(),
+                    },
+                    save_to,
+                    config_file,
+                },
+            },
+            MigrateRepoFrom::Postgres(MigratePostgresRepo { from, to }) => match to {
+                MigrateRepoTo::Sled(MigrateSledInner { to }) => Output {
+                    config_format: ConfigFormat {
+                        server,
+                        client,
+                        old_repo,
+                        tracing,
+                        metrics,
+                        media,
+                        repo: None,
+                        store: None,
+                    },
+                    operation: Operation::MigrateRepo {
+                        from: from.into(),
+                        to: to.into(),
+                    },
+                    save_to,
+                    config_file,
+                },
+                MigrateRepoTo::Postgres(MigratePostgresInner { to }) => Output {
+                    config_format: ConfigFormat {
+                        server,
+                        client,
+                        old_repo,
+                        tracing,
+                        metrics,
+                        media,
+                        repo: None,
+                        store: None,
+                    },
+                    operation: Operation::MigrateRepo {
+                        from: from.into(),
+                        to: to.into(),
+                    },
+                    save_to,
+                    config_file,
                 },
             },
         }
@ -1058,6 +1114,7 @@ enum MigrateStoreFrom {
 #[derive(Debug, Subcommand)]
 enum MigrateRepoFrom {
     Sled(MigrateSledRepo),
+    Postgres(MigratePostgresRepo),
 }
 
 /// Configure the destination storage for pict-rs storage migration
@ -1075,8 +1132,10 @@ enum MigrateStoreTo {
 /// Configure the destination repo for pict-rs repo migration
 #[derive(Debug, Subcommand)]
 enum MigrateRepoTo {
-    /// Migrate to the provided sled storage
+    /// Migrate to the provided sled repo
     Sled(MigrateSledInner),
+    /// Migrate to the provided postgres repo
+    Postgres(MigratePostgresInner),
 }
 
 /// Migrate pict-rs' storage from the provided filesystem storage
@ -1099,6 +1158,16 @@ struct MigrateSledRepo {
     to: MigrateRepoTo,
 }
 
+/// Migrate pict-rs' repo from the provided postgres repo
+#[derive(Debug, Parser)]
+struct MigratePostgresRepo {
+    #[command(flatten)]
+    from: Postgres,
+
+    #[command(subcommand)]
+    to: MigrateRepoTo,
+}
+
 /// Migrate pict-rs' storage to the provided filesystem storage
 #[derive(Debug, Parser)]
 struct MigrateFilesystemInner {
@ -1116,6 +1185,13 @@ struct MigrateSledInner {
     to: Sled,
 }
 
+/// Migrate pict-rs' repo to the provided postgres repo
+#[derive(Debug, Parser)]
+struct MigratePostgresInner {
+    #[command(flatten)]
+    to: Postgres,
+}
+
 /// Migrate pict-rs' storage from the provided object storage
 #[derive(Debug, Parser)]
 struct MigrateObjectStorage {
@ -1163,6 +1239,8 @@ struct RunObjectStorage {
 enum Repo {
     /// Run pict-rs with the provided sled-backed data repository
     Sled(Sled),
+    /// Run pict-rs with the provided postgres-backed data repository
+    Postgres(Postgres),
 }
 
 /// Configuration for filesystem media storage
@ -1254,6 +1332,15 @@ pub(super) struct Sled {
     pub(super) export_path: Option<PathBuf>,
 }
 
+/// Configuration for the postgres-backed data repository
+#[derive(Debug, Parser, serde::Serialize)]
+#[serde(rename_all = "snake_case")]
+pub(super) struct Postgres {
+    /// The URL of the postgres database
+    #[arg(short, long)]
+    pub(super) url: Url,
+}
+
 #[derive(Debug, Parser, serde::Serialize)]
 #[serde(rename_all = "snake_case")]
 struct OldSled {
@ -363,8 +363,20 @@ impl From<crate::config::commandline::Sled> for crate::config::file::Sled {
     }
 }
 
+impl From<crate::config::commandline::Postgres> for crate::config::file::Postgres {
+    fn from(value: crate::config::commandline::Postgres) -> Self {
+        crate::config::file::Postgres { url: value.url }
+    }
+}
+
 impl From<crate::config::commandline::Sled> for crate::config::file::Repo {
     fn from(value: crate::config::commandline::Sled) -> Self {
         crate::config::file::Repo::Sled(value.into())
     }
 }
+
+impl From<crate::config::commandline::Postgres> for crate::config::file::Repo {
+    fn from(value: crate::config::commandline::Postgres) -> Self {
+        crate::config::file::Repo::Postgres(value.into())
+    }
+}
@ -88,6 +88,7 @@ pub(crate) struct ObjectStorage {
 #[serde(tag = "type")]
 pub(crate) enum Repo {
     Sled(Sled),
+    Postgres(Postgres),
 }
 
 #[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
@ -421,3 +422,9 @@ pub(crate) struct Sled {
 
     pub(crate) export_path: PathBuf,
 }
+
+#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
+#[serde(rename_all = "snake_case")]
+pub(crate) struct Postgres {
+    pub(crate) url: Url,
+}
@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     bytes_stream::BytesStream,
     discover::Discovery,
@ -101,9 +103,10 @@ impl Details {
         ))
     }
 
+    #[tracing::instrument(level = "DEBUG")]
     pub(crate) async fn from_store<S: Store>(
         store: &S,
-        identifier: &S::Identifier,
+        identifier: &Arc<str>,
         timeout: u64,
     ) -> Result<Self, Error> {
         let mut buf = BytesStream::new();
@ -9,6 +9,7 @@ use crate::{
 
 use super::Discovery;
 
+#[tracing::instrument(level = "DEBUG", skip_all)]
 pub(super) async fn check_reorient(
     Discovery {
         input,
@ -97,6 +97,7 @@ pub(super) async fn confirm_bytes(
     .await
 }
 
+#[tracing::instrument(level = "DEBUG", skip(f))]
 async fn count_avif_frames<F, Fut>(f: F, timeout: u64) -> Result<u32, MagickError>
 where
     F: FnOnce(crate::file::File) -> Fut,
@ -147,6 +148,7 @@ where
     Ok(lines)
 }
 
+#[tracing::instrument(level = "DEBUG", skip(f))]
 async fn discover_file<F, Fut>(f: F, timeout: u64) -> Result<Discovery, MagickError>
 where
     F: FnOnce(crate::file::File) -> Fut,
30 src/error.rs

@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use actix_web::{http::StatusCode, HttpResponse, ResponseError};
 use color_eyre::Report;
 
@ -5,6 +7,8 @@ use crate::error_code::ErrorCode;
 
 pub(crate) struct Error {
     inner: color_eyre::Report,
+    debug: Arc<str>,
+    display: Arc<str>,
 }
 
 impl Error {
@ -21,17 +25,21 @@ impl Error {
             .map(|e| e.error_code())
             .unwrap_or(ErrorCode::UNKNOWN_ERROR)
     }
+
+    pub(crate) fn is_disconnected(&self) -> bool {
+        self.kind().map(|e| e.is_disconnected()).unwrap_or(false)
+    }
 }
 
 impl std::fmt::Debug for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        std::fmt::Debug::fmt(&self.inner, f)
+        f.write_str(&self.debug)
     }
 }
 
 impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        std::fmt::Display::fmt(&self.inner, f)
+        f.write_str(&self.display)
     }
 }
 
@ -46,8 +54,14 @@ where
     UploadError: From<T>,
 {
     fn from(error: T) -> Self {
+        let inner = Report::from(UploadError::from(error));
+        let debug = Arc::from(format!("{inner:?}"));
+        let display = Arc::from(format!("{inner}"));
+
         Error {
-            inner: Report::from(UploadError::from(error)),
+            inner,
+            debug,
+            display,
         }
     }
 }
@ -166,12 +180,20 @@ impl UploadError {
             Self::InvalidToken => ErrorCode::INVALID_DELETE_TOKEN,
             Self::UnsupportedProcessExtension => ErrorCode::INVALID_FILE_EXTENSION,
             Self::DuplicateAlias => ErrorCode::DUPLICATE_ALIAS,
-            Self::PushJob(_) => todo!(),
+            Self::PushJob(_) => ErrorCode::PUSH_JOB,
             Self::Range => ErrorCode::RANGE_NOT_SATISFIABLE,
            Self::Limit(_) => ErrorCode::VALIDATE_FILE_SIZE,
            Self::Timeout(_) => ErrorCode::STREAM_TOO_SLOW,
        }
    }
+
+    const fn is_disconnected(&self) -> bool {
+        match self {
+            Self::Repo(e) => e.is_disconnected(),
+            Self::Store(s) => s.is_disconnected(),
+            _ => false,
+        }
+    }
 }
 
 impl From<actix_web::error::BlockingError> for UploadError {
@ -56,12 +56,19 @@ impl ErrorCode {
         code: "already-claimed",
     };
     pub(crate) const SLED_ERROR: ErrorCode = ErrorCode { code: "sled-error" };
+    pub(crate) const POSTGRES_ERROR: ErrorCode = ErrorCode {
+        code: "postgres-error",
+    };
     pub(crate) const EXTRACT_DETAILS: ErrorCode = ErrorCode {
         code: "extract-details",
     };
     pub(crate) const EXTRACT_UPLOAD_RESULT: ErrorCode = ErrorCode {
         code: "extract-upload-result",
     };
+    pub(crate) const PUSH_JOB: ErrorCode = ErrorCode { code: "push-job" };
+    pub(crate) const EXTRACT_JOB: ErrorCode = ErrorCode {
+        code: "extract-job",
+    };
     pub(crate) const CONFLICTED_RECORD: ErrorCode = ErrorCode {
         code: "conflicted-record",
     };
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error_code::ErrorCode,
     formats::InternalVideoFormat,
@@ -132,7 +134,7 @@ impl ThumbnailFormat {
 #[tracing::instrument(skip(store))]
 pub(crate) async fn thumbnail<S: Store>(
     store: S,
-    from: S::Identifier,
+    from: Arc<str>,
     input_format: InternalVideoFormat,
     format: ThumbnailFormat,
     timeout: u64,
@@ -446,15 +446,15 @@ mod io_uring {
             actix_rt::System::new().block_on(async move {
                 let arbiter = actix_rt::Arbiter::new();
 
-                let (tx, rx) = tokio::sync::oneshot::channel();
+                let (tx, rx) = crate::sync::channel(1);
 
                 arbiter.spawn(async move {
-                    let handle = actix_rt::spawn($fut);
+                    let handle = crate::sync::spawn($fut);
 
                     let _ = tx.send(handle.await.unwrap());
                 });
 
-                rx.await.unwrap()
+                rx.into_recv_async().await.unwrap()
             })
         };
     }
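
The test macro now builds its channel and task through a new `crate::sync` module instead of calling tokio's oneshot and `actix_rt::spawn` directly. That module's body is not part of this excerpt; a plausible sketch of the channel helper, assuming it wraps a bounded flume channel (which the `rx.into_recv_async()` call implies) and keeps the detached-span convention of the code it replaces:

    // Sketch only; src/sync.rs is not shown in this excerpt. The later
    // rx.into_recv_async() calls imply a flume channel underneath.
    pub(crate) fn channel<T>(bound: usize) -> (flume::Sender<T>, flume::Receiver<T>) {
        tracing::trace_span!(parent: None, "Create channel").in_scope(|| flume::bounded(bound))
    }
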
 75  src/future.rs  (new file)
@@ -0,0 +1,75 @@
+use std::{
+    future::Future,
+    time::{Duration, Instant},
+};
+
+pub(crate) type LocalBoxFuture<'a, T> = std::pin::Pin<Box<dyn Future<Output = T> + 'a>>;
+
+pub(crate) trait WithTimeout: Future {
+    fn with_timeout(self, duration: Duration) -> actix_rt::time::Timeout<Self>
+    where
+        Self: Sized,
+    {
+        actix_rt::time::timeout(duration, self)
+    }
+}
+
+pub(crate) trait WithMetrics: Future {
+    fn with_metrics(self, name: &'static str) -> MetricsFuture<Self>
+    where
+        Self: Sized,
+    {
+        MetricsFuture {
+            future: self,
+            metrics: Metrics {
+                name,
+                start: Instant::now(),
+                complete: false,
+            },
+        }
+    }
+}
+
+impl<F> WithMetrics for F where F: Future {}
+impl<F> WithTimeout for F where F: Future {}
+
+pin_project_lite::pin_project! {
+    pub(crate) struct MetricsFuture<F> {
+        #[pin]
+        future: F,
+
+        metrics: Metrics,
+    }
+}
+
+struct Metrics {
+    name: &'static str,
+    start: Instant,
+    complete: bool,
+}
+
+impl<F> Future for MetricsFuture<F>
+where
+    F: Future,
+{
+    type Output = F::Output;
+
+    fn poll(
+        self: std::pin::Pin<&mut Self>,
+        cx: &mut std::task::Context<'_>,
+    ) -> std::task::Poll<Self::Output> {
+        let this = self.project();
+
+        let out = std::task::ready!(this.future.poll(cx));
+
+        this.metrics.complete = true;
+
+        std::task::Poll::Ready(out)
+    }
+}
+
+impl Drop for Metrics {
+    fn drop(&mut self) {
+        metrics::histogram!(self.name, self.start.elapsed().as_secs_f64(), "complete" => self.complete.to_string());
+    }
+}
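
The `WithTimeout` and `WithMetrics` adapters above are used throughout the rest of this diff in place of bare `actix_rt::time::timeout` calls. A hedged usage sketch; the metric name and the inner future are made up for illustration:

    use std::time::Duration;

    use crate::future::{WithMetrics, WithTimeout};

    // Both names below are placeholders; only the adapter calls mirror the diff.
    async fn example() -> Option<u32> {
        let timed = async { 42_u32 }
            .with_metrics("pict-rs.example.work") // histogram sample on completion or drop
            .with_timeout(Duration::from_secs(10)) // Err(elapsed) if the deadline passes
            .await;

        // with_timeout wraps the output in a Result, so the caller decides what a
        // timeout means; here it simply becomes None.
        timed.ok()
    }
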
@@ -5,7 +5,7 @@ use crate::{
     ffmpeg::ThumbnailFormat,
     formats::{InputProcessableFormat, InternalVideoFormat},
     repo::{Alias, ArcRepo, Hash, VariantAlreadyExists},
-    store::{Identifier, Store},
+    store::Store,
 };
 use actix_web::web::Bytes;
 use std::{path::PathBuf, time::Instant};
@@ -91,7 +91,7 @@ async fn process<S: Store + 'static>(
     let permit = crate::PROCESS_SEMAPHORE.acquire().await;
 
     let identifier = if let Some(identifier) = repo.still_identifier_from_alias(&alias).await? {
-        S::Identifier::from_arc(identifier)?
+        identifier
     } else {
         let Some(identifier) = repo.identifier(hash.clone()).await? else {
             return Err(UploadError::MissingIdentifier.into());
@@ -101,7 +101,7 @@ async fn process<S: Store + 'static>(
 
     let reader = crate::ffmpeg::thumbnail(
         store.clone(),
-        S::Identifier::from_arc(identifier)?,
+        identifier,
         input_format.unwrap_or(InternalVideoFormat::Mp4),
         thumbnail_format,
         media.process_timeout,
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     bytes_stream::BytesStream,
     either::Either,
@@ -15,15 +17,12 @@ mod hasher;
 use hasher::Hasher;
 
 #[derive(Debug)]
-pub(crate) struct Session<S>
-where
-    S: Store,
-{
+pub(crate) struct Session {
     repo: ArcRepo,
     delete_token: DeleteToken,
     hash: Option<Hash>,
     alias: Option<Alias>,
-    identifier: Option<S::Identifier>,
+    identifier: Option<Arc<str>>,
 }
 
 #[tracing::instrument(skip(stream))]
@@ -49,7 +48,7 @@ pub(crate) async fn ingest<S>(
     stream: impl Stream<Item = Result<Bytes, Error>> + Unpin + 'static,
     declared_alias: Option<Alias>,
     media: &crate::config::Media,
-) -> Result<Session<S>, Error>
+) -> Result<Session, Error>
 where
     S: Store,
 {
@@ -131,11 +130,11 @@ where
 
 #[tracing::instrument(level = "trace", skip_all)]
 async fn save_upload<S>(
-    session: &mut Session<S>,
+    session: &mut Session,
     repo: &ArcRepo,
     store: &S,
     hash: Hash,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
 ) -> Result<(), Error>
 where
     S: Store,
@@ -153,10 +152,7 @@ where
     Ok(())
 }
 
-impl<S> Session<S>
-where
-    S: Store,
-{
+impl Session {
     pub(crate) fn disarm(mut self) -> DeleteToken {
         let _ = self.hash.take();
         let _ = self.alias.take();
@@ -206,10 +202,7 @@ where
     }
 }
 
-impl<S> Drop for Session<S>
-where
-    S: Store,
-{
+impl Drop for Session {
     fn drop(&mut self) {
         let any_items = self.hash.is_some() || self.alias.is_some() || self.identifier.is_some();
 
@@ -224,14 +217,12 @@ where
 
             let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Session cleanup hash", hash = ?hash);
 
-            tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-                actix_rt::spawn(
-                    async move {
-                        let _ = crate::queue::cleanup_hash(&repo, hash).await;
-                    }
-                    .instrument(cleanup_span),
-                )
-            });
+            crate::sync::spawn(
+                async move {
+                    let _ = crate::queue::cleanup_hash(&repo, hash).await;
+                }
+                .instrument(cleanup_span),
+            );
         }
 
         if let Some(alias) = self.alias.take() {
@@ -240,14 +231,12 @@ where
 
            let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Session cleanup alias", alias = ?alias);
 
-            tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-                actix_rt::spawn(
-                    async move {
-                        let _ = crate::queue::cleanup_alias(&repo, alias, token).await;
-                    }
-                    .instrument(cleanup_span),
-                )
-            });
+            crate::sync::spawn(
+                async move {
+                    let _ = crate::queue::cleanup_alias(&repo, alias, token).await;
+                }
+                .instrument(cleanup_span),
+            );
         }
 
         if let Some(identifier) = self.identifier.take() {
@@ -255,14 +244,12 @@ where
 
            let cleanup_span = tracing::info_span!(parent: &cleanup_parent_span, "Session cleanup identifier", identifier = ?identifier);
 
-            tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-                actix_rt::spawn(
-                    async move {
-                        let _ = crate::queue::cleanup_identifier(&repo, identifier).await;
-                    }
-                    .instrument(cleanup_span),
-                )
-            });
+            crate::sync::spawn(
+                async move {
+                    let _ = crate::queue::cleanup_identifier(&repo, &identifier).await;
+                }
+                .instrument(cleanup_span),
+            );
         }
     }
 }
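
The Drop impl now hands all three cleanup futures to `crate::sync::spawn`. The sync module itself is not shown in this change set; given the span-wrapped `actix_rt::spawn` calls it replaces, a plausible sketch is:

    // Sketch only; src/sync.rs is not included in this diff excerpt.
    pub(crate) fn spawn<F>(future: F) -> actix_rt::task::JoinHandle<F::Output>
    where
        F: std::future::Future + 'static,
        F::Output: 'static,
    {
        tracing::trace_span!(parent: None, "Spawn task").in_scope(|| actix_rt::spawn(future))
    }
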
@@ -82,16 +82,15 @@ mod test {
         actix_rt::System::new().block_on(async move {
             let arbiter = actix_rt::Arbiter::new();
 
-            let (tx, rx) = tracing::trace_span!(parent: None, "Create channel")
-                .in_scope(|| tokio::sync::oneshot::channel());
+            let (tx, rx) = crate::sync::channel(1);
 
             arbiter.spawn(async move {
-                let handle = actix_rt::spawn($fut);
+                let handle = crate::sync::spawn($fut);
 
                 let _ = tx.send(handle.await.unwrap());
             });
 
-            rx.await.unwrap()
+            rx.into_recv_async().await.unwrap()
         })
     };
 }
@@ -8,9 +8,7 @@ use opentelemetry_otlp::WithExportConfig;
 use tracing::subscriber::set_global_default;
 use tracing_error::ErrorLayer;
 use tracing_log::LogTracer;
-use tracing_subscriber::{
-    fmt::format::FmtSpan, layer::SubscriberExt, registry::LookupSpan, Layer, Registry,
-};
+use tracing_subscriber::{layer::SubscriberExt, registry::LookupSpan, Layer, Registry};
 
 pub(super) fn init_tracing(tracing: &Tracing) -> color_eyre::Result<()> {
     color_eyre::install()?;
@@ -19,8 +17,7 @@ pub(super) fn init_tracing(tracing: &Tracing) -> color_eyre::Result<()> {
 
     opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new());
 
-    let format_layer =
-        tracing_subscriber::fmt::layer().with_span_events(FmtSpan::NEW | FmtSpan::CLOSE);
+    let format_layer = tracing_subscriber::fmt::layer();
 
     match tracing.logging.format {
         LogFormat::Compact => with_format(format_layer.compact(), tracing),
 152  src/lib.rs
@@ -11,6 +11,7 @@ mod exiftool;
 mod ffmpeg;
 mod file;
 mod formats;
+mod future;
 mod generate;
 mod ingest;
 mod init_tracing;
@@ -26,6 +27,7 @@ mod repo_04;
 mod serde_str;
 mod store;
 mod stream;
+mod sync;
 mod tmp_file;
 mod validate;
 
@@ -36,6 +38,7 @@ use actix_web::{
     web, App, HttpRequest, HttpResponse, HttpResponseBuilder, HttpServer,
 };
 use details::{ApiDetails, HumanDate};
+use future::WithTimeout;
 use futures_core::Stream;
 use metrics_exporter_prometheus::PrometheusBuilder;
 use middleware::Metrics;
@@ -45,14 +48,15 @@ use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
 use reqwest_tracing::TracingMiddleware;
 use rusty_s3::UrlStyle;
 use std::{
+    marker::PhantomData,
     path::Path,
     path::PathBuf,
     sync::Arc,
     time::{Duration, SystemTime},
 };
 use tokio::sync::Semaphore;
+use tracing::Instrument;
 use tracing_actix_web::TracingLogger;
-use tracing_futures::Instrument;
 
 use self::{
     backgrounded::Backgrounded,
@@ -69,7 +73,7 @@ use self::{
     queue::queue_generate,
     repo::{sled::SledRepo, Alias, DeleteToken, Hash, Repo, UploadId, UploadResult},
     serde_str::Serde,
-    store::{file_store::FileStore, object_store::ObjectStore, Identifier, Store},
+    store::{file_store::FileStore, object_store::ObjectStore, Store},
     stream::{empty, once, StreamLimit, StreamMap, StreamTimeout},
 };
 
@@ -83,8 +87,9 @@ const DAYS: u32 = 24 * HOURS;
 const NOT_FOUND_KEY: &str = "404-alias";
 
 static PROCESS_SEMAPHORE: Lazy<Semaphore> = Lazy::new(|| {
-    tracing::trace_span!(parent: None, "Initialize semaphore")
-        .in_scope(|| Semaphore::new(num_cpus::get().saturating_sub(1).max(1)))
+    let permits = num_cpus::get().saturating_sub(1).max(1);
+
+    crate::sync::bare_semaphore(permits)
 });
 
 async fn ensure_details<S: Store + 'static>(
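
`bare_semaphore` also lives in the unshown `crate::sync` module. Judging by the code removed above, it presumably constructs the tokio `Semaphore` under a detached span; a sketch under that assumption:

    // Sketch only; src/sync.rs is not shown here, so this is an assumption about
    // what bare_semaphore does, based on the code it replaces.
    pub(crate) fn bare_semaphore(permits: usize) -> tokio::sync::Semaphore {
        tracing::trace_span!(parent: None, "Initialize semaphore")
            .in_scope(|| tokio::sync::Semaphore::new(permits))
    }
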
@@ -93,7 +98,7 @@ async fn ensure_details<S: Store + 'static>(
     config: &Configuration,
     alias: &Alias,
 ) -> Result<Details, Error> {
-    let Some(identifier) = repo.identifier_from_alias(alias).await?.map(S::Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier_from_alias(alias).await? else {
         return Err(UploadError::MissingAlias.into());
     };
 
@@ -117,10 +122,10 @@ async fn ensure_details<S: Store + 'static>(
     }
 }
 
-struct Upload<S: Store + 'static>(Value<Session<S>>);
+struct Upload<S>(Value<Session>, PhantomData<S>);
 
 impl<S: Store + 'static> FormData for Upload<S> {
-    type Item = Session<S>;
+    type Item = Session;
     type Error = Error;
 
     fn form(req: &HttpRequest) -> Form<Self::Item, Self::Error> {
@@ -172,14 +177,14 @@ impl<S: Store + 'static> FormData for Upload<S> {
     }
 
     fn extract(value: Value<Self::Item>) -> Result<Self, Self::Error> {
-        Ok(Upload(value))
+        Ok(Upload(value, PhantomData))
     }
 }
 
-struct Import<S: Store + 'static>(Value<Session<S>>);
+struct Import<S: Store + 'static>(Value<Session>, PhantomData<S>);
 
 impl<S: Store + 'static> FormData for Import<S> {
-    type Item = Session<S>;
+    type Item = Session;
     type Error = Error;
 
     fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> {
@@ -241,14 +246,14 @@ impl<S: Store + 'static> FormData for Import<S> {
     where
         Self: Sized,
     {
-        Ok(Import(value))
+        Ok(Import(value, PhantomData))
     }
 }
 
 /// Handle responding to successful uploads
 #[tracing::instrument(name = "Uploaded files", skip(value, repo, store, config))]
 async fn upload<S: Store + 'static>(
-    Multipart(Upload(value)): Multipart<Upload<S>>,
+    Multipart(Upload(value, _)): Multipart<Upload<S>>,
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
@@ -259,7 +264,7 @@ async fn upload<S: Store + 'static>(
 /// Handle responding to successful uploads
 #[tracing::instrument(name = "Imported files", skip(value, repo, store, config))]
 async fn import<S: Store + 'static>(
-    Multipart(Import(value)): Multipart<Import<S>>,
+    Multipart(Import(value, _)): Multipart<Import<S>>,
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
@@ -270,7 +275,7 @@ async fn import<S: Store + 'static>(
 /// Handle responding to successful uploads
 #[tracing::instrument(name = "Uploaded files", skip(value, repo, store, config))]
 async fn handle_upload<S: Store + 'static>(
-    value: Value<Session<S>>,
+    value: Value<Session>,
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
     config: web::Data<Configuration>,
@@ -312,10 +317,10 @@ async fn handle_upload<S: Store + 'static>(
     })))
 }
 
-struct BackgroundedUpload<S: Store + 'static>(Value<Backgrounded<S>>);
+struct BackgroundedUpload<S: Store + 'static>(Value<Backgrounded>, PhantomData<S>);
 
 impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
-    type Item = Backgrounded<S>;
+    type Item = Backgrounded;
     type Error = Error;
 
     fn form(req: &actix_web::HttpRequest) -> Form<Self::Item, Self::Error> {
@@ -371,13 +376,13 @@ impl<S: Store + 'static> FormData for BackgroundedUpload<S> {
     where
         Self: Sized,
     {
-        Ok(BackgroundedUpload(value))
+        Ok(BackgroundedUpload(value, PhantomData))
     }
 }
 
 #[tracing::instrument(name = "Uploaded files", skip(value, repo))]
 async fn upload_backgrounded<S: Store>(
-    Multipart(BackgroundedUpload(value)): Multipart<BackgroundedUpload<S>>,
+    Multipart(BackgroundedUpload(value, _)): Multipart<BackgroundedUpload<S>>,
     repo: web::Data<ArcRepo>,
 ) -> Result<HttpResponse, Error> {
     let images = value
@@ -394,11 +399,7 @@ async fn upload_backgrounded<S: Store>(
 
     for image in &images {
         let upload_id = image.result.upload_id().expect("Upload ID exists");
-        let identifier = image
-            .result
-            .identifier()
-            .expect("Identifier exists")
-            .to_bytes()?;
+        let identifier = image.result.identifier().expect("Identifier exists");
 
         queue::queue_ingest(&repo, identifier, upload_id, None).await?;
 
@@ -432,7 +433,11 @@ async fn claim_upload<S: Store + 'static>(
 ) -> Result<HttpResponse, Error> {
     let upload_id = Serde::into_inner(query.into_inner().upload_id);
 
-    match actix_rt::time::timeout(Duration::from_secs(10), repo.wait(upload_id)).await {
+    match repo
+        .wait(upload_id)
+        .with_timeout(Duration::from_secs(10))
+        .await
+    {
         Ok(wait_res) => {
             let upload_result = wait_res?;
             repo.claim(upload_id).await?;
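
`claim_upload` keeps its ten second deadline, but expresses it with the `WithTimeout` adapter, so the match still sees a nested Result: the outer layer for the deadline, the inner one for the repo call. A small sketch of that shape with a placeholder future:

    use std::time::Duration;

    use crate::future::WithTimeout;

    // wait_for_upload() stands in for repo.wait(upload_id); its error type is
    // illustrative only.
    async fn wait_for_upload() -> Result<String, std::io::Error> {
        Ok("upload complete".to_string())
    }

    async fn claim() {
        match wait_for_upload().with_timeout(Duration::from_secs(10)).await {
            Ok(Ok(result)) => println!("claimed: {result}"), // finished in time, repo succeeded
            Ok(Err(e)) => eprintln!("repo error: {e}"),      // finished in time, repo failed
            Err(_elapsed) => eprintln!("timed out waiting"), // deadline hit first
        }
    }
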
@@ -560,10 +565,7 @@ async fn do_download_backgrounded<S: Store + 'static>(
     let backgrounded = Backgrounded::proxy((**repo).clone(), (**store).clone(), stream).await?;
 
     let upload_id = backgrounded.upload_id().expect("Upload ID exists");
-    let identifier = backgrounded
-        .identifier()
-        .expect("Identifier exists")
-        .to_bytes()?;
+    let identifier = backgrounded.identifier().expect("Identifier exists");
 
     queue::queue_ingest(&repo, identifier, upload_id, None).await?;
 
@@ -764,8 +766,6 @@ async fn process_details<S: Store>(
     let identifier = repo
         .variant_identifier(hash, thumbnail_string)
         .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?
         .ok_or(UploadError::MissingAlias)?;
 
     let details = repo.details(&identifier).await?;
@@ -856,11 +856,7 @@ async fn process<S: Store + 'static>(
         .await?;
     }
 
-    let identifier_opt = repo
-        .variant_identifier(hash.clone(), path_string)
-        .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?;
+    let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?;
 
     if let Some(identifier) = identifier_opt {
         let details = repo.details(&identifier).await?;
@@ -980,11 +976,7 @@ async fn process_head<S: Store + 'static>(
         .await?;
     }
 
-    let identifier_opt = repo
-        .variant_identifier(hash.clone(), path_string)
-        .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?;
+    let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?;
 
     if let Some(identifier) = identifier_opt {
         let details = repo.details(&identifier).await?;
@@ -1047,11 +1039,7 @@ async fn process_backgrounded<S: Store>(
         return Ok(HttpResponse::BadRequest().finish());
     };
 
-    let identifier_opt = repo
-        .variant_identifier(hash.clone(), path_string)
-        .await?
-        .map(S::Identifier::from_arc)
-        .transpose()?;
+    let identifier_opt = repo.variant_identifier(hash.clone(), path_string).await?;
 
     if identifier_opt.is_some() {
         return Ok(HttpResponse::Accepted().finish());
@@ -1185,7 +1173,7 @@ async fn do_serve<S: Store + 'static>(
         (hash, alias, true)
     };
 
-    let Some(identifier) = repo.identifier(hash.clone()).await?.map(Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier(hash.clone()).await? else {
         tracing::warn!(
             "Original File identifier for hash {hash:?} is missing, queue cleanup task",
         );
@@ -1250,7 +1238,7 @@ async fn do_serve_head<S: Store + 'static>(
     store: web::Data<S>,
     config: web::Data<Configuration>,
 ) -> Result<HttpResponse, Error> {
-    let Some(identifier) = repo.identifier_from_alias(&alias).await?.map(S::Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier_from_alias(&alias).await? else {
         // Invalid alias
         return Ok(HttpResponse::NotFound().finish());
     };
@@ -1268,7 +1256,7 @@ async fn do_serve_head<S: Store + 'static>(
 
 async fn ranged_file_head_resp<S: Store + 'static>(
     store: &S,
-    identifier: S::Identifier,
+    identifier: Arc<str>,
     range: Option<web::Header<Range>>,
     details: Details,
 ) -> Result<HttpResponse, Error> {
@@ -1303,7 +1291,7 @@ async fn ranged_file_head_resp<S: Store + 'static>(
 
 async fn ranged_file_resp<S: Store + 'static>(
     store: &S,
-    identifier: S::Identifier,
+    identifier: Arc<str>,
     range: Option<web::Header<Range>>,
     details: Details,
     not_found: bool,
@@ -1555,7 +1543,7 @@ async fn identifier<S: Store>(
         }
     };
 
-    let Some(identifier) = repo.identifier_from_alias(&alias).await?.map(S::Identifier::from_arc).transpose()? else {
+    let Some(identifier) = repo.identifier_from_alias(&alias).await? else {
         // Invalid alias
         return Ok(HttpResponse::NotFound().json(serde_json::json!({
             "msg": "No identifiers associated with provided alias"
@@ -1564,10 +1552,11 @@ async fn identifier<S: Store>(
 
     Ok(HttpResponse::Ok().json(&serde_json::json!({
         "msg": "ok",
-        "identifier": identifier.string_repr(),
+        "identifier": identifier.as_ref(),
     })))
 }
 
+#[tracing::instrument(skip(repo, store))]
 async fn healthz<S: Store>(
     repo: web::Data<ArcRepo>,
     store: web::Data<S>,
@@ -1691,8 +1680,7 @@ fn spawn_cleanup(repo: ArcRepo, config: &Configuration) {
         return;
     }
 
-    tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-        actix_rt::spawn(async move {
+    crate::sync::spawn(async move {
         let mut interval = actix_rt::time::interval(Duration::from_secs(30));
 
         loop {
@@ -1713,22 +1701,18 @@ fn spawn_cleanup(repo: ArcRepo, config: &Configuration) {
             }
         }
     });
-    })
 }
 
 fn spawn_workers<S>(repo: ArcRepo, store: S, config: Configuration, process_map: ProcessMap)
 where
     S: Store + 'static,
 {
-    tracing::trace_span!(parent: None, "Spawn task").in_scope(|| {
-        actix_rt::spawn(queue::process_cleanup(
+    crate::sync::spawn(queue::process_cleanup(
         repo.clone(),
         store.clone(),
         config.clone(),
-        ))
-    });
-    tracing::trace_span!(parent: None, "Spawn task")
-        .in_scope(|| actix_rt::spawn(queue::process_images(repo, store, process_map, config)));
+    ));
+    crate::sync::spawn(queue::process_images(repo, store, process_map, config));
 }
 
 async fn launch_file_store<F: Fn(&mut web::ServiceConfig) + Send + Clone + 'static>(
@@ -1810,7 +1794,7 @@ async fn launch_object_store<F: Fn(&mut web::ServiceConfig) + Send + Clone + 'st
 }
 
 async fn migrate_inner<S1>(
-    repo: Repo,
+    repo: ArcRepo,
     client: ClientWithMiddleware,
     from: S1,
     to: config::primitives::Store,
@@ -1824,11 +1808,7 @@ where
         config::primitives::Store::Filesystem(config::Filesystem { path }) => {
             let to = FileStore::build(path.clone(), repo.clone()).await?;
 
-            match repo {
-                Repo::Sled(repo) => {
-                    migrate_store(Arc::new(repo), from, to, skip_missing_files, timeout).await?
-                }
-            }
+            migrate_store(repo, from, to, skip_missing_files, timeout).await?
         }
         config::primitives::Store::ObjectStorage(config::primitives::ObjectStorage {
             endpoint,
@@ -1862,11 +1842,7 @@ where
             .await?
             .build(client);
 
-            match repo {
-                Repo::Sled(repo) => {
-                    migrate_store(Arc::new(repo), from, to, skip_missing_files, timeout).await?
-                }
-            }
+            migrate_store(repo, from, to, skip_missing_files, timeout).await?
         }
     }
 
@@ -1970,7 +1946,7 @@ impl PictRsConfiguration {
                 from,
                 to,
             } => {
-                let repo = Repo::open(config.repo.clone())?;
+                let repo = Repo::open(config.repo.clone()).await?.to_arc();
 
                 match from {
                     config::primitives::Store::Filesystem(config::Filesystem { path }) => {
@@ -2034,15 +2010,15 @@ impl PictRsConfiguration {
                 return Ok(());
             }
             Operation::MigrateRepo { from, to } => {
-                let from = Repo::open(from)?.to_arc();
-                let to = Repo::open(to)?.to_arc();
+                let from = Repo::open(from).await?.to_arc();
+                let to = Repo::open(to).await?.to_arc();
 
                 repo::migrate_repo(from, to).await?;
                 return Ok(());
             }
         }
 
-        let repo = Repo::open(config.repo.clone())?;
+        let repo = Repo::open(config.repo.clone()).await?;
 
         if config.server.read_only {
             tracing::warn!("Launching in READ ONLY mode");
@@ -2050,10 +2026,10 @@ impl PictRsConfiguration {
 
         match config.store.clone() {
             config::Store::Filesystem(config::Filesystem { path }) => {
-                let store = FileStore::build(path, repo.clone()).await?;
-
                 let arc_repo = repo.to_arc();
 
+                let store = FileStore::build(path, arc_repo.clone()).await?;
+
                 if arc_repo.get("migrate-0.4").await?.is_none() {
                     if let Some(old_repo) = repo_04::open(&config.old_repo)? {
                         repo::migrate_04(old_repo, arc_repo.clone(), store.clone(), config.clone())
@@ -2066,15 +2042,14 @@ impl PictRsConfiguration {
 
                 match repo {
                     Repo::Sled(sled_repo) => {
-                        launch_file_store(
-                            Arc::new(sled_repo.clone()),
-                            store,
-                            client,
-                            config,
-                            move |sc| sled_extra_config(sc, sled_repo.clone()),
-                        )
+                        launch_file_store(arc_repo, store, client, config, move |sc| {
+                            sled_extra_config(sc, sled_repo.clone())
+                        })
                         .await?;
                     }
+                    Repo::Postgres(_) => {
+                        launch_file_store(arc_repo, store, client, config, |_| {}).await?;
+                    }
                 }
             }
             config::Store::ObjectStorage(config::ObjectStorage {
@@ -2089,6 +2064,8 @@ impl PictRsConfiguration {
                 client_timeout,
                 public_endpoint,
             }) => {
+                let arc_repo = repo.to_arc();
+
                 let store = ObjectStore::build(
                     endpoint,
                     bucket_name,
@@ -2104,13 +2081,11 @@ impl PictRsConfiguration {
                     signature_duration,
                     client_timeout,
                     public_endpoint,
-                    repo.clone(),
+                    arc_repo.clone(),
                 )
                 .await?
                 .build(client.clone());
 
-                let arc_repo = repo.to_arc();
-
                 if arc_repo.get("migrate-0.4").await?.is_none() {
                     if let Some(old_repo) = repo_04::open(&config.old_repo)? {
                         repo::migrate_04(old_repo, arc_repo.clone(), store.clone(), config.clone())
@@ -2128,6 +2103,9 @@ impl PictRsConfiguration {
                     })
                     .await?;
                 }
+                Repo::Postgres(_) => {
+                    launch_object_store(arc_repo, store, client, config, |_| {}).await?;
+                }
             }
         }
     }
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error_code::ErrorCode,
     formats::ProcessableFormat,
@@ -140,7 +142,7 @@ where
 
 pub(crate) async fn process_image_store_read<S: Store + 'static>(
     store: &S,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
     args: Vec<String>,
     input_format: ProcessableFormat,
     format: ProcessableFormat,
@@ -12,6 +12,8 @@ use std::{
     task::{Context, Poll},
 };
 
+use crate::future::WithTimeout;
+
 pub(crate) use self::metrics::Metrics;
 
 pub(crate) struct Deadline;
@@ -149,8 +151,12 @@ impl actix_web::error::ResponseError for DeadlineExceeded {
         HttpResponse::build(self.status_code())
             .content_type("application/json")
             .body(
-                serde_json::to_string(&serde_json::json!({ "msg": self.to_string() }))
-                    .unwrap_or_else(|_| r#"{"msg":"request timeout"}"#.to_string()),
+                serde_json::to_string(
+                    &serde_json::json!({ "msg": self.to_string(), "code": "request-timeout" }),
+                )
+                .unwrap_or_else(|_| {
+                    r#"{"msg":"request timeout","code":"request-timeout"}"#.to_string()
+                }),
             )
     }
 }
@@ -163,7 +169,7 @@ where
         DeadlineFuture {
             inner: match timeout {
                 Some(duration) => DeadlineFutureInner::Timed {
-                    timeout: actix_rt::time::timeout(duration, future),
+                    timeout: future.with_timeout(duration),
                 },
                 None => DeadlineFutureInner::Untimed { future },
             },
@@ -1,6 +1,9 @@
 use std::{
     rc::Rc,
-    sync::atomic::{AtomicU64, Ordering},
+    sync::{
+        atomic::{AtomicU64, Ordering},
+        Arc,
+    },
     time::{Duration, Instant},
 };
 
@@ -8,7 +11,7 @@ use crate::{
     details::Details,
     error::{Error, UploadError},
     repo::{ArcRepo, Hash},
-    store::{Identifier, Store},
+    store::Store,
     stream::IntoStreamer,
 };
 
@@ -58,7 +61,7 @@ where
             tracing::warn!("Retrying migration +{failure_count}");
         }
 
-        tokio::time::sleep(Duration::from_secs(3)).await;
+        actix_rt::time::sleep(Duration::from_secs(3)).await;
    }
 
     Ok(())
@@ -103,7 +106,7 @@ where
     }
 
     // Hashes are read in a consistent order
-    let mut stream = repo.hashes().await.into_streamer();
+    let mut stream = repo.hashes().into_streamer();
 
     let state = Rc::new(MigrateState {
         repo: repo.clone(),
@@ -169,7 +172,7 @@ where
     let current_index = index.fetch_add(1, Ordering::Relaxed);
 
     let original_identifier = match repo.identifier(hash.clone()).await {
-        Ok(Some(identifier)) => S1::Identifier::from_arc(identifier)?,
+        Ok(Some(identifier)) => identifier,
         Ok(None) => {
             tracing::warn!(
                 "Original File identifier for hash {hash:?} is missing, queue cleanup task",
@@ -214,8 +217,6 @@ where
     }
 
     if let Some(identifier) = repo.motion_identifier(hash.clone()).await? {
-        let identifier = S1::Identifier::from_arc(identifier)?;
-
         if !repo.is_migrated(&identifier).await? {
             match migrate_file(repo, from, to, &identifier, *skip_missing_files, *timeout).await {
                 Ok(new_identifier) => {
@@ -245,8 +246,6 @@ where
     }
 
     for (variant, identifier) in repo.variants(hash.clone()).await? {
-        let identifier = S1::Identifier::from_arc(identifier)?;
-
         if !repo.is_migrated(&identifier).await? {
             match migrate_file(repo, from, to, &identifier, *skip_missing_files, *timeout).await {
                 Ok(new_identifier) => {
@@ -339,10 +338,10 @@ async fn migrate_file<S1, S2>(
     repo: &ArcRepo,
     from: &S1,
     to: &S2,
-    identifier: &S1::Identifier,
+    identifier: &Arc<str>,
     skip_missing_files: bool,
     timeout: u64,
-) -> Result<S2::Identifier, MigrateError>
+) -> Result<Arc<str>, MigrateError>
 where
     S1: Store,
     S2: Store,
@@ -365,7 +364,7 @@ where
                 tracing::warn!("Failed moving file. Retrying +{failure_count}");
             }
 
-            tokio::time::sleep(Duration::from_secs(3)).await;
+            actix_rt::time::sleep(Duration::from_secs(3)).await;
         }
     }
 }
@@ -382,9 +381,9 @@ async fn do_migrate_file<S1, S2>(
     repo: &ArcRepo,
     from: &S1,
     to: &S2,
-    identifier: &S1::Identifier,
+    identifier: &Arc<str>,
     timeout: u64,
-) -> Result<S2::Identifier, MigrateError>
+) -> Result<Arc<str>, MigrateError>
 where
     S1: Store,
     S2: Store,
@@ -421,11 +420,7 @@ where
     Ok(new_identifier)
 }
 
-async fn migrate_details<I1, I2>(repo: &ArcRepo, from: &I1, to: &I2) -> Result<(), Error>
-where
-    I1: Identifier,
-    I2: Identifier,
-{
+async fn migrate_details(repo: &ArcRepo, from: &Arc<str>, to: &Arc<str>) -> Result<(), Error> {
     if let Some(details) = repo.details(from).await? {
         repo.relate_details(to, &details).await?;
         repo.cleanup_details(from).await?;
@@ -1,5 +1,6 @@
 use actix_rt::task::JoinHandle;
 use actix_web::web::Bytes;
+use flume::r#async::RecvFut;
 use std::{
     future::Future,
     pin::Pin,
@@ -10,11 +11,10 @@ use std::{
 use tokio::{
     io::{AsyncRead, AsyncWriteExt, ReadBuf},
     process::{Child, ChildStdin, ChildStdout, Command},
-    sync::oneshot::{channel, Receiver},
 };
 use tracing::{Instrument, Span};
 
-use crate::error_code::ErrorCode;
+use crate::{error_code::ErrorCode, future::WithTimeout};
 
 struct MetricsGuard {
     start: Instant,
@@ -73,7 +73,7 @@ struct DropHandle {
 
 pub(crate) struct ProcessRead<I> {
     inner: I,
-    err_recv: Receiver<std::io::Error>,
+    err_recv: RecvFut<'static, std::io::Error>,
     err_closed: bool,
     #[allow(dead_code)]
     handle: DropHandle,
@@ -159,7 +159,7 @@ impl Process {
             timeout,
         } = self;
 
-        let res = actix_rt::time::timeout(timeout, child.wait()).await;
+        let res = child.wait().with_timeout(timeout).await;
 
         match res {
             Ok(Ok(status)) if status.success() => {
@@ -206,14 +206,13 @@ impl Process {
         let stdin = child.stdin.take().expect("stdin exists");
         let stdout = child.stdout.take().expect("stdout exists");
 
-        let (tx, rx) = tracing::trace_span!(parent: None, "Create channel", %command)
-            .in_scope(channel::<std::io::Error>);
+        let (tx, rx) = crate::sync::channel::<std::io::Error>(1);
+        let rx = rx.into_recv_async();
 
         let span = tracing::info_span!(parent: None, "Background process task", %command);
         span.follows_from(Span::current());
 
-        let handle = tracing::trace_span!(parent: None, "Spawn task", %command).in_scope(|| {
-            actix_rt::spawn(
+        let handle = crate::sync::spawn(
             async move {
                 let child_fut = async {
                     (f)(stdin).await?;
@@ -221,7 +220,7 @@ impl Process {
                     child.wait().await
                 };
 
-                let error = match actix_rt::time::timeout(timeout, child_fut).await {
+                let error = match child_fut.with_timeout(timeout).await {
                     Ok(Ok(status)) if status.success() => {
                         guard.disarm();
                         return;
@@ -237,8 +236,7 @@ impl Process {
                 let _ = child.kill().await;
             }
             .instrument(span),
-        )
-        });
+        );
 
         let sleep = actix_rt::time::sleep(timeout);
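
The background-process error channel moves from a tokio oneshot `Receiver` to a flume receiver converted with `into_recv_async()`. Unlike `recv_async(&self)`, which borrows the receiver, `into_recv_async(self)` consumes it and yields an owned future, which is why `ProcessRead` can store a `RecvFut<'static, std::io::Error>`. A throwaway example of the same trick:

    // Throwaway example; the bound of 1 mirrors crate::sync::channel(1) above.
    fn owned_error_future() -> flume::r#async::RecvFut<'static, std::io::Error> {
        let (_tx, rx) = flume::bounded::<std::io::Error>(1);

        // recv_async(&self) would borrow rx for the future's lifetime;
        // into_recv_async(self) takes ownership, so the future is 'static and can
        // be stored in a struct such as ProcessRead.
        rx.into_recv_async()
    }
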
 110  src/queue.rs
@@ -3,15 +3,13 @@ use crate::{
     config::Configuration,
     error::{Error, UploadError},
     formats::InputProcessableFormat,
+    future::LocalBoxFuture,
     repo::{Alias, DeleteToken, FullRepo, Hash, JobId, UploadId},
     serde_str::Serde,
-    store::{Identifier, Store},
+    store::Store,
 };
-use base64::{prelude::BASE64_STANDARD, Engine};
 use std::{
-    future::Future,
     path::PathBuf,
-    pin::Pin,
     sync::Arc,
     time::{Duration, Instant},
 };
@@ -20,32 +18,6 @@ use tracing::Instrument;
 mod cleanup;
 mod process;
 
-#[derive(Debug)]
-struct Base64Bytes(Vec<u8>);
-
-impl serde::Serialize for Base64Bytes {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        let s = BASE64_STANDARD.encode(&self.0);
-        s.serialize(serializer)
-    }
-}
-
-impl<'de> serde::Deserialize<'de> for Base64Bytes {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        let s: String = serde::Deserialize::deserialize(deserializer)?;
-        BASE64_STANDARD
-            .decode(s)
-            .map(Base64Bytes)
-            .map_err(|e| serde::de::Error::custom(e.to_string()))
-    }
-}
-
 const CLEANUP_QUEUE: &str = "cleanup";
 const PROCESS_QUEUE: &str = "process";
 
@@ -55,7 +27,7 @@ enum Cleanup {
         hash: Hash,
     },
     Identifier {
-        identifier: Base64Bytes,
+        identifier: String,
     },
     Alias {
         alias: Serde<Alias>,
@@ -74,7 +46,7 @@ enum Cleanup {
 #[derive(Debug, serde::Deserialize, serde::Serialize)]
 enum Process {
     Ingest {
-        identifier: Base64Bytes,
+        identifier: String,
         upload_id: Serde<UploadId>,
         declared_alias: Option<Serde<Alias>>,
     },
@@ -91,30 +63,30 @@ pub(crate) async fn cleanup_alias(
     alias: Alias,
     token: DeleteToken,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Alias {
+    let job = serde_json::to_value(Cleanup::Alias {
         alias: Serde::new(alias),
         token: Serde::new(token),
     })
     .map_err(UploadError::PushJob)?;
-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    repo.push(CLEANUP_QUEUE, job).await?;
     Ok(())
 }
 
 pub(crate) async fn cleanup_hash(repo: &Arc<dyn FullRepo>, hash: Hash) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Hash { hash }).map_err(UploadError::PushJob)?;
-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    let job = serde_json::to_value(Cleanup::Hash { hash }).map_err(UploadError::PushJob)?;
+    repo.push(CLEANUP_QUEUE, job).await?;
     Ok(())
 }
 
-pub(crate) async fn cleanup_identifier<I: Identifier>(
+pub(crate) async fn cleanup_identifier(
     repo: &Arc<dyn FullRepo>,
-    identifier: I,
+    identifier: &Arc<str>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::Identifier {
-        identifier: Base64Bytes(identifier.to_bytes()?),
+    let job = serde_json::to_value(Cleanup::Identifier {
+        identifier: identifier.to_string(),
     })
     .map_err(UploadError::PushJob)?;
-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    repo.push(CLEANUP_QUEUE, job).await?;
     Ok(())
 }
 
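
Queue jobs are now pushed as `serde_json::Value` built with `serde_json::to_value`, and identifiers travel as plain strings, so the base64 wrapper removed above is no longer needed. A sketch of the payload this produces for a cleanup job, using serde's default externally tagged enum representation:

    // Local mirror of the enum above, serialized the way queue.rs now pushes jobs.
    #[derive(Debug, serde::Serialize, serde::Deserialize)]
    enum Cleanup {
        Identifier { identifier: String },
    }

    fn push_payload(identifier: &str) -> serde_json::Result<serde_json::Value> {
        // Produces an externally tagged value such as
        // {"Identifier":{"identifier":"some-object-key"}}.
        serde_json::to_value(Cleanup::Identifier {
            identifier: identifier.to_string(),
        })
    }
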
@@ -124,42 +96,42 @@ async fn cleanup_variants(
     variant: Option<String>,
 ) -> Result<(), Error> {
     let job =
-        serde_json::to_vec(&Cleanup::Variant { hash, variant }).map_err(UploadError::PushJob)?;
+        serde_json::to_value(Cleanup::Variant { hash, variant }).map_err(UploadError::PushJob)?;

-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    repo.push(CLEANUP_QUEUE, job).await?;

     Ok(())
 }

 pub(crate) async fn cleanup_outdated_proxies(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::OutdatedProxies).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_value(Cleanup::OutdatedProxies).map_err(UploadError::PushJob)?;

-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    repo.push(CLEANUP_QUEUE, job).await?;

     Ok(())
 }

 pub(crate) async fn cleanup_outdated_variants(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::OutdatedVariants).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_value(Cleanup::OutdatedVariants).map_err(UploadError::PushJob)?;

-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    repo.push(CLEANUP_QUEUE, job).await?;

     Ok(())
 }

 pub(crate) async fn cleanup_all_variants(repo: &Arc<dyn FullRepo>) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Cleanup::AllVariants).map_err(UploadError::PushJob)?;
+    let job = serde_json::to_value(Cleanup::AllVariants).map_err(UploadError::PushJob)?;

-    repo.push(CLEANUP_QUEUE, job.into()).await?;
+    repo.push(CLEANUP_QUEUE, job).await?;

     Ok(())
 }

 pub(crate) async fn queue_ingest(
     repo: &Arc<dyn FullRepo>,
-    identifier: Vec<u8>,
+    identifier: &Arc<str>,
     upload_id: UploadId,
     declared_alias: Option<Alias>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Process::Ingest {
-        identifier: Base64Bytes(identifier),
+    let job = serde_json::to_value(Process::Ingest {
+        identifier: identifier.to_string(),
         declared_alias: declared_alias.map(Serde::new),
         upload_id: Serde::new(upload_id),
     })
     .map_err(UploadError::PushJob)?;

-    repo.push(PROCESS_QUEUE, job.into()).await?;
+    repo.push(PROCESS_QUEUE, job).await?;

     Ok(())
 }
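The enqueue helpers above now hand serde_json::Value payloads to the repo instead of pre-encoded byte vectors. A minimal sketch of that round trip, assuming only serde and serde_json, and using a simplified stand-in Job enum rather than the crate's actual Cleanup/Process types:

use serde::{Deserialize, Serialize};

// Stand-in for the crate's job enums; field names are illustrative only.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
enum Job {
    Hash { hash: String },
    Identifier { identifier: String },
}

fn main() -> Result<(), serde_json::Error> {
    // Enqueue side: build a Value instead of a byte buffer.
    let job = serde_json::to_value(Job::Identifier {
        identifier: "object-key-123".to_string(),
    })?;

    // Dequeue side: recover the typed job from the Value.
    let parsed: Job = serde_json::from_value(job)?;
    println!("{parsed:?}");

    Ok(())
}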
@@ -170,14 +142,14 @@ pub(crate) async fn queue_generate(
     process_path: PathBuf,
     process_args: Vec<String>,
 ) -> Result<(), Error> {
-    let job = serde_json::to_vec(&Process::Generate {
+    let job = serde_json::to_value(Process::Generate {
         target_format,
         source: Serde::new(source),
         process_path,
         process_args,
     })
     .map_err(UploadError::PushJob)?;

-    repo.push(PROCESS_QUEUE, job.into()).await?;
+    repo.push(PROCESS_QUEUE, job).await?;

     Ok(())
 }
@@ -206,8 +178,6 @@ pub(crate) async fn process_images<S: Store + 'static>(
     .await
 }

-type LocalBoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + 'a>>;
-
 async fn process_jobs<S, F>(
     repo: &Arc<dyn FullRepo>,
     store: &S,
@@ -220,7 +190,7 @@ async fn process_jobs<S, F>(
         &'a Arc<dyn FullRepo>,
         &'a S,
         &'a Configuration,
-        &'a [u8],
+        serde_json::Value,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
@@ -232,6 +202,11 @@ async fn process_jobs<S, F>(
         if let Err(e) = res {
             tracing::warn!("Error processing jobs: {}", format!("{e}"));
             tracing::warn!("{}", format!("{e:?}"));

+            if e.is_disconnected() {
+                actix_rt::time::sleep(Duration::from_secs(10)).await;
+            }
+
             continue;
         }
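The added branch only sleeps when the error looks like a lost repo connection, so ordinary job failures keep being retried immediately. A rough, synchronous sketch of the same backoff idea, with a stand-in error type in place of the crate's RepoError and a plain thread sleep in place of actix_rt's timer (the real code waits 10 seconds here and 3 seconds in the image-job loop):

use std::time::Duration;

// Illustrative stand-in for an error that knows whether the backend dropped.
#[derive(Debug)]
struct WorkerError {
    disconnected: bool,
}

impl WorkerError {
    fn is_disconnected(&self) -> bool {
        self.disconnected
    }
}

fn run_once(attempt: u32) -> Result<(), WorkerError> {
    // Pretend the first attempt fails because the database connection dropped.
    if attempt == 0 {
        Err(WorkerError { disconnected: true })
    } else {
        Ok(())
    }
}

fn main() {
    let mut attempt = 0;
    loop {
        match run_once(attempt) {
            Ok(()) => break,
            Err(e) => {
                eprintln!("error processing jobs: {e:?}");

                // Only back off when the failure looks like a lost connection.
                if e.is_disconnected() {
                    std::thread::sleep(Duration::from_secs(1));
                }

                attempt += 1;
            }
        }
    }
}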
@@ -284,13 +259,13 @@ where
         &'a Arc<dyn FullRepo>,
         &'a S,
         &'a Configuration,
-        &'a [u8],
+        serde_json::Value,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
     loop {
         let fut = async {
-            let (job_id, bytes) = repo.pop(queue, worker_id).await?;
+            let (job_id, job) = repo.pop(queue, worker_id).await?;

             let span = tracing::info_span!("Running Job");
@@ -303,7 +278,7 @@ where
                 queue,
                 worker_id,
                 job_id,
-                (callback)(repo, store, config, bytes.as_ref()),
+                (callback)(repo, store, config, job),
             )
         })
         .instrument(span)
@@ -337,7 +312,7 @@ async fn process_image_jobs<S, F>(
         &'a S,
         &'a ProcessMap,
         &'a Configuration,
-        &'a [u8],
+        serde_json::Value,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
@@ -350,6 +325,11 @@ async fn process_image_jobs<S, F>(
         if let Err(e) = res {
             tracing::warn!("Error processing jobs: {}", format!("{e}"));
             tracing::warn!("{}", format!("{e:?}"));

+            if e.is_disconnected() {
+                actix_rt::time::sleep(Duration::from_secs(3)).await;
+            }
+
             continue;
         }
@@ -373,13 +353,13 @@ where
         &'a S,
         &'a ProcessMap,
         &'a Configuration,
-        &'a [u8],
+        serde_json::Value,
     ) -> LocalBoxFuture<'a, Result<(), Error>>
     + Copy,
 {
     loop {
         let fut = async {
-            let (job_id, bytes) = repo.pop(queue, worker_id).await?;
+            let (job_id, job) = repo.pop(queue, worker_id).await?;

             let span = tracing::info_span!("Running Job");
@@ -392,7 +372,7 @@ where
                 queue,
                 worker_id,
                 job_id,
-                (callback)(repo, store, process_map, config, bytes.as_ref()),
+                (callback)(repo, store, process_map, config, job),
             )
         })
         .instrument(span)
@@ -1,10 +1,13 @@
+use std::sync::Arc;
+
 use crate::{
     config::Configuration,
     error::{Error, UploadError},
-    queue::{Base64Bytes, Cleanup, LocalBoxFuture},
+    future::LocalBoxFuture,
+    queue::Cleanup,
     repo::{Alias, ArcRepo, DeleteToken, Hash},
     serde_str::Serde,
-    store::{Identifier, Store},
+    store::Store,
     stream::IntoStreamer,
 };
@@ -12,18 +15,18 @@ pub(super) fn perform<'a, S>(
     repo: &'a ArcRepo,
     store: &'a S,
     configuration: &'a Configuration,
-    job: &'a [u8],
+    job: serde_json::Value,
 ) -> LocalBoxFuture<'a, Result<(), Error>>
 where
     S: Store,
 {
     Box::pin(async move {
-        match serde_json::from_slice(job) {
+        match serde_json::from_value(job) {
             Ok(job) => match job {
                 Cleanup::Hash { hash: in_hash } => hash(repo, in_hash).await?,
                 Cleanup::Identifier {
-                    identifier: Base64Bytes(in_identifier),
-                } => identifier(repo, store, in_identifier).await?,
+                    identifier: in_identifier,
+                } => identifier(repo, store, Arc::from(in_identifier)).await?,
                 Cleanup::Alias {
                     alias: stored_alias,
                     token,
@@ -50,20 +53,18 @@ where
 }

 #[tracing::instrument(skip_all)]
-async fn identifier<S>(repo: &ArcRepo, store: &S, identifier: Vec<u8>) -> Result<(), Error>
+async fn identifier<S>(repo: &ArcRepo, store: &S, identifier: Arc<str>) -> Result<(), Error>
 where
     S: Store,
 {
-    let identifier = S::Identifier::from_bytes(identifier)?;
-
     let mut errors = Vec::new();

     if let Err(e) = store.remove(&identifier).await {
-        errors.push(e);
+        errors.push(UploadError::from(e));
     }

     if let Err(e) = repo.cleanup_details(&identifier).await {
-        errors.push(e);
+        errors.push(UploadError::from(e));
     }

     for error in errors {
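The cleanup above keeps going after a failed step and only surfaces the collected errors at the end, so the details record still gets a removal attempt even when the store delete fails. A small sketch of that collect-then-fail pattern, using plain string errors as a stand-in for the crate's UploadError:

// Run every cleanup step, remember failures, and report the first one afterwards.
fn cleanup(steps: Vec<Result<(), String>>) -> Result<(), String> {
    let mut errors = Vec::new();

    for step in steps {
        if let Err(e) = step {
            // Keep going so every resource gets a removal attempt.
            errors.push(e);
        }
    }

    // Surface the first failure once all steps have run.
    match errors.into_iter().next() {
        Some(first) => Err(first),
        None => Ok(()),
    }
}

fn main() {
    let result = cleanup(vec![Ok(()), Err("remove failed".into()), Ok(())]);
    assert_eq!(result, Err("remove failed".to_string()));
}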
@@ -100,7 +101,7 @@ async fn hash(repo: &ArcRepo, hash: Hash) -> Result<(), Error> {
     idents.extend(repo.motion_identifier(hash.clone()).await?);

     for identifier in idents {
-        let _ = super::cleanup_identifier(repo, identifier).await;
+        let _ = super::cleanup_identifier(repo, &identifier).await;
     }

     repo.cleanup_hash(hash).await?;
@@ -136,7 +137,7 @@ async fn alias(repo: &ArcRepo, alias: Alias, token: DeleteToken) -> Result<(), E

 #[tracing::instrument(skip_all)]
 async fn all_variants(repo: &ArcRepo) -> Result<(), Error> {
-    let mut hash_stream = repo.hashes().await.into_streamer();
+    let mut hash_stream = repo.hashes().into_streamer();

     while let Some(res) = hash_stream.next().await {
         let hash = res?;
@@ -193,7 +194,7 @@ async fn hash_variant(
         .variant_identifier(hash.clone(), target_variant.clone())
         .await?
     {
-        super::cleanup_identifier(repo, identifier).await?;
+        super::cleanup_identifier(repo, &identifier).await?;
     }

     repo.remove_variant(hash.clone(), target_variant.clone())

@@ -203,7 +204,7 @@ async fn hash_variant(
     for (variant, identifier) in repo.variants(hash.clone()).await? {
         repo.remove_variant(hash.clone(), variant.clone()).await?;
         repo.remove_variant_access(hash.clone(), variant).await?;
-        super::cleanup_identifier(repo, identifier).await?;
+        super::cleanup_identifier(repo, &identifier).await?;
     }
 }
@@ -3,37 +3,38 @@ use crate::{
     config::Configuration,
     error::{Error, UploadError},
     formats::InputProcessableFormat,
+    future::LocalBoxFuture,
     ingest::Session,
-    queue::{Base64Bytes, LocalBoxFuture, Process},
+    queue::Process,
     repo::{Alias, ArcRepo, UploadId, UploadResult},
     serde_str::Serde,
-    store::{Identifier, Store},
+    store::Store,
     stream::StreamMap,
 };
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc};

 pub(super) fn perform<'a, S>(
     repo: &'a ArcRepo,
     store: &'a S,
     process_map: &'a ProcessMap,
     config: &'a Configuration,
-    job: &'a [u8],
+    job: serde_json::Value,
 ) -> LocalBoxFuture<'a, Result<(), Error>>
 where
     S: Store + 'static,
 {
     Box::pin(async move {
-        match serde_json::from_slice(job) {
+        match serde_json::from_value(job) {
             Ok(job) => match job {
                 Process::Ingest {
-                    identifier: Base64Bytes(identifier),
+                    identifier,
                     upload_id,
                     declared_alias,
                 } => {
                     process_ingest(
                         repo,
                         store,
-                        identifier,
+                        Arc::from(identifier),
                         Serde::into_inner(upload_id),
                         declared_alias.map(Serde::into_inner),
                         &config.media,
@@ -68,11 +69,11 @@ where
     })
 }

-#[tracing::instrument(skip_all)]
+#[tracing::instrument(skip(repo, store, media))]
 async fn process_ingest<S>(
     repo: &ArcRepo,
     store: &S,
-    unprocessed_identifier: Vec<u8>,
+    unprocessed_identifier: Arc<str>,
     upload_id: UploadId,
     declared_alias: Option<Alias>,
     media: &crate::config::Media,
@@ -81,14 +82,12 @@ where
     S: Store + 'static,
 {
     let fut = async {
-        let unprocessed_identifier = S::Identifier::from_bytes(unprocessed_identifier)?;
-
         let ident = unprocessed_identifier.clone();
         let store2 = store.clone();
         let repo = repo.clone();

         let media = media.clone();
-        let error_boundary = actix_rt::spawn(async move {
+        let error_boundary = crate::sync::spawn(async move {
             let stream = store2
                 .to_stream(&ident, None, None)
                 .await?
@@ -97,7 +96,7 @@ where
             let session =
                 crate::ingest::ingest(&repo, &store2, stream, declared_alias, &media).await?;

-            Ok(session) as Result<Session<S>, Error>
+            Ok(session) as Result<Session, Error>
         })
         .await;
@@ -128,7 +127,7 @@ where
 }

 #[allow(clippy::too_many_arguments)]
-#[tracing::instrument(skip_all)]
+#[tracing::instrument(skip(repo, store, process_map, process_path, process_args, config))]
 async fn generate<S: Store + 'static>(
     repo: &ArcRepo,
     store: &S,
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::{
     error::{Error, UploadError},
     store::Store,

@@ -26,7 +28,7 @@ pub(crate) fn chop_bytes(
 pub(crate) async fn chop_store<S: Store>(
     byte_range: &ByteRangeSpec,
     store: &S,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
     length: u64,
 ) -> Result<impl Stream<Item = std::io::Result<Bytes>>, Error> {
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
src/repo.rs (580 lines changed)
@@ -1,8 +1,14 @@
+mod alias;
+mod delete_token;
+mod hash;
+mod metrics;
+mod migrate;
+
 use crate::{
     config,
     details::Details,
     error_code::{ErrorCode, OwnedErrorCode},
-    store::{Identifier, StoreError},
+    future::LocalBoxFuture,
     stream::LocalBoxStream,
 };
 use base64::Engine;

@@ -10,10 +16,11 @@ use std::{fmt::Debug, sync::Arc};
 use url::Url;
 use uuid::Uuid;

-mod hash;
-mod migrate;
+pub(crate) mod postgres;
 pub(crate) mod sled;

+pub(crate) use alias::Alias;
+pub(crate) use delete_token::DeleteToken;
 pub(crate) use hash::Hash;
 pub(crate) use migrate::{migrate_04, migrate_repo};
|
@ -22,6 +29,7 @@ pub(crate) type ArcRepo = Arc<dyn FullRepo>;
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) enum Repo {
|
pub(crate) enum Repo {
|
||||||
Sled(self::sled::SledRepo),
|
Sled(self::sled::SledRepo),
|
||||||
|
Postgres(self::postgres::PostgresRepo),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
@@ -30,17 +38,6 @@ enum MaybeUuid {
     Name(String),
 }

-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub(crate) struct Alias {
-    id: MaybeUuid,
-    extension: Option<String>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub(crate) struct DeleteToken {
-    id: MaybeUuid,
-}
-
 #[derive(Debug)]
 pub(crate) struct HashAlreadyExists;
 #[derive(Debug)]
@@ -70,6 +67,9 @@ pub(crate) enum RepoError {
     #[error("Error in sled")]
     SledError(#[from] crate::repo::sled::SledError),

+    #[error("Error in postgres")]
+    PostgresError(#[from] crate::repo::postgres::PostgresError),
+
     #[error("Upload was already claimed")]
     AlreadyClaimed,
|
@ -81,10 +81,18 @@ impl RepoError {
|
||||||
pub(crate) const fn error_code(&self) -> ErrorCode {
|
pub(crate) const fn error_code(&self) -> ErrorCode {
|
||||||
match self {
|
match self {
|
||||||
Self::SledError(e) => e.error_code(),
|
Self::SledError(e) => e.error_code(),
|
||||||
|
Self::PostgresError(e) => e.error_code(),
|
||||||
Self::AlreadyClaimed => ErrorCode::ALREADY_CLAIMED,
|
Self::AlreadyClaimed => ErrorCode::ALREADY_CLAIMED,
|
||||||
Self::Canceled => ErrorCode::PANIC,
|
Self::Canceled => ErrorCode::PANIC,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) const fn is_disconnected(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
Self::PostgresError(e) => e.is_disconnected(),
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
|
@@ -106,7 +114,7 @@ pub(crate) trait FullRepo:
     async fn health_check(&self) -> Result<(), RepoError>;

     #[tracing::instrument(skip(self))]
-    async fn identifier_from_alias(&self, alias: &Alias) -> Result<Option<Arc<[u8]>>, RepoError> {
+    async fn identifier_from_alias(&self, alias: &Alias) -> Result<Option<Arc<str>>, RepoError> {
         let Some(hash) = self.hash(alias).await? else {
             return Ok(None);
         };
@@ -127,7 +135,7 @@ pub(crate) trait FullRepo:
     async fn still_identifier_from_alias(
         &self,
         alias: &Alias,
-    ) -> Result<Option<Arc<[u8]>>, StoreError> {
+    ) -> Result<Option<Arc<str>>, RepoError> {
         let Some(hash) = self.hash(alias).await? else {
             return Ok(None);
         };
@@ -367,13 +375,13 @@ impl JobId {

 #[async_trait::async_trait(?Send)]
 pub(crate) trait QueueRepo: BaseRepo {
-    async fn push(&self, queue: &'static str, job: Arc<[u8]>) -> Result<JobId, RepoError>;
+    async fn push(&self, queue: &'static str, job: serde_json::Value) -> Result<JobId, RepoError>;

     async fn pop(
         &self,
         queue: &'static str,
         worker_id: Uuid,
-    ) -> Result<(JobId, Arc<[u8]>), RepoError>;
+    ) -> Result<(JobId, serde_json::Value), RepoError>;

     async fn heartbeat(
         &self,
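With this change the queue contract exchanges serde_json::Value payloads keyed by queue name. A simplified, synchronous, in-memory stand-in for that push/pop pair (the real trait is async and backed by sled or postgres):

use std::collections::{HashMap, VecDeque};

// In-memory stand-in: one FIFO per queue name, carrying JSON job payloads.
#[derive(Default)]
struct MemoryQueue {
    queues: HashMap<&'static str, VecDeque<serde_json::Value>>,
}

impl MemoryQueue {
    fn push(&mut self, queue: &'static str, job: serde_json::Value) {
        self.queues.entry(queue).or_default().push_back(job);
    }

    fn pop(&mut self, queue: &'static str) -> Option<serde_json::Value> {
        self.queues.get_mut(queue)?.pop_front()
    }
}

fn main() {
    let mut repo = MemoryQueue::default();

    repo.push("cleanup", serde_json::json!({ "type": "Hash", "hash": "abc" }));

    if let Some(job) = repo.pop("cleanup") {
        println!("popped job: {job}");
    }
}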
@@ -395,7 +403,7 @@ impl<T> QueueRepo for Arc<T>
 where
     T: QueueRepo,
 {
-    async fn push(&self, queue: &'static str, job: Arc<[u8]>) -> Result<JobId, RepoError> {
+    async fn push(&self, queue: &'static str, job: serde_json::Value) -> Result<JobId, RepoError> {
         T::push(self, queue, job).await
     }

@@ -403,7 +411,7 @@ where
         &self,
         queue: &'static str,
         worker_id: Uuid,
-    ) -> Result<(JobId, Arc<[u8]>), RepoError> {
+    ) -> Result<(JobId, serde_json::Value), RepoError> {
         T::pop(self, queue, worker_id).await
     }
@ -455,12 +463,12 @@ where
|
||||||
pub(crate) trait DetailsRepo: BaseRepo {
|
pub(crate) trait DetailsRepo: BaseRepo {
|
||||||
async fn relate_details(
|
async fn relate_details(
|
||||||
&self,
|
&self,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
details: &Details,
|
details: &Details,
|
||||||
) -> Result<(), StoreError>;
|
) -> Result<(), RepoError>;
|
||||||
async fn details(&self, identifier: &dyn Identifier) -> Result<Option<Details>, StoreError>;
|
async fn details(&self, identifier: &Arc<str>) -> Result<Option<Details>, RepoError>;
|
||||||
|
|
||||||
async fn cleanup_details(&self, identifier: &dyn Identifier) -> Result<(), StoreError>;
|
async fn cleanup_details(&self, identifier: &Arc<str>) -> Result<(), RepoError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
|
@ -470,17 +478,17 @@ where
|
||||||
{
|
{
|
||||||
async fn relate_details(
|
async fn relate_details(
|
||||||
&self,
|
&self,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
details: &Details,
|
details: &Details,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
T::relate_details(self, identifier, details).await
|
T::relate_details(self, identifier, details).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn details(&self, identifier: &dyn Identifier) -> Result<Option<Details>, StoreError> {
|
async fn details(&self, identifier: &Arc<str>) -> Result<Option<Details>, RepoError> {
|
||||||
T::details(self, identifier).await
|
T::details(self, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cleanup_details(&self, identifier: &dyn Identifier) -> Result<(), StoreError> {
|
async fn cleanup_details(&self, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
T::cleanup_details(self, identifier).await
|
T::cleanup_details(self, identifier).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -491,11 +499,11 @@ pub(crate) trait StoreMigrationRepo: BaseRepo {
|
||||||
|
|
||||||
async fn mark_migrated(
|
async fn mark_migrated(
|
||||||
&self,
|
&self,
|
||||||
old_identifier: &dyn Identifier,
|
old_identifier: &Arc<str>,
|
||||||
new_identifier: &dyn Identifier,
|
new_identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError>;
|
) -> Result<(), RepoError>;
|
||||||
|
|
||||||
async fn is_migrated(&self, identifier: &dyn Identifier) -> Result<bool, StoreError>;
|
async fn is_migrated(&self, identifier: &Arc<str>) -> Result<bool, RepoError>;
|
||||||
|
|
||||||
async fn clear(&self) -> Result<(), RepoError>;
|
async fn clear(&self) -> Result<(), RepoError>;
|
||||||
}
|
}
|
||||||
|
@ -511,13 +519,13 @@ where
|
||||||
|
|
||||||
async fn mark_migrated(
|
async fn mark_migrated(
|
||||||
&self,
|
&self,
|
||||||
old_identifier: &dyn Identifier,
|
old_identifier: &Arc<str>,
|
||||||
new_identifier: &dyn Identifier,
|
new_identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
T::mark_migrated(self, old_identifier, new_identifier).await
|
T::mark_migrated(self, old_identifier, new_identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn is_migrated(&self, identifier: &dyn Identifier) -> Result<bool, StoreError> {
|
async fn is_migrated(&self, identifier: &Arc<str>) -> Result<bool, RepoError> {
|
||||||
T::is_migrated(self, identifier).await
|
T::is_migrated(self, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -526,7 +534,7 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||||
pub(crate) struct OrderedHash {
|
pub(crate) struct OrderedHash {
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
|
@ -564,12 +572,88 @@ impl HashPage {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type PageFuture = LocalBoxFuture<'static, Result<HashPage, RepoError>>;
|
||||||
|
|
||||||
|
pub(crate) struct HashStream {
|
||||||
|
repo: Option<ArcRepo>,
|
||||||
|
page_future: Option<PageFuture>,
|
||||||
|
page: Option<HashPage>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl futures_core::Stream for HashStream {
|
||||||
|
type Item = Result<Hash, RepoError>;
|
||||||
|
|
||||||
|
fn poll_next(
|
||||||
|
self: std::pin::Pin<&mut Self>,
|
||||||
|
cx: &mut std::task::Context<'_>,
|
||||||
|
) -> std::task::Poll<Option<Self::Item>> {
|
||||||
|
let this = self.get_mut();
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let Some(repo) = &this.repo else {
|
||||||
|
return std::task::Poll::Ready(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let slug = if let Some(page) = &mut this.page {
|
||||||
|
// popping last in page is fine - we reversed them
|
||||||
|
if let Some(hash) = page.hashes.pop() {
|
||||||
|
return std::task::Poll::Ready(Some(Ok(hash)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let slug = page.next();
|
||||||
|
this.page.take();
|
||||||
|
|
||||||
|
if let Some(slug) = slug {
|
||||||
|
Some(slug)
|
||||||
|
} else {
|
||||||
|
this.repo.take();
|
||||||
|
return std::task::Poll::Ready(None);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(page_future) = &mut this.page_future {
|
||||||
|
let res = std::task::ready!(page_future.as_mut().poll(cx));
|
||||||
|
|
||||||
|
this.page_future.take();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Ok(mut page) => {
|
||||||
|
// reverse because we use `pop` to fetch next
|
||||||
|
page.hashes.reverse();
|
||||||
|
|
||||||
|
this.page = Some(page);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
this.repo.take();
|
||||||
|
|
||||||
|
return std::task::Poll::Ready(Some(Err(e)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let repo = repo.clone();
|
||||||
|
|
||||||
|
this.page_future = Some(Box::pin(async move { repo.hash_page(slug, 100).await }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl dyn FullRepo {
|
||||||
|
pub(crate) fn hashes(self: &Arc<Self>) -> HashStream {
|
||||||
|
HashStream {
|
||||||
|
repo: Some(self.clone()),
|
||||||
|
page_future: None,
|
||||||
|
page: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
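HashStream above pulls one page of hashes at a time and only asks the repo for the next page once the current one is drained. A simplified, synchronous analogue of that paging loop; the Page shape and the fetch_page helper are illustrative stand-ins for repo.hash_page(slug, 100):

// One page of results plus the cursor ("slug") for the following page.
struct Page {
    hashes: Vec<String>,
    next_slug: Option<String>,
}

// Stand-in for repo.hash_page(slug, limit): returns two pages in total.
fn fetch_page(slug: Option<&str>) -> Page {
    match slug {
        None => Page {
            hashes: vec!["hash-1".into(), "hash-2".into()],
            next_slug: Some("page-2".into()),
        },
        Some(_) => Page {
            hashes: vec!["hash-3".into()],
            next_slug: None,
        },
    }
}

fn main() {
    let mut slug: Option<String> = None;

    loop {
        let page = fetch_page(slug.as_deref());

        // Drain the current page before requesting another one.
        for hash in &page.hashes {
            println!("{hash}");
        }

        match page.next_slug {
            Some(next) => slug = Some(next),
            None => break,
        }
    }
}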
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
pub(crate) trait HashRepo: BaseRepo {
|
pub(crate) trait HashRepo: BaseRepo {
|
||||||
async fn size(&self) -> Result<u64, RepoError>;
|
async fn size(&self) -> Result<u64, RepoError>;
|
||||||
|
|
||||||
async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>>;
|
|
||||||
|
|
||||||
async fn hash_page(&self, slug: Option<String>, limit: usize) -> Result<HashPage, RepoError> {
|
async fn hash_page(&self, slug: Option<String>, limit: usize) -> Result<HashPage, RepoError> {
|
||||||
let hash = slug.as_deref().and_then(hash_from_slug);
|
let hash = slug.as_deref().and_then(hash_from_slug);
|
||||||
|
|
||||||
|
@ -599,8 +683,8 @@ pub(crate) trait HashRepo: BaseRepo {
|
||||||
async fn create_hash(
|
async fn create_hash(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
|
self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
@ -608,38 +692,34 @@ pub(crate) trait HashRepo: BaseRepo {
|
||||||
async fn create_hash_with_timestamp(
|
async fn create_hash_with_timestamp(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError>;
|
) -> Result<Result<(), HashAlreadyExists>, RepoError>;
|
||||||
|
|
||||||
async fn update_identifier(
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError>;
|
||||||
&self,
|
|
||||||
hash: Hash,
|
|
||||||
identifier: &dyn Identifier,
|
|
||||||
) -> Result<(), StoreError>;
|
|
||||||
|
|
||||||
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError>;
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
|
|
||||||
async fn relate_variant_identifier(
|
async fn relate_variant_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), VariantAlreadyExists>, StoreError>;
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError>;
|
||||||
async fn variant_identifier(
|
async fn variant_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
) -> Result<Option<Arc<[u8]>>, RepoError>;
|
) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<[u8]>)>, RepoError>;
|
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError>;
|
||||||
async fn remove_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError>;
|
async fn remove_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError>;
|
||||||
|
|
||||||
async fn relate_motion_identifier(
|
async fn relate_motion_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError>;
|
) -> Result<(), RepoError>;
|
||||||
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError>;
|
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError>;
|
||||||
|
|
||||||
async fn cleanup_hash(&self, hash: Hash) -> Result<(), RepoError>;
|
async fn cleanup_hash(&self, hash: Hash) -> Result<(), RepoError>;
|
||||||
}
|
}
|
||||||
|
@ -653,10 +733,6 @@ where
|
||||||
T::size(self).await
|
T::size(self).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>> {
|
|
||||||
T::hashes(self).await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
||||||
T::bound(self, hash).await
|
T::bound(self, hash).await
|
||||||
}
|
}
|
||||||
|
@ -680,21 +756,17 @@ where
|
||||||
async fn create_hash_with_timestamp(
|
async fn create_hash_with_timestamp(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
|
T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn update_identifier(
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
&self,
|
|
||||||
hash: Hash,
|
|
||||||
identifier: &dyn Identifier,
|
|
||||||
) -> Result<(), StoreError> {
|
|
||||||
T::update_identifier(self, hash, identifier).await
|
T::update_identifier(self, hash, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
T::identifier(self, hash).await
|
T::identifier(self, hash).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -702,8 +774,8 @@ where
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), VariantAlreadyExists>, StoreError> {
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError> {
|
||||||
T::relate_variant_identifier(self, hash, variant, identifier).await
|
T::relate_variant_identifier(self, hash, variant, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -711,11 +783,11 @@ where
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
) -> Result<Option<Arc<[u8]>>, RepoError> {
|
) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
T::variant_identifier(self, hash, variant).await
|
T::variant_identifier(self, hash, variant).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<[u8]>)>, RepoError> {
|
async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError> {
|
||||||
T::variants(self, hash).await
|
T::variants(self, hash).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -726,12 +798,12 @@ where
|
||||||
async fn relate_motion_identifier(
|
async fn relate_motion_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
T::relate_motion_identifier(self, hash, identifier).await
|
T::relate_motion_identifier(self, hash, identifier).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
T::motion_identifier(self, hash).await
|
T::motion_identifier(self, hash).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -791,7 +863,7 @@ where

 impl Repo {
     #[tracing::instrument]
-    pub(crate) fn open(config: config::Repo) -> color_eyre::Result<Self> {
+    pub(crate) async fn open(config: config::Repo) -> color_eyre::Result<Self> {
         match config {
             config::Repo::Sled(config::Sled {
                 path,

@@ -802,12 +874,18 @@ impl Repo {

                 Ok(Self::Sled(repo))
             }
+            config::Repo::Postgres(config::Postgres { url }) => {
+                let repo = self::postgres::PostgresRepo::connect(url).await?;
+
+                Ok(Self::Postgres(repo))
+            }
         }
     }

     pub(crate) fn to_arc(&self) -> ArcRepo {
         match self {
             Self::Sled(sled_repo) => Arc::new(sled_repo.clone()),
+            Self::Postgres(postgres_repo) => Arc::new(postgres_repo.clone()),
         }
     }
 }
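Repo::open now selects a backend from configuration and can connect to postgres asynchronously. A hedged sketch of that config-driven selection with stand-in config and repo types (the real constructors open a sled database at a path or connect to postgres by URL):

// Illustrative config and repo enums; names mirror the diff but are stand-ins.
enum RepoConfig {
    Sled { path: String },
    Postgres { url: String },
}

#[derive(Debug)]
enum Repo {
    Sled(String),
    Postgres(String),
}

fn open(config: RepoConfig) -> Repo {
    match config {
        // The real sled backend opens a database at `path`.
        RepoConfig::Sled { path } => Repo::Sled(path),
        // The real postgres backend connects (async) using `url`.
        RepoConfig::Postgres { url } => Repo::Postgres(url),
    }
}

fn main() {
    let repo = open(RepoConfig::Postgres {
        url: "postgres://localhost/pictrs".into(),
    });
    println!("{repo:?}");
}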
@ -829,106 +907,6 @@ impl MaybeUuid {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn split_at_dot(s: &str) -> Option<(&str, &str)> {
|
|
||||||
let index = s.find('.')?;
|
|
||||||
|
|
||||||
Some(s.split_at(index))
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Alias {
|
|
||||||
pub(crate) fn generate(extension: String) -> Self {
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(Uuid::new_v4()),
|
|
||||||
extension: Some(extension),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_existing(alias: &str) -> Self {
|
|
||||||
if let Some((start, end)) = split_at_dot(alias) {
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::from_str(start),
|
|
||||||
extension: Some(end.into()),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::from_str(alias),
|
|
||||||
extension: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn extension(&self) -> Option<&str> {
|
|
||||||
self.extension.as_deref()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn to_bytes(&self) -> Vec<u8> {
|
|
||||||
let mut v = self.id.as_bytes().to_vec();
|
|
||||||
|
|
||||||
if let Some(ext) = self.extension() {
|
|
||||||
v.extend_from_slice(ext.as_bytes());
|
|
||||||
}
|
|
||||||
|
|
||||||
v
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_slice(bytes: &[u8]) -> Option<Self> {
|
|
||||||
if let Ok(s) = std::str::from_utf8(bytes) {
|
|
||||||
Some(Self::from_existing(s))
|
|
||||||
} else if bytes.len() >= 16 {
|
|
||||||
let id = Uuid::from_slice(&bytes[0..16]).expect("Already checked length");
|
|
||||||
|
|
||||||
let extension = if bytes.len() > 16 {
|
|
||||||
Some(String::from_utf8_lossy(&bytes[16..]).to_string())
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
Some(Self {
|
|
||||||
id: MaybeUuid::Uuid(id),
|
|
||||||
extension,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DeleteToken {
|
|
||||||
pub(crate) fn from_existing(existing: &str) -> Self {
|
|
||||||
if let Ok(uuid) = Uuid::parse_str(existing) {
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Name(existing.into()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn generate() -> Self {
|
|
||||||
Self {
|
|
||||||
id: MaybeUuid::Uuid(Uuid::new_v4()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn to_bytes(&self) -> Vec<u8> {
|
|
||||||
self.id.as_bytes().to_vec()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn from_slice(bytes: &[u8]) -> Option<Self> {
|
|
||||||
if let Ok(s) = std::str::from_utf8(bytes) {
|
|
||||||
Some(DeleteToken::from_existing(s))
|
|
||||||
} else if bytes.len() == 16 {
|
|
||||||
Some(DeleteToken {
|
|
||||||
id: MaybeUuid::Uuid(Uuid::from_slice(bytes).ok()?),
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UploadId {
|
impl UploadId {
|
||||||
pub(crate) fn generate() -> Self {
|
pub(crate) fn generate() -> Self {
|
||||||
Self { id: Uuid::new_v4() }
|
Self { id: Uuid::new_v4() }
|
||||||
|
@ -961,253 +939,3 @@ impl std::fmt::Display for MaybeUuid {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::str::FromStr for DeleteToken {
|
|
||||||
type Err = std::convert::Infallible;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
Ok(DeleteToken::from_existing(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for DeleteToken {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(f, "{}", self.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::str::FromStr for Alias {
|
|
||||||
type Err = std::convert::Infallible;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
Ok(Alias::from_existing(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for Alias {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
if let Some(ext) = self.extension() {
|
|
||||||
write!(f, "{}{ext}", self.id)
|
|
||||||
} else {
|
|
||||||
write!(f, "{}", self.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::{Alias, DeleteToken, MaybeUuid, Uuid};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn string_delete_token() {
|
|
||||||
let delete_token = DeleteToken::from_existing("blah");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
delete_token,
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Name(String::from("blah"))
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_string_delete_token() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let delete_token = DeleteToken::from_existing(&uuid.to_string());
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
delete_token,
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn bytes_delete_token() {
|
|
||||||
let delete_token = DeleteToken::from_slice(b"blah").unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
delete_token,
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Name(String::from("blah"))
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_bytes_delete_token() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let delete_token = DeleteToken::from_slice(&uuid.as_bytes()[..]).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
delete_token,
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_bytes_string_delete_token() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let delete_token = DeleteToken::from_slice(uuid.to_string().as_bytes()).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
delete_token,
|
|
||||||
DeleteToken {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn string_alias() {
|
|
||||||
let alias = Alias::from_existing("blah");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Name(String::from("blah")),
|
|
||||||
extension: None
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn string_alias_ext() {
|
|
||||||
let alias = Alias::from_existing("blah.mp4");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Name(String::from("blah")),
|
|
||||||
extension: Some(String::from(".mp4")),
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_string_alias() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let alias = Alias::from_existing(&uuid.to_string());
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
extension: None,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_string_alias_ext() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let alias_str = format!("{uuid}.mp4");
|
|
||||||
let alias = Alias::from_existing(&alias_str);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
extension: Some(String::from(".mp4")),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn bytes_alias() {
|
|
||||||
let alias = Alias::from_slice(b"blah").unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Name(String::from("blah")),
|
|
||||||
extension: None
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn bytes_alias_ext() {
|
|
||||||
let alias = Alias::from_slice(b"blah.mp4").unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Name(String::from("blah")),
|
|
||||||
extension: Some(String::from(".mp4")),
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_bytes_alias() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let alias = Alias::from_slice(&uuid.as_bytes()[..]).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
extension: None,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_bytes_string_alias() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let alias = Alias::from_slice(uuid.to_string().as_bytes()).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
extension: None,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_bytes_alias_ext() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let mut alias_bytes = uuid.as_bytes().to_vec();
|
|
||||||
alias_bytes.extend_from_slice(b".mp4");
|
|
||||||
|
|
||||||
let alias = Alias::from_slice(&alias_bytes).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
extension: Some(String::from(".mp4")),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn uuid_bytes_string_alias_ext() {
|
|
||||||
let uuid = Uuid::new_v4();
|
|
||||||
|
|
||||||
let alias_str = format!("{uuid}.mp4");
|
|
||||||
let alias = Alias::from_slice(alias_str.as_bytes()).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
alias,
|
|
||||||
Alias {
|
|
||||||
id: MaybeUuid::Uuid(uuid),
|
|
||||||
extension: Some(String::from(".mp4")),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
src/repo/alias.rs (new file, 274 lines)
@ -0,0 +1,274 @@
|
||||||
|
use diesel::{backend::Backend, sql_types::VarChar, AsExpression, FromSqlRow};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use super::MaybeUuid;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, AsExpression, FromSqlRow)]
|
||||||
|
#[diesel(sql_type = VarChar)]
|
||||||
|
pub(crate) struct Alias {
|
||||||
|
id: MaybeUuid,
|
||||||
|
extension: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl diesel::serialize::ToSql<VarChar, diesel::pg::Pg> for Alias {
|
||||||
|
fn to_sql<'b>(
|
||||||
|
&'b self,
|
||||||
|
out: &mut diesel::serialize::Output<'b, '_, diesel::pg::Pg>,
|
||||||
|
) -> diesel::serialize::Result {
|
||||||
|
let s = self.to_string();
|
||||||
|
|
||||||
|
<String as diesel::serialize::ToSql<VarChar, diesel::pg::Pg>>::to_sql(
|
||||||
|
&s,
|
||||||
|
&mut out.reborrow(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<B> diesel::deserialize::FromSql<VarChar, B> for Alias
|
||||||
|
where
|
||||||
|
B: Backend,
|
||||||
|
String: diesel::deserialize::FromSql<VarChar, B>,
|
||||||
|
{
|
||||||
|
fn from_sql(
|
||||||
|
bytes: <B as diesel::backend::Backend>::RawValue<'_>,
|
||||||
|
) -> diesel::deserialize::Result<Self> {
|
||||||
|
let s = String::from_sql(bytes)?;
|
||||||
|
|
||||||
|
s.parse().map_err(From::from)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Alias {
|
||||||
|
pub(crate) fn generate(extension: String) -> Self {
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(Uuid::new_v4()),
|
||||||
|
extension: Some(extension),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn from_existing(alias: &str) -> Self {
|
||||||
|
if let Some((start, end)) = split_at_dot(alias) {
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::from_str(start),
|
||||||
|
extension: Some(end.into()),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::from_str(alias),
|
||||||
|
extension: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn extension(&self) -> Option<&str> {
|
||||||
|
self.extension.as_deref()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn to_bytes(&self) -> Vec<u8> {
|
||||||
|
let mut v = self.id.as_bytes().to_vec();
|
||||||
|
|
||||||
|
if let Some(ext) = self.extension() {
|
||||||
|
v.extend_from_slice(ext.as_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
|
v
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn from_slice(bytes: &[u8]) -> Option<Self> {
|
||||||
|
if let Ok(s) = std::str::from_utf8(bytes) {
|
||||||
|
Some(Self::from_existing(s))
|
||||||
|
} else if bytes.len() >= 16 {
|
||||||
|
let id = Uuid::from_slice(&bytes[0..16]).expect("Already checked length");
|
||||||
|
|
||||||
|
let extension = if bytes.len() > 16 {
|
||||||
|
Some(String::from_utf8_lossy(&bytes[16..]).to_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Some(Self {
|
||||||
|
id: MaybeUuid::Uuid(id),
|
||||||
|
extension,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn split_at_dot(s: &str) -> Option<(&str, &str)> {
|
||||||
|
let index = s.find('.')?;
|
||||||
|
|
||||||
|
Some(s.split_at(index))
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::str::FromStr for Alias {
|
||||||
|
type Err = std::convert::Infallible;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
Ok(Alias::from_existing(s))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Alias {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
if let Some(ext) = self.extension() {
|
||||||
|
write!(f, "{}{ext}", self.id)
|
||||||
|
} else {
|
||||||
|
write!(f, "{}", self.id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::{Alias, MaybeUuid};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn string_alias() {
|
||||||
|
let alias = Alias::from_existing("blah");
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Name(String::from("blah")),
|
||||||
|
extension: None
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn string_alias_ext() {
|
||||||
|
let alias = Alias::from_existing("blah.mp4");
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Name(String::from("blah")),
|
||||||
|
extension: Some(String::from(".mp4")),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn uuid_string_alias() {
|
||||||
|
let uuid = Uuid::new_v4();
|
||||||
|
|
||||||
|
let alias = Alias::from_existing(&uuid.to_string());
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
extension: None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn uuid_string_alias_ext() {
|
||||||
|
let uuid = Uuid::new_v4();
|
||||||
|
|
||||||
|
let alias_str = format!("{uuid}.mp4");
|
||||||
|
let alias = Alias::from_existing(&alias_str);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
extension: Some(String::from(".mp4")),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bytes_alias() {
|
||||||
|
let alias = Alias::from_slice(b"blah").unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Name(String::from("blah")),
|
||||||
|
extension: None
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bytes_alias_ext() {
|
||||||
|
let alias = Alias::from_slice(b"blah.mp4").unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Name(String::from("blah")),
|
||||||
|
extension: Some(String::from(".mp4")),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn uuid_bytes_alias() {
|
||||||
|
let uuid = Uuid::new_v4();
|
||||||
|
|
||||||
|
let alias = Alias::from_slice(&uuid.as_bytes()[..]).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
extension: None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn uuid_bytes_string_alias() {
|
||||||
|
let uuid = Uuid::new_v4();
|
||||||
|
|
||||||
|
let alias = Alias::from_slice(uuid.to_string().as_bytes()).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
extension: None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn uuid_bytes_alias_ext() {
|
||||||
|
let uuid = Uuid::new_v4();
|
||||||
|
|
||||||
|
let mut alias_bytes = uuid.as_bytes().to_vec();
|
||||||
|
alias_bytes.extend_from_slice(b".mp4");
|
||||||
|
|
||||||
|
let alias = Alias::from_slice(&alias_bytes).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
extension: Some(String::from(".mp4")),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn uuid_bytes_string_alias_ext() {
|
||||||
|
let uuid = Uuid::new_v4();
|
||||||
|
|
||||||
|
let alias_str = format!("{uuid}.mp4");
|
||||||
|
let alias = Alias::from_slice(alias_str.as_bytes()).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
alias,
|
||||||
|
Alias {
|
||||||
|
id: MaybeUuid::Uuid(uuid),
|
||||||
|
extension: Some(String::from(".mp4")),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
src/repo/delete_token.rs (new file, 160 lines)
@ -0,0 +1,160 @@
use diesel::{backend::Backend, sql_types::VarChar, AsExpression, FromSqlRow};
use uuid::Uuid;

use super::MaybeUuid;

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, AsExpression, FromSqlRow)]
#[diesel(sql_type = VarChar)]
pub(crate) struct DeleteToken {
    id: MaybeUuid,
}

impl diesel::serialize::ToSql<VarChar, diesel::pg::Pg> for DeleteToken {
    fn to_sql<'b>(
        &'b self,
        out: &mut diesel::serialize::Output<'b, '_, diesel::pg::Pg>,
    ) -> diesel::serialize::Result {
        let s = self.to_string();

        <String as diesel::serialize::ToSql<VarChar, diesel::pg::Pg>>::to_sql(
            &s,
            &mut out.reborrow(),
        )
    }
}

impl<B> diesel::deserialize::FromSql<VarChar, B> for DeleteToken
where
    B: Backend,
    String: diesel::deserialize::FromSql<VarChar, B>,
{
    fn from_sql(
        bytes: <B as diesel::backend::Backend>::RawValue<'_>,
    ) -> diesel::deserialize::Result<Self> {
        let s = String::from_sql(bytes)?;

        s.parse().map_err(From::from)
    }
}

impl DeleteToken {
    pub(crate) fn from_existing(existing: &str) -> Self {
        if let Ok(uuid) = Uuid::parse_str(existing) {
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        } else {
            DeleteToken {
                id: MaybeUuid::Name(existing.into()),
            }
        }
    }

    pub(crate) fn generate() -> Self {
        Self {
            id: MaybeUuid::Uuid(Uuid::new_v4()),
        }
    }

    pub(crate) fn to_bytes(&self) -> Vec<u8> {
        self.id.as_bytes().to_vec()
    }

    pub(crate) fn from_slice(bytes: &[u8]) -> Option<Self> {
        if let Ok(s) = std::str::from_utf8(bytes) {
            Some(DeleteToken::from_existing(s))
        } else if bytes.len() == 16 {
            Some(DeleteToken {
                id: MaybeUuid::Uuid(Uuid::from_slice(bytes).ok()?),
            })
        } else {
            None
        }
    }
}

impl std::str::FromStr for DeleteToken {
    type Err = std::convert::Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(DeleteToken::from_existing(s))
    }
}

impl std::fmt::Display for DeleteToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.id)
    }
}

#[cfg(test)]
mod tests {
    use super::{DeleteToken, MaybeUuid};
    use uuid::Uuid;

    #[test]
    fn string_delete_token() {
        let delete_token = DeleteToken::from_existing("blah");

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Name(String::from("blah"))
            }
        )
    }

    #[test]
    fn uuid_string_delete_token() {
        let uuid = Uuid::new_v4();

        let delete_token = DeleteToken::from_existing(&uuid.to_string());

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        )
    }

    #[test]
    fn bytes_delete_token() {
        let delete_token = DeleteToken::from_slice(b"blah").unwrap();

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Name(String::from("blah"))
            }
        )
    }

    #[test]
    fn uuid_bytes_delete_token() {
        let uuid = Uuid::new_v4();

        let delete_token = DeleteToken::from_slice(&uuid.as_bytes()[..]).unwrap();

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        )
    }

    #[test]
    fn uuid_bytes_string_delete_token() {
        let uuid = Uuid::new_v4();

        let delete_token = DeleteToken::from_slice(uuid.to_string().as_bytes()).unwrap();

        assert_eq!(
            delete_token,
            DeleteToken {
                id: MaybeUuid::Uuid(uuid),
            }
        )
    }
}
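The `DeleteToken` type above accepts either a UUID or an arbitrary name. A minimal, self-contained sketch of that parse-or-fallback behaviour, using only the `uuid` crate (the `MaybeToken` enum here is a stand-in for the crate's `MaybeUuid`, not part of pict-rs):

```rust
use uuid::Uuid;

// Stand-in for pict-rs' MaybeUuid: hold a parsed Uuid when possible,
// otherwise keep the original string.
#[derive(Debug, PartialEq)]
enum MaybeToken {
    Uuid(Uuid),
    Name(String),
}

fn parse_token(existing: &str) -> MaybeToken {
    match Uuid::parse_str(existing) {
        Ok(uuid) => MaybeToken::Uuid(uuid),
        Err(_) => MaybeToken::Name(existing.to_string()),
    }
}

fn main() {
    let uuid = Uuid::new_v4();
    assert_eq!(parse_token(&uuid.to_string()), MaybeToken::Uuid(uuid));
    assert_eq!(parse_token("blah"), MaybeToken::Name("blah".into()));
}
```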
@@ -1,13 +1,42 @@
+use diesel::{backend::Backend, sql_types::VarChar, AsExpression, FromSqlRow};
+
 use crate::formats::InternalFormat;
 use std::sync::Arc;
 
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, AsExpression, FromSqlRow)]
+#[diesel(sql_type = VarChar)]
 pub(crate) struct Hash {
     hash: Arc<[u8; 32]>,
     size: u64,
     format: InternalFormat,
 }
 
+impl diesel::serialize::ToSql<VarChar, diesel::pg::Pg> for Hash {
+    fn to_sql<'b>(
+        &'b self,
+        out: &mut diesel::serialize::Output<'b, '_, diesel::pg::Pg>,
+    ) -> diesel::serialize::Result {
+        let s = self.to_base64();
+
+        <String as diesel::serialize::ToSql<VarChar, diesel::pg::Pg>>::to_sql(
+            &s,
+            &mut out.reborrow(),
+        )
+    }
+}
+
+impl<B> diesel::deserialize::FromSql<VarChar, B> for Hash
+where
+    B: Backend,
+    String: diesel::deserialize::FromSql<VarChar, B>,
+{
+    fn from_sql(bytes: <B as Backend>::RawValue<'_>) -> diesel::deserialize::Result<Self> {
+        let s = String::from_sql(bytes)?;
+
+        Self::from_base64(s).ok_or_else(|| "Invalid base64 hash".to_string().into())
+    }
+}
+
 impl Hash {
     pub(crate) fn new(hash: [u8; 32], size: u64, format: InternalFormat) -> Self {
         Self {
@@ -30,6 +59,22 @@ impl Hash {
         hex::encode(self.to_bytes())
     }
 
+    pub(crate) fn to_base64(&self) -> String {
+        use base64::Engine;
+
+        base64::engine::general_purpose::STANDARD.encode(self.to_bytes())
+    }
+
+    pub(crate) fn from_base64(input: String) -> Option<Self> {
+        use base64::Engine;
+
+        let bytes = base64::engine::general_purpose::STANDARD
+            .decode(input)
+            .ok()?;
+
+        Self::from_bytes(&bytes)
+    }
+
     pub(super) fn to_bytes(&self) -> Vec<u8> {
         let format_byte = self.format.to_byte();
 
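The new `to_base64`/`from_base64` pair lets the hash be stored as text in Postgres. A small round-trip sketch of the same `base64` engine API used above (the byte values are arbitrary examples):

```rust
use base64::Engine;

fn main() {
    let bytes: Vec<u8> = vec![0xde, 0xad, 0xbe, 0xef];

    // Encode the raw bytes the same way Hash::to_base64 does.
    let encoded = base64::engine::general_purpose::STANDARD.encode(&bytes);

    // Hash::from_base64 maps a decode failure to None; here we just unwrap
    // to complete the round trip.
    let decoded = base64::engine::general_purpose::STANDARD
        .decode(&encoded)
        .unwrap();

    assert_eq!(bytes, decoded);
}
```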
74 src/repo/metrics.rs Normal file
@@ -0,0 +1,74 @@
use std::time::Instant;

pub(super) struct PushMetricsGuard {
    queue: &'static str,
    armed: bool,
}

pub(super) struct PopMetricsGuard {
    queue: &'static str,
    start: Instant,
    armed: bool,
}

pub(super) struct WaitMetricsGuard {
    start: Instant,
    armed: bool,
}

impl PushMetricsGuard {
    pub(super) fn guard(queue: &'static str) -> Self {
        Self { queue, armed: true }
    }

    pub(super) fn disarm(mut self) {
        self.armed = false;
    }
}

impl PopMetricsGuard {
    pub(super) fn guard(queue: &'static str) -> Self {
        Self {
            queue,
            start: Instant::now(),
            armed: true,
        }
    }

    pub(super) fn disarm(mut self) {
        self.armed = false;
    }
}

impl WaitMetricsGuard {
    pub(super) fn guard() -> Self {
        Self {
            start: Instant::now(),
            armed: true,
        }
    }

    pub(super) fn disarm(mut self) {
        self.armed = false;
    }
}

impl Drop for PushMetricsGuard {
    fn drop(&mut self) {
        metrics::increment_counter!("pict-rs.queue.push", "completed" => (!self.armed).to_string(), "queue" => self.queue);
    }
}

impl Drop for PopMetricsGuard {
    fn drop(&mut self) {
        metrics::histogram!("pict-rs.queue.pop.duration", self.start.elapsed().as_secs_f64(), "completed" => (!self.armed).to_string(), "queue" => self.queue);
        metrics::increment_counter!("pict-rs.queue.pop", "completed" => (!self.armed).to_string(), "queue" => self.queue);
    }
}

impl Drop for WaitMetricsGuard {
    fn drop(&mut self) {
        metrics::histogram!("pict-rs.upload.wait.duration", self.start.elapsed().as_secs_f64(), "completed" => (!self.armed).to_string());
        metrics::increment_counter!("pict-rs.upload.wait", "completed" => (!self.armed).to_string());
    }
}
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use tokio::task::JoinSet;
 
 use crate::{
@@ -33,7 +35,7 @@ pub(crate) async fn migrate_repo(old_repo: ArcRepo, new_repo: ArcRepo) -> Result
     tracing::warn!("Checks complete, migrating repo");
     tracing::warn!("{total_size} hashes will be migrated");
 
-    let mut hash_stream = old_repo.hashes().await.into_streamer();
+    let mut hash_stream = old_repo.hashes().into_streamer();
 
     let mut index = 0;
     while let Some(res) = hash_stream.next().await {
@@ -204,10 +206,14 @@ async fn do_migrate_hash(old_repo: &ArcRepo, new_repo: &ArcRepo, hash: Hash) ->
         return Ok(());
     };
 
-    let _ = new_repo.create_hash(hash.clone(), &identifier).await?;
-
     if let Some(details) = old_repo.details(&identifier).await? {
+        let _ = new_repo
+            .create_hash_with_timestamp(hash.clone(), &identifier, details.created_at())
+            .await?;
+
         new_repo.relate_details(&identifier, &details).await?;
+    } else {
+        let _ = new_repo.create_hash(hash.clone(), &identifier).await?;
     }
 
     if let Some(identifier) = old_repo.motion_identifier(hash.clone()).await? {
@@ -266,7 +272,7 @@ async fn do_migrate_hash_04<S: Store>(
     config: &Configuration,
     old_hash: sled::IVec,
 ) -> Result<(), Error> {
-    let Some(identifier) = old_repo.identifier::<S::Identifier>(old_hash.clone()).await? else {
+    let Some(identifier) = old_repo.identifier(old_hash.clone()).await? else {
         tracing::warn!("Skipping hash {}, no identifier", hex::encode(&old_hash));
         return Ok(());
     };
@@ -276,10 +282,8 @@ async fn do_migrate_hash_04<S: Store>(
     let hash_details = set_details(old_repo, new_repo, store, config, &identifier).await?;
 
     let aliases = old_repo.aliases_for_hash(old_hash.clone()).await?;
-    let variants = old_repo.variants::<S::Identifier>(old_hash.clone()).await?;
-    let motion_identifier = old_repo
-        .motion_identifier::<S::Identifier>(old_hash.clone())
-        .await?;
+    let variants = old_repo.variants(old_hash.clone()).await?;
+    let motion_identifier = old_repo.motion_identifier(old_hash.clone()).await?;
 
     let hash = old_hash[..].try_into().expect("Invalid hash size");
 
@@ -326,7 +330,7 @@ async fn set_details<S: Store>(
     new_repo: &ArcRepo,
     store: &S,
     config: &Configuration,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
 ) -> Result<Details, Error> {
     if let Some(details) = new_repo.details(identifier).await? {
         Ok(details)
@@ -342,9 +346,9 @@ async fn fetch_or_generate_details<S: Store>(
     old_repo: &OldSledRepo,
     store: &S,
     config: &Configuration,
-    identifier: &S::Identifier,
+    identifier: &Arc<str>,
 ) -> Result<Details, Error> {
-    let details_opt = old_repo.details(identifier).await?;
+    let details_opt = old_repo.details(identifier.clone()).await?;
 
     if let Some(details) = details_opt {
         Ok(details)
1854 src/repo/postgres.rs Normal file
File diff suppressed because it is too large

3 src/repo/postgres/embedded.rs Normal file
@@ -0,0 +1,3 @@
use refinery::embed_migrations;

embed_migrations!("./src/repo/postgres/migrations");
6 src/repo/postgres/job_status.rs Normal file
@@ -0,0 +1,6 @@
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, diesel_derive_enum::DbEnum)]
#[ExistingTypePath = "crate::repo::postgres::schema::sql_types::JobStatus"]
pub(super) enum JobStatus {
    New,
    Running,
}
11 src/repo/postgres/migrations/V0000__enable_pgcrypto.rs Normal file
@@ -0,0 +1,11 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.inject_custom("CREATE EXTENSION IF NOT EXISTS pgcrypto;");

    m.make::<Pg>().to_string()
}
33 src/repo/postgres/migrations/V0001__create_hashes.rs Normal file
@@ -0,0 +1,33 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("hashes", |t| {
        t.add_column(
            "hash",
            types::text()
                .primary(true)
                .unique(true)
                .nullable(false)
                .size(128),
        );
        t.add_column("identifier", types::text().unique(true).nullable(false));
        t.add_column(
            "motion_identifier",
            types::text().unique(true).nullable(true),
        );
        t.add_column(
            "created_at",
            types::datetime()
                .nullable(false)
                .default(AutogenFunction::CurrentTimestamp),
        );

        t.add_index("ordered_hash_index", types::index(["created_at", "hash"]));
    });

    m.make::<Pg>().to_string()
}
28 src/repo/postgres/migrations/V0002__create_variants.rs Normal file
@@ -0,0 +1,28 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("variants", |t| {
        t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
        t.add_column("hash", types::text().nullable(false));
        t.add_column("variant", types::text().nullable(false));
        t.add_column("identifier", types::text().nullable(false));
        t.add_column(
            "accessed",
            types::datetime()
                .nullable(false)
                .default(AutogenFunction::CurrentTimestamp),
        );

        t.add_foreign_key(&["hash"], "hashes", &["hash"]);
        t.add_index(
            "hash_variant_index",
            types::index(["hash", "variant"]).unique(true),
        );
    });

    m.make::<Pg>().to_string()
}
25 src/repo/postgres/migrations/V0003__create_aliases.rs Normal file
@@ -0,0 +1,25 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("aliases", |t| {
        t.add_column(
            "alias",
            types::text()
                .size(60)
                .primary(true)
                .unique(true)
                .nullable(false),
        );
        t.add_column("hash", types::text().nullable(false));
        t.add_column("token", types::text().size(60).nullable(false));

        t.add_foreign_key(&["hash"], "hashes", &["hash"]);
        t.add_index("aliases_hash_index", types::index(["hash"]));
    });

    m.make::<Pg>().to_string()
}
21 src/repo/postgres/migrations/V0004__create_settings.rs Normal file
@@ -0,0 +1,21 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("settings", |t| {
        t.add_column(
            "key",
            types::text()
                .size(80)
                .primary(true)
                .unique(true)
                .nullable(false),
        );
        t.add_column("value", types::text().size(80).nullable(false));
    });

    m.make::<Pg>().to_string()
}
17 src/repo/postgres/migrations/V0005__create_details.rs Normal file
@@ -0,0 +1,17 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("details", |t| {
        t.add_column(
            "identifier",
            types::text().primary(true).unique(true).nullable(false),
        );
        t.add_column("json", types::custom("jsonb").nullable(false));
    });

    m.make::<Pg>().to_string()
}
57 src/repo/postgres/migrations/V0006__create_queue.rs Normal file
@@ -0,0 +1,57 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.inject_custom("CREATE TYPE job_status AS ENUM ('new', 'running');");

    m.create_table("job_queue", |t| {
        t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
        t.add_column("queue", types::text().size(50).nullable(false));
        t.add_column("job", types::custom("jsonb").nullable(false));
        t.add_column("worker", types::uuid().nullable(true));
        t.add_column(
            "status",
            types::custom("job_status").nullable(false).default("new"),
        );
        t.add_column(
            "queue_time",
            types::datetime()
                .nullable(false)
                .default(AutogenFunction::CurrentTimestamp),
        );
        t.add_column("heartbeat", types::datetime().nullable(true));

        t.add_index("queue_status_index", types::index(["queue", "status"]));
        t.add_index("heartbeat_index", types::index(["heartbeat"]));
    });

    m.inject_custom(
        r#"
CREATE OR REPLACE FUNCTION queue_status_notify()
    RETURNS trigger AS
$$
BEGIN
    PERFORM pg_notify('queue_status_channel', NEW.id::text || ' ' || NEW.queue::text);
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
    "#
        .trim(),
    );

    m.inject_custom(
        r#"
CREATE TRIGGER queue_status
    AFTER INSERT OR UPDATE OF status
    ON job_queue
    FOR EACH ROW
EXECUTE PROCEDURE queue_status_notify();
    "#
        .trim(),
    );

    m.make::<Pg>().to_string()
}
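The `queue_status_notify` trigger publishes a payload of the form `<job uuid> <queue name>` on `queue_status_channel`. A minimal sketch of splitting such a payload on the Rust side (the payload string is a made-up example; how notifications are received depends on the Postgres client in use):

```rust
use uuid::Uuid;

fn parse_queue_notification(payload: &str) -> Option<(Uuid, String)> {
    // The trigger builds the payload as: NEW.id::text || ' ' || NEW.queue::text
    let (id, queue) = payload.split_once(' ')?;
    let id = Uuid::parse_str(id).ok()?;
    Some((id, queue.to_string()))
}

fn main() {
    let payload = "67e55044-10b1-426f-9247-bb680e5fe0c8 process";
    let (id, queue) = parse_queue_notification(payload).unwrap();
    println!("job {id} on queue {queue}");
}
```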
@@ -0,0 +1,17 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("store_migrations", |t| {
        t.add_column(
            "old_identifier",
            types::text().primary(true).nullable(false).unique(true),
        );
        t.add_column("new_identifier", types::text().nullable(false).unique(true));
    });

    m.make::<Pg>().to_string()
}
25 src/repo/postgres/migrations/V0008__create_proxies.rs Normal file
@@ -0,0 +1,25 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("proxies", |t| {
        t.add_column(
            "url",
            types::text().primary(true).unique(true).nullable(false),
        );
        t.add_column("alias", types::text().nullable(false));
        t.add_column(
            "accessed",
            types::datetime()
                .nullable(false)
                .default(AutogenFunction::CurrentTimestamp),
        );

        t.add_foreign_key(&["alias"], "aliases", &["alias"]);
    });

    m.make::<Pg>().to_string()
}
44 src/repo/postgres/migrations/V0009__create_uploads.rs Normal file
@@ -0,0 +1,44 @@
use barrel::backend::Pg;
use barrel::functions::AutogenFunction;
use barrel::{types, Migration};

pub(crate) fn migration() -> String {
    let mut m = Migration::new();

    m.create_table("uploads", |t| {
        t.inject_custom(r#""id" UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL UNIQUE"#);
        t.add_column("result", types::custom("jsonb").nullable(true));
        t.add_column(
            "created_at",
            types::datetime()
                .nullable(false)
                .default(AutogenFunction::CurrentTimestamp),
        );
    });

    m.inject_custom(
        r#"
CREATE OR REPLACE FUNCTION upload_completion_notify()
    RETURNS trigger AS
$$
BEGIN
    PERFORM pg_notify('upload_completion_channel', NEW.id::text);
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
    "#
        .trim(),
    );

    m.inject_custom(
        r#"
CREATE TRIGGER upload_result
    AFTER INSERT OR UPDATE OF result
    ON uploads
    FOR EACH ROW
EXECUTE PROCEDURE upload_completion_notify();
    "#,
    );

    m.make::<Pg>().to_string()
}
115 src/repo/postgres/schema.rs Normal file
@@ -0,0 +1,115 @@
// @generated automatically by Diesel CLI.

pub mod sql_types {
    #[derive(diesel::query_builder::QueryId, diesel::sql_types::SqlType)]
    #[diesel(postgres_type(name = "job_status"))]
    pub struct JobStatus;
}

diesel::table! {
    aliases (alias) {
        alias -> Text,
        hash -> Text,
        token -> Text,
    }
}

diesel::table! {
    details (identifier) {
        identifier -> Text,
        json -> Jsonb,
    }
}

diesel::table! {
    hashes (hash) {
        hash -> Text,
        identifier -> Text,
        motion_identifier -> Nullable<Text>,
        created_at -> Timestamp,
    }
}

diesel::table! {
    use diesel::sql_types::*;
    use super::sql_types::JobStatus;

    job_queue (id) {
        id -> Uuid,
        queue -> Text,
        job -> Jsonb,
        worker -> Nullable<Uuid>,
        status -> JobStatus,
        queue_time -> Timestamp,
        heartbeat -> Nullable<Timestamp>,
    }
}

diesel::table! {
    proxies (url) {
        url -> Text,
        alias -> Text,
        accessed -> Timestamp,
    }
}

diesel::table! {
    refinery_schema_history (version) {
        version -> Int4,
        #[max_length = 255]
        name -> Nullable<Varchar>,
        #[max_length = 255]
        applied_on -> Nullable<Varchar>,
        #[max_length = 255]
        checksum -> Nullable<Varchar>,
    }
}

diesel::table! {
    settings (key) {
        key -> Text,
        value -> Text,
    }
}

diesel::table! {
    store_migrations (old_identifier) {
        old_identifier -> Text,
        new_identifier -> Text,
    }
}

diesel::table! {
    uploads (id) {
        id -> Uuid,
        result -> Nullable<Jsonb>,
        created_at -> Timestamp,
    }
}

diesel::table! {
    variants (id) {
        id -> Uuid,
        hash -> Text,
        variant -> Text,
        identifier -> Text,
        accessed -> Timestamp,
    }
}

diesel::joinable!(aliases -> hashes (hash));
diesel::joinable!(proxies -> aliases (alias));
diesel::joinable!(variants -> hashes (hash));

diesel::allow_tables_to_appear_in_same_query!(
    aliases,
    details,
    hashes,
    job_queue,
    proxies,
    refinery_schema_history,
    settings,
    store_migrations,
    uploads,
    variants,
);
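With the generated schema in scope, the new tables can be queried through diesel's DSL. A rough sketch of reading one row from `settings` using the synchronous `PgConnection` API (the PR itself uses `diesel-async` pooled connections, so treat this only as an illustration of the query shape; the inline `table!` below simply repeats the `settings` definition from the generated schema):

```rust
use diesel::prelude::*;

diesel::table! {
    settings (key) {
        key -> Text,
        value -> Text,
    }
}

// Look up a single setting, returning None when the key is absent.
fn read_setting(conn: &mut PgConnection, key: &str) -> QueryResult<Option<String>> {
    settings::table
        .filter(settings::key.eq(key))
        .select(settings::value)
        .first::<String>(conn)
        .optional()
}
```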
267
src/repo/sled.rs
267
src/repo/sled.rs
|
@ -2,7 +2,6 @@ use crate::{
|
||||||
details::HumanDate,
|
details::HumanDate,
|
||||||
error_code::{ErrorCode, OwnedErrorCode},
|
error_code::{ErrorCode, OwnedErrorCode},
|
||||||
serde_str::Serde,
|
serde_str::Serde,
|
||||||
store::StoreError,
|
|
||||||
stream::{from_iterator, LocalBoxStream},
|
stream::{from_iterator, LocalBoxStream},
|
||||||
};
|
};
|
||||||
use sled::{transaction::TransactionError, Db, IVec, Transactional, Tree};
|
use sled::{transaction::TransactionError, Db, IVec, Transactional, Tree};
|
||||||
|
@ -13,26 +12,25 @@ use std::{
|
||||||
atomic::{AtomicU64, Ordering},
|
atomic::{AtomicU64, Ordering},
|
||||||
Arc, RwLock,
|
Arc, RwLock,
|
||||||
},
|
},
|
||||||
time::Instant,
|
|
||||||
};
|
};
|
||||||
use tokio::sync::Notify;
|
use tokio::sync::Notify;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
hash::Hash, Alias, AliasAccessRepo, AliasAlreadyExists, AliasRepo, BaseRepo, DeleteToken,
|
hash::Hash,
|
||||||
Details, DetailsRepo, FullRepo, HashAlreadyExists, HashPage, HashRepo, Identifier, JobId,
|
metrics::{PopMetricsGuard, PushMetricsGuard, WaitMetricsGuard},
|
||||||
OrderedHash, ProxyRepo, QueueRepo, RepoError, SettingsRepo, StoreMigrationRepo, UploadId,
|
Alias, AliasAccessRepo, AliasAlreadyExists, AliasRepo, BaseRepo, DeleteToken, Details,
|
||||||
UploadRepo, UploadResult, VariantAccessRepo, VariantAlreadyExists,
|
DetailsRepo, FullRepo, HashAlreadyExists, HashPage, HashRepo, JobId, OrderedHash, ProxyRepo,
|
||||||
|
QueueRepo, RepoError, SettingsRepo, StoreMigrationRepo, UploadId, UploadRepo, UploadResult,
|
||||||
|
VariantAccessRepo, VariantAlreadyExists,
|
||||||
};
|
};
|
||||||
|
|
||||||
macro_rules! b {
|
macro_rules! b {
|
||||||
($self:ident.$ident:ident, $expr:expr) => {{
|
($self:ident.$ident:ident, $expr:expr) => {{
|
||||||
let $ident = $self.$ident.clone();
|
let $ident = $self.$ident.clone();
|
||||||
|
|
||||||
let span = tracing::Span::current();
|
crate::sync::spawn_blocking(move || $expr)
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || span.in_scope(|| $expr))
|
|
||||||
.await
|
.await
|
||||||
.map_err(SledError::from)
|
.map_err(SledError::from)
|
||||||
.map_err(RepoError::from)?
|
.map_err(RepoError::from)?
|
||||||
|
@ -47,14 +45,20 @@ pub(crate) enum SledError {
|
||||||
Sled(#[from] sled::Error),
|
Sled(#[from] sled::Error),
|
||||||
|
|
||||||
#[error("Invalid details json")]
|
#[error("Invalid details json")]
|
||||||
Details(serde_json::Error),
|
Details(#[source] serde_json::Error),
|
||||||
|
|
||||||
#[error("Invalid upload result json")]
|
#[error("Invalid upload result json")]
|
||||||
UploadResult(serde_json::Error),
|
UploadResult(#[source] serde_json::Error),
|
||||||
|
|
||||||
#[error("Error parsing variant key")]
|
#[error("Error parsing variant key")]
|
||||||
VariantKey(#[from] VariantKeyError),
|
VariantKey(#[from] VariantKeyError),
|
||||||
|
|
||||||
|
#[error("Invalid string data in db")]
|
||||||
|
Utf8(#[source] std::str::Utf8Error),
|
||||||
|
|
||||||
|
#[error("Invalid job json")]
|
||||||
|
Job(#[source] serde_json::Error),
|
||||||
|
|
||||||
#[error("Operation panicked")]
|
#[error("Operation panicked")]
|
||||||
Panic,
|
Panic,
|
||||||
|
|
||||||
|
@ -65,9 +69,10 @@ pub(crate) enum SledError {
|
||||||
impl SledError {
|
impl SledError {
|
||||||
pub(super) const fn error_code(&self) -> ErrorCode {
|
pub(super) const fn error_code(&self) -> ErrorCode {
|
||||||
match self {
|
match self {
|
||||||
Self::Sled(_) | Self::VariantKey(_) => ErrorCode::SLED_ERROR,
|
Self::Sled(_) | Self::VariantKey(_) | Self::Utf8(_) => ErrorCode::SLED_ERROR,
|
||||||
Self::Details(_) => ErrorCode::EXTRACT_DETAILS,
|
Self::Details(_) => ErrorCode::EXTRACT_DETAILS,
|
||||||
Self::UploadResult(_) => ErrorCode::EXTRACT_UPLOAD_RESULT,
|
Self::UploadResult(_) => ErrorCode::EXTRACT_UPLOAD_RESULT,
|
||||||
|
Self::Job(_) => ErrorCode::EXTRACT_JOB,
|
||||||
Self::Panic => ErrorCode::PANIC,
|
Self::Panic => ErrorCode::PANIC,
|
||||||
Self::Conflict => ErrorCode::CONFLICTED_RECORD,
|
Self::Conflict => ErrorCode::CONFLICTED_RECORD,
|
||||||
}
|
}
|
||||||
|
@ -169,7 +174,7 @@ impl SledRepo {
|
||||||
|
|
||||||
let this = self.db.clone();
|
let this = self.db.clone();
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || {
|
crate::sync::spawn_blocking(move || {
|
||||||
let export = this.export();
|
let export = this.export();
|
||||||
export_db.import(export);
|
export_db.import(export);
|
||||||
})
|
})
|
||||||
|
@ -252,7 +257,7 @@ impl AliasAccessRepo for SledRepo {
|
||||||
let alias_access = self.alias_access.clone();
|
let alias_access = self.alias_access.clone();
|
||||||
let inverse_alias_access = self.inverse_alias_access.clone();
|
let inverse_alias_access = self.inverse_alias_access.clone();
|
||||||
|
|
||||||
let res = actix_rt::task::spawn_blocking(move || {
|
let res = crate::sync::spawn_blocking(move || {
|
||||||
(&alias_access, &inverse_alias_access).transaction(
|
(&alias_access, &inverse_alias_access).transaction(
|
||||||
|(alias_access, inverse_alias_access)| {
|
|(alias_access, inverse_alias_access)| {
|
||||||
if let Some(old) = alias_access.insert(alias.to_bytes(), &value_bytes)? {
|
if let Some(old) = alias_access.insert(alias.to_bytes(), &value_bytes)? {
|
||||||
|
@ -318,7 +323,7 @@ impl AliasAccessRepo for SledRepo {
|
||||||
let alias_access = self.alias_access.clone();
|
let alias_access = self.alias_access.clone();
|
||||||
let inverse_alias_access = self.inverse_alias_access.clone();
|
let inverse_alias_access = self.inverse_alias_access.clone();
|
||||||
|
|
||||||
let res = actix_rt::task::spawn_blocking(move || {
|
let res = crate::sync::spawn_blocking(move || {
|
||||||
(&alias_access, &inverse_alias_access).transaction(
|
(&alias_access, &inverse_alias_access).transaction(
|
||||||
|(alias_access, inverse_alias_access)| {
|
|(alias_access, inverse_alias_access)| {
|
||||||
if let Some(old) = alias_access.remove(alias.to_bytes())? {
|
if let Some(old) = alias_access.remove(alias.to_bytes())? {
|
||||||
|
@ -358,7 +363,7 @@ impl VariantAccessRepo for SledRepo {
|
||||||
let variant_access = self.variant_access.clone();
|
let variant_access = self.variant_access.clone();
|
||||||
let inverse_variant_access = self.inverse_variant_access.clone();
|
let inverse_variant_access = self.inverse_variant_access.clone();
|
||||||
|
|
||||||
let res = actix_rt::task::spawn_blocking(move || {
|
let res = crate::sync::spawn_blocking(move || {
|
||||||
(&variant_access, &inverse_variant_access).transaction(
|
(&variant_access, &inverse_variant_access).transaction(
|
||||||
|(variant_access, inverse_variant_access)| {
|
|(variant_access, inverse_variant_access)| {
|
||||||
if let Some(old) = variant_access.insert(&key, &value_bytes)? {
|
if let Some(old) = variant_access.insert(&key, &value_bytes)? {
|
||||||
|
@ -428,7 +433,7 @@ impl VariantAccessRepo for SledRepo {
|
||||||
let variant_access = self.variant_access.clone();
|
let variant_access = self.variant_access.clone();
|
||||||
let inverse_variant_access = self.inverse_variant_access.clone();
|
let inverse_variant_access = self.inverse_variant_access.clone();
|
||||||
|
|
||||||
let res = actix_rt::task::spawn_blocking(move || {
|
let res = crate::sync::spawn_blocking(move || {
|
||||||
(&variant_access, &inverse_variant_access).transaction(
|
(&variant_access, &inverse_variant_access).transaction(
|
||||||
|(variant_access, inverse_variant_access)| {
|
|(variant_access, inverse_variant_access)| {
|
||||||
if let Some(old) = variant_access.remove(&key)? {
|
if let Some(old) = variant_access.remove(&key)? {
|
||||||
|
@ -486,54 +491,6 @@ impl From<InnerUploadResult> for UploadResult {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct PushMetricsGuard {
|
|
||||||
queue: &'static str,
|
|
||||||
armed: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct PopMetricsGuard {
|
|
||||||
queue: &'static str,
|
|
||||||
start: Instant,
|
|
||||||
armed: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PushMetricsGuard {
|
|
||||||
fn guard(queue: &'static str) -> Self {
|
|
||||||
Self { queue, armed: true }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn disarm(mut self) {
|
|
||||||
self.armed = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PopMetricsGuard {
|
|
||||||
fn guard(queue: &'static str) -> Self {
|
|
||||||
Self {
|
|
||||||
queue,
|
|
||||||
start: Instant::now(),
|
|
||||||
armed: true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn disarm(mut self) {
|
|
||||||
self.armed = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for PushMetricsGuard {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
metrics::increment_counter!("pict-rs.queue.push", "completed" => (!self.armed).to_string(), "queue" => self.queue);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for PopMetricsGuard {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
metrics::histogram!("pict-rs.queue.pop.duration", self.start.elapsed().as_secs_f64(), "completed" => (!self.armed).to_string(), "queue" => self.queue);
|
|
||||||
metrics::increment_counter!("pict-rs.queue.pop", "completed" => (!self.armed).to_string(), "queue" => self.queue);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl UploadRepo for SledRepo {
|
impl UploadRepo for SledRepo {
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
|
@ -547,6 +504,7 @@ impl UploadRepo for SledRepo {
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
#[tracing::instrument(skip(self))]
|
||||||
async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, RepoError> {
|
async fn wait(&self, upload_id: UploadId) -> Result<UploadResult, RepoError> {
|
||||||
|
let guard = WaitMetricsGuard::guard();
|
||||||
let mut subscriber = self.uploads.watch_prefix(upload_id.as_bytes());
|
let mut subscriber = self.uploads.watch_prefix(upload_id.as_bytes());
|
||||||
|
|
||||||
let bytes = upload_id.as_bytes().to_vec();
|
let bytes = upload_id.as_bytes().to_vec();
|
||||||
|
@ -556,6 +514,7 @@ impl UploadRepo for SledRepo {
|
||||||
if bytes != b"1" {
|
if bytes != b"1" {
|
||||||
let result: InnerUploadResult =
|
let result: InnerUploadResult =
|
||||||
serde_json::from_slice(&bytes).map_err(SledError::UploadResult)?;
|
serde_json::from_slice(&bytes).map_err(SledError::UploadResult)?;
|
||||||
|
guard.disarm();
|
||||||
return Ok(result.into());
|
return Ok(result.into());
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
@ -571,6 +530,8 @@ impl UploadRepo for SledRepo {
|
||||||
if value != b"1" {
|
if value != b"1" {
|
||||||
let result: InnerUploadResult =
|
let result: InnerUploadResult =
|
||||||
serde_json::from_slice(&value).map_err(SledError::UploadResult)?;
|
serde_json::from_slice(&value).map_err(SledError::UploadResult)?;
|
||||||
|
|
||||||
|
guard.disarm();
|
||||||
return Ok(result.into());
|
return Ok(result.into());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -648,19 +609,31 @@ fn job_key(queue: &'static str, job_id: JobId) -> Arc<[u8]> {
|
||||||
Arc::from(key)
|
Arc::from(key)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn try_into_arc_str(ivec: IVec) -> Result<Arc<str>, SledError> {
|
||||||
|
std::str::from_utf8(&ivec[..])
|
||||||
|
.map_err(SledError::Utf8)
|
||||||
|
.map(String::from)
|
||||||
|
.map(Arc::from)
|
||||||
|
}
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl QueueRepo for SledRepo {
|
impl QueueRepo for SledRepo {
|
||||||
#[tracing::instrument(skip(self, job), fields(job = %String::from_utf8_lossy(&job)))]
|
#[tracing::instrument(skip(self))]
|
||||||
async fn push(&self, queue_name: &'static str, job: Arc<[u8]>) -> Result<JobId, RepoError> {
|
async fn push(
|
||||||
|
&self,
|
||||||
|
queue_name: &'static str,
|
||||||
|
job: serde_json::Value,
|
||||||
|
) -> Result<JobId, RepoError> {
|
||||||
let metrics_guard = PushMetricsGuard::guard(queue_name);
|
let metrics_guard = PushMetricsGuard::guard(queue_name);
|
||||||
|
|
||||||
let id = JobId::gen();
|
let id = JobId::gen();
|
||||||
let key = job_key(queue_name, id);
|
let key = job_key(queue_name, id);
|
||||||
|
let job = serde_json::to_vec(&job).map_err(SledError::Job)?;
|
||||||
|
|
||||||
let queue = self.queue.clone();
|
let queue = self.queue.clone();
|
||||||
let job_state = self.job_state.clone();
|
let job_state = self.job_state.clone();
|
||||||
|
|
||||||
let res = actix_rt::task::spawn_blocking(move || {
|
let res = crate::sync::spawn_blocking(move || {
|
||||||
(&queue, &job_state).transaction(|(queue, job_state)| {
|
(&queue, &job_state).transaction(|(queue, job_state)| {
|
||||||
let state = JobState::pending();
|
let state = JobState::pending();
|
||||||
|
|
||||||
|
@ -687,7 +660,7 @@ impl QueueRepo for SledRepo {
|
||||||
.write()
|
.write()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.entry(queue_name)
|
.entry(queue_name)
|
||||||
.or_insert_with(|| Arc::new(Notify::new()))
|
.or_insert_with(crate::sync::notify)
|
||||||
.notify_one();
|
.notify_one();
|
||||||
|
|
||||||
metrics_guard.disarm();
|
metrics_guard.disarm();
|
||||||
|
@ -700,7 +673,7 @@ impl QueueRepo for SledRepo {
|
||||||
&self,
|
&self,
|
||||||
queue_name: &'static str,
|
queue_name: &'static str,
|
||||||
worker_id: Uuid,
|
worker_id: Uuid,
|
||||||
) -> Result<(JobId, Arc<[u8]>), RepoError> {
|
) -> Result<(JobId, serde_json::Value), RepoError> {
|
||||||
let metrics_guard = PopMetricsGuard::guard(queue_name);
|
let metrics_guard = PopMetricsGuard::guard(queue_name);
|
||||||
|
|
||||||
let now = time::OffsetDateTime::now_utc();
|
let now = time::OffsetDateTime::now_utc();
|
||||||
|
@ -710,7 +683,7 @@ impl QueueRepo for SledRepo {
|
||||||
let job_state = self.job_state.clone();
|
let job_state = self.job_state.clone();
|
||||||
|
|
||||||
let span = tracing::Span::current();
|
let span = tracing::Span::current();
|
||||||
let opt = actix_rt::task::spawn_blocking(move || {
|
let opt = crate::sync::spawn_blocking(move || {
|
||||||
let _guard = span.enter();
|
let _guard = span.enter();
|
||||||
// Job IDs are generated with Uuid version 7 - defining their first bits as a
|
// Job IDs are generated with Uuid version 7 - defining their first bits as a
|
||||||
// timestamp. Scanning a prefix should give us jobs in the order they were queued.
|
// timestamp. Scanning a prefix should give us jobs in the order they were queued.
|
||||||
|
@ -755,9 +728,12 @@ impl QueueRepo for SledRepo {
|
||||||
|
|
||||||
let opt = queue
|
let opt = queue
|
||||||
.get(&key)?
|
.get(&key)?
|
||||||
.map(|job_bytes| (job_id, Arc::from(job_bytes.to_vec())));
|
.map(|ivec| serde_json::from_slice(&ivec[..]))
|
||||||
|
.transpose()
|
||||||
|
.map_err(SledError::Job)?;
|
||||||
|
|
||||||
return Ok(opt) as Result<Option<(JobId, Arc<[u8]>)>, SledError>;
|
return Ok(opt.map(|job| (job_id, job)))
|
||||||
|
as Result<Option<(JobId, serde_json::Value)>, SledError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(None)
|
Ok(None)
|
||||||
|
@ -781,9 +757,7 @@ impl QueueRepo for SledRepo {
|
||||||
notify
|
notify
|
||||||
} else {
|
} else {
|
||||||
let mut guard = self.queue_notifier.write().unwrap();
|
let mut guard = self.queue_notifier.write().unwrap();
|
||||||
let entry = guard
|
let entry = guard.entry(queue_name).or_insert_with(crate::sync::notify);
|
||||||
.entry(queue_name)
|
|
||||||
.or_insert_with(|| Arc::new(Notify::new()));
|
|
||||||
Arc::clone(entry)
|
Arc::clone(entry)
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -802,7 +776,7 @@ impl QueueRepo for SledRepo {
|
||||||
|
|
||||||
let job_state = self.job_state.clone();
|
let job_state = self.job_state.clone();
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || {
|
crate::sync::spawn_blocking(move || {
|
||||||
if let Some(state) = job_state.get(&key)? {
|
if let Some(state) = job_state.get(&key)? {
|
||||||
let new_state = JobState::running(worker_id);
|
let new_state = JobState::running(worker_id);
|
||||||
|
|
||||||
|
@ -832,7 +806,7 @@ impl QueueRepo for SledRepo {
|
||||||
let queue = self.queue.clone();
|
let queue = self.queue.clone();
|
||||||
let job_state = self.job_state.clone();
|
let job_state = self.job_state.clone();
|
||||||
|
|
||||||
let res = actix_rt::task::spawn_blocking(move || {
|
let res = crate::sync::spawn_blocking(move || {
|
||||||
(&queue, &job_state).transaction(|(queue, job_state)| {
|
(&queue, &job_state).transaction(|(queue, job_state)| {
|
||||||
queue.remove(&key[..])?;
|
queue.remove(&key[..])?;
|
||||||
job_state.remove(&key[..])?;
|
job_state.remove(&key[..])?;
|
||||||
|
@ -949,43 +923,46 @@ fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
#[async_trait::async_trait(?Send)]
|
||||||
impl DetailsRepo for SledRepo {
|
impl DetailsRepo for SledRepo {
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn relate_details(
|
async fn relate_details(
|
||||||
&self,
|
&self,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
details: &Details,
|
details: &Details,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
let details = serde_json::to_vec(&details.inner)
|
let details = serde_json::to_vec(&details.inner).map_err(SledError::Details)?;
|
||||||
.map_err(SledError::Details)
|
|
||||||
.map_err(RepoError::from)?;
|
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.identifier_details,
|
self.identifier_details,
|
||||||
identifier_details.insert(key, details)
|
identifier_details.insert(key.as_bytes(), details)
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn details(&self, identifier: &dyn Identifier) -> Result<Option<Details>, StoreError> {
|
async fn details(&self, identifier: &Arc<str>) -> Result<Option<Details>, RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
|
|
||||||
let opt = b!(self.identifier_details, identifier_details.get(key));
|
let opt = b!(
|
||||||
|
self.identifier_details,
|
||||||
|
identifier_details.get(key.as_bytes())
|
||||||
|
);
|
||||||
|
|
||||||
opt.map(|ivec| serde_json::from_slice(&ivec).map(|inner| Details { inner }))
|
opt.map(|ivec| serde_json::from_slice(&ivec).map(|inner| Details { inner }))
|
||||||
.transpose()
|
.transpose()
|
||||||
.map_err(SledError::Details)
|
.map_err(SledError::Details)
|
||||||
.map_err(RepoError::from)
|
.map_err(RepoError::from)
|
||||||
.map_err(StoreError::from)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn cleanup_details(&self, identifier: &dyn Identifier) -> Result<(), StoreError> {
|
async fn cleanup_details(&self, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
|
|
||||||
b!(self.identifier_details, identifier_details.remove(key));
|
b!(
|
||||||
|
self.identifier_details,
|
||||||
|
identifier_details.remove(key.as_bytes())
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -999,24 +976,28 @@ impl StoreMigrationRepo for SledRepo {
|
||||||
|
|
||||||
async fn mark_migrated(
|
async fn mark_migrated(
|
||||||
&self,
|
&self,
|
||||||
old_identifier: &dyn Identifier,
|
old_identifier: &Arc<str>,
|
||||||
new_identifier: &dyn Identifier,
|
new_identifier: &Arc<str>,
|
||||||
) -> Result<(), StoreError> {
|
) -> Result<(), RepoError> {
|
||||||
let key = new_identifier.to_bytes()?;
|
let key = new_identifier.clone();
|
||||||
let value = old_identifier.to_bytes()?;
|
let value = old_identifier.clone();
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.migration_identifiers,
|
self.migration_identifiers,
|
||||||
migration_identifiers.insert(key, value)
|
migration_identifiers.insert(key.as_bytes(), value.as_bytes())
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn is_migrated(&self, identifier: &dyn Identifier) -> Result<bool, StoreError> {
|
async fn is_migrated(&self, identifier: &Arc<str>) -> Result<bool, RepoError> {
|
||||||
let key = identifier.to_bytes()?;
|
let key = identifier.clone();
|
||||||
|
|
||||||
Ok(b!(self.migration_identifiers, migration_identifiers.get(key)).is_some())
|
Ok(b!(
|
||||||
|
self.migration_identifiers,
|
||||||
|
migration_identifiers.get(key.as_bytes())
|
||||||
|
)
|
||||||
|
.is_some())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn clear(&self) -> Result<(), RepoError> {
|
async fn clear(&self) -> Result<(), RepoError> {
|
||||||
|
@ -1062,17 +1043,6 @@ impl HashRepo for SledRepo {
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>> {
|
|
||||||
let iter = self.hashes.iter().keys().filter_map(|res| {
|
|
||||||
res.map_err(SledError::from)
|
|
||||||
.map_err(RepoError::from)
|
|
||||||
.map(Hash::from_ivec)
|
|
||||||
.transpose()
|
|
||||||
});
|
|
||||||
|
|
||||||
Box::pin(from_iterator(iter, 8))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
|
||||||
let opt = b!(self.hashes, hashes.get(hash.to_ivec()));
|
let opt = b!(self.hashes, hashes.get(hash.to_ivec()));
|
||||||
|
|
||||||
|
@ -1095,7 +1065,7 @@ impl HashRepo for SledRepo {
|
||||||
None => (self.hashes_inverse.iter(), None),
|
None => (self.hashes_inverse.iter(), None),
|
||||||
};
|
};
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || {
|
crate::sync::spawn_blocking(move || {
|
||||||
let page_iter = page_iter
|
let page_iter = page_iter
|
||||||
.keys()
|
.keys()
|
||||||
.rev()
|
.rev()
|
||||||
|
@ -1147,7 +1117,7 @@ impl HashRepo for SledRepo {
|
||||||
let page_iter = self.hashes_inverse.range(..=date_nanos);
|
let page_iter = self.hashes_inverse.range(..=date_nanos);
|
||||||
let prev_iter = Some(self.hashes_inverse.range(date_nanos..));
|
let prev_iter = Some(self.hashes_inverse.range(date_nanos..));
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || {
|
crate::sync::spawn_blocking(move || {
|
||||||
let page_iter = page_iter
|
let page_iter = page_iter
|
||||||
.keys()
|
.keys()
|
||||||
.rev()
|
.rev()
|
||||||
|
@ -1197,10 +1167,10 @@ impl HashRepo for SledRepo {
|
||||||
async fn create_hash_with_timestamp(
|
async fn create_hash_with_timestamp(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
timestamp: time::OffsetDateTime,
|
timestamp: time::OffsetDateTime,
|
||||||
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
|
) -> Result<Result<(), HashAlreadyExists>, RepoError> {
|
||||||
let identifier: sled::IVec = identifier.to_bytes()?.into();
|
let identifier: sled::IVec = identifier.as_bytes().to_vec().into();
|
||||||
|
|
||||||
let hashes = self.hashes.clone();
|
let hashes = self.hashes.clone();
|
||||||
let hashes_inverse = self.hashes_inverse.clone();
|
let hashes_inverse = self.hashes_inverse.clone();
|
||||||
|
@ -1234,63 +1204,56 @@ impl HashRepo for SledRepo {
|
||||||
match res {
|
match res {
|
||||||
Ok(res) => Ok(res),
|
Ok(res) => Ok(res),
|
||||||
Err(TransactionError::Abort(e) | TransactionError::Storage(e)) => {
|
Err(TransactionError::Abort(e) | TransactionError::Storage(e)) => {
|
||||||
Err(StoreError::from(RepoError::from(SledError::from(e))))
|
Err(RepoError::from(SledError::from(e)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn update_identifier(
|
async fn update_identifier(&self, hash: Hash, identifier: &Arc<str>) -> Result<(), RepoError> {
|
||||||
&self,
|
let identifier = identifier.clone();
|
||||||
hash: Hash,
|
|
||||||
identifier: &dyn Identifier,
|
|
||||||
) -> Result<(), StoreError> {
|
|
||||||
let identifier = identifier.to_bytes()?;
|
|
||||||
|
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
|
|
||||||
b!(
|
b!(
|
||||||
self.hash_identifiers,
|
self.hash_identifiers,
|
||||||
hash_identifiers.insert(hash, identifier)
|
hash_identifiers.insert(hash, identifier.as_bytes())
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
|
async fn identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
|
||||||
let hash = hash.to_ivec();
|
let hash = hash.to_ivec();
|
||||||
|
|
||||||
let Some(ivec) = b!(self.hash_identifiers, hash_identifiers.get(hash)) else {
|
let opt = b!(self.hash_identifiers, hash_identifiers.get(hash));
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Some(Arc::from(ivec.to_vec())))
|
Ok(opt.map(try_into_arc_str).transpose()?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
async fn relate_variant_identifier(
|
async fn relate_variant_identifier(
|
||||||
&self,
|
&self,
|
||||||
hash: Hash,
|
hash: Hash,
|
||||||
variant: String,
|
variant: String,
|
||||||
identifier: &dyn Identifier,
|
identifier: &Arc<str>,
|
||||||
) -> Result<Result<(), VariantAlreadyExists>, StoreError> {
|
) -> Result<Result<(), VariantAlreadyExists>, RepoError> {
|
||||||
let hash = hash.to_bytes();
|
let hash = hash.to_bytes();
|
||||||
|
|
||||||
let key = variant_key(&hash, &variant);
|
let key = variant_key(&hash, &variant);
|
||||||
let value = identifier.to_bytes()?;
|
let value = identifier.clone();
|
||||||
|
|
||||||
let hash_variant_identifiers = self.hash_variant_identifiers.clone();
|
let hash_variant_identifiers = self.hash_variant_identifiers.clone();
|
||||||
|
|
||||||
actix_rt::task::spawn_blocking(move || {
|
crate::sync::spawn_blocking(move || {
|
||||||
hash_variant_identifiers
|
hash_variant_identifiers
|
||||||
.compare_and_swap(key, Option::<&[u8]>::None, Some(value))
|
.compare_and_swap(key, Option::<&[u8]>::None, Some(value.as_bytes()))
|
||||||
.map(|res| res.map_err(|_| VariantAlreadyExists))
|
.map(|res| res.map_err(|_| VariantAlreadyExists))
|
||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
.map_err(|_| RepoError::Canceled)?
|
.map_err(|_| RepoError::Canceled)?
|
||||||
.map_err(SledError::from)
|
.map_err(SledError::from)
|
||||||
.map_err(RepoError::from)
|
.map_err(RepoError::from)
|
||||||
.map_err(StoreError::from)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "trace", skip(self))]
|
#[tracing::instrument(level = "trace", skip(self))]
|
||||||
|
@@ -1298,7 +1261,7 @@ impl HashRepo for SledRepo
         &self,
         hash: Hash,
         variant: String,
-    ) -> Result<Option<Arc<[u8]>>, RepoError> {
+    ) -> Result<Option<Arc<str>>, RepoError> {
         let hash = hash.to_bytes();

         let key = variant_key(&hash, &variant);
@@ -1308,11 +1271,11 @@ impl HashRepo for SledRepo
             hash_variant_identifiers.get(key)
         );

-        Ok(opt.map(|ivec| Arc::from(ivec.to_vec())))
+        Ok(opt.map(try_into_arc_str).transpose()?)
     }

     #[tracing::instrument(level = "debug", skip(self))]
-    async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<[u8]>)>, RepoError> {
+    async fn variants(&self, hash: Hash) -> Result<Vec<(String, Arc<str>)>, RepoError> {
         let hash = hash.to_ivec();

         let vec = b!(
@@ -1321,14 +1284,14 @@ impl HashRepo for SledRepo
                 .scan_prefix(hash.clone())
                 .filter_map(|res| res.ok())
                 .filter_map(|(key, ivec)| {
-                    let identifier = Arc::from(ivec.to_vec());
+                    let identifier = try_into_arc_str(ivec).ok();

                     let variant = variant_from_key(&hash, &key);
                     if variant.is_none() {
                         tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
                     }

-                    Some((variant?, identifier))
+                    Some((variant?, identifier?))
                 })
                 .collect::<Vec<_>>()) as Result<Vec<_>, SledError>
         );
@@ -1350,25 +1313,25 @@ impl HashRepo for SledRepo
         Ok(())
     }

-    #[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
+    #[tracing::instrument(level = "trace", skip(self))]
     async fn relate_motion_identifier(
         &self,
         hash: Hash,
-        identifier: &dyn Identifier,
-    ) -> Result<(), StoreError> {
+        identifier: &Arc<str>,
+    ) -> Result<(), RepoError> {
         let hash = hash.to_ivec();
-        let bytes = identifier.to_bytes()?;
+        let bytes = identifier.clone();

         b!(
             self.hash_motion_identifiers,
-            hash_motion_identifiers.insert(hash, bytes)
+            hash_motion_identifiers.insert(hash, bytes.as_bytes())
         );

         Ok(())
     }

     #[tracing::instrument(level = "trace", skip(self))]
-    async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<[u8]>>, RepoError> {
+    async fn motion_identifier(&self, hash: Hash) -> Result<Option<Arc<str>>, RepoError> {
         let hash = hash.to_ivec();

         let opt = b!(
@@ -1376,7 +1339,7 @@ impl HashRepo for SledRepo
             hash_motion_identifiers.get(hash)
         );

-        Ok(opt.map(|ivec| Arc::from(ivec.to_vec())))
+        Ok(opt.map(try_into_arc_str).transpose()?)
     }

     #[tracing::instrument(skip(self))]
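Note: the new sled code above maps stored values through try_into_arc_str, whose definition falls outside these hunks. Below is a minimal sketch of what such a helper can look like, assuming a SledError variant that wraps std::str::Utf8Error (the repo_04 hunks further down add such a variant to their own SledError); the exact body in the repository may differ.

    use std::sync::Arc;

    #[derive(Debug, thiserror::Error)]
    enum SledError {
        #[error("Error reading string")]
        Utf8(#[from] std::str::Utf8Error),
    }

    // Turn a raw sled value into an owned Arc<str>, surfacing invalid UTF-8 as an error.
    fn try_into_arc_str(ivec: sled::IVec) -> Result<Arc<str>, SledError> {
        std::str::from_utf8(&ivec[..])
            .map_err(SledError::from)
            .map(Arc::from)
    }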
@@ -2,10 +2,9 @@ use crate::{
     config,
     details::Details,
     repo::{Alias, DeleteToken},
-    store::{Identifier, StoreError},
 };
 use futures_core::Stream;
-use std::fmt::Debug;
+use std::{fmt::Debug, sync::Arc};

 pub(crate) use self::sled::SledRepo;

@@ -46,7 +45,7 @@ pub(crate) trait SettingsRepo: BaseRepo {

 #[async_trait::async_trait(?Send)]
 pub(crate) trait IdentifierRepo: BaseRepo {
-    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, StoreError>;
+    async fn details(&self, identifier: Arc<str>) -> Result<Option<Details>, RepoError>;
 }

 #[async_trait::async_trait(?Send)]
@@ -57,20 +56,11 @@ pub(crate) trait HashRepo: BaseRepo {

     async fn hashes(&self) -> Self::Stream;

-    async fn identifier<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<Option<I>, StoreError>;
+    async fn identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError>;

-    async fn variants<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<Vec<(String, I)>, StoreError>;
+    async fn variants(&self, hash: Self::Bytes) -> Result<Vec<(String, Arc<str>)>, RepoError>;

-    async fn motion_identifier<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<Option<I>, StoreError>;
+    async fn motion_identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError>;
 }

 #[async_trait::async_trait(?Send)]
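Note: the hunks above replace the generic identifier<I: Identifier> style of method with methods that return plain Arc<str> values, which keeps the traits object safe. A condensed, hypothetical sketch of the same pattern (the names here are illustrative, not the pict-rs API):

    use std::{collections::HashMap, sync::Arc};

    // With identifiers as Arc<str>, a lookup trait needs no generic methods
    // and can sit behind a trait object such as Arc<dyn VariantLookup>.
    trait VariantLookup {
        fn variant_identifier(&self, hash: &[u8], variant: &str) -> Option<Arc<str>>;
    }

    struct InMemory {
        variants: HashMap<(Vec<u8>, String), Arc<str>>,
    }

    impl VariantLookup for InMemory {
        fn variant_identifier(&self, hash: &[u8], variant: &str) -> Option<Arc<str>> {
            self.variants
                .get(&(hash.to_vec(), variant.to_string()))
                .cloned()
        }
    }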
@@ -1,10 +1,9 @@
 use crate::{
     details::HumanDate,
     repo_04::{
-        Alias, AliasRepo, BaseRepo, DeleteToken, Details, HashRepo, Identifier, IdentifierRepo,
-        RepoError, SettingsRepo,
+        Alias, AliasRepo, BaseRepo, DeleteToken, Details, HashRepo, IdentifierRepo, RepoError,
+        SettingsRepo,
     },
-    store::StoreError,
     stream::{from_iterator, LocalBoxStream},
 };
 use sled::{Db, IVec, Tree};
@@ -35,9 +34,7 @@ macro_rules! b {
     ($self:ident.$ident:ident, $expr:expr) => {{
         let $ident = $self.$ident.clone();

-        let span = tracing::Span::current();
-
-        actix_rt::task::spawn_blocking(move || span.in_scope(|| $expr))
+        crate::sync::spawn_blocking(move || $expr)
             .await
             .map_err(SledError::from)
             .map_err(RepoError::from)?
@@ -56,6 +53,9 @@ pub(crate) enum SledError {

     #[error("Operation panicked")]
     Panic,
+
+    #[error("Error reading string")]
+    Utf8(#[from] std::str::Utf8Error),
 }

 #[derive(Clone)]
@@ -179,17 +179,17 @@ fn variant_from_key(hash: &[u8], key: &[u8]) -> Option<String> {

 #[async_trait::async_trait(?Send)]
 impl IdentifierRepo for SledRepo {
-    #[tracing::instrument(level = "trace", skip(self, identifier), fields(identifier = identifier.string_repr()))]
-    async fn details<I: Identifier>(&self, identifier: &I) -> Result<Option<Details>, StoreError> {
-        let key = identifier.to_bytes()?;
-
-        let opt = b!(self.identifier_details, identifier_details.get(key));
+    #[tracing::instrument(level = "trace", skip(self))]
+    async fn details(&self, key: Arc<str>) -> Result<Option<Details>, RepoError> {
+        let opt = b!(
+            self.identifier_details,
+            identifier_details.get(key.as_bytes())
+        );

         opt.map(|ivec| serde_json::from_slice::<OldDetails>(&ivec))
             .transpose()
             .map_err(SledError::from)
             .map_err(RepoError::from)
-            .map_err(StoreError::from)
             .map(|opt| opt.and_then(OldDetails::into_details))
     }
 }
@@ -219,29 +219,27 @@ impl HashRepo for SledRepo {
     }

     #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
-    async fn identifier<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<Option<I>, StoreError> {
+    async fn identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError> {
         let Some(ivec) = b!(self.hash_identifiers, hash_identifiers.get(hash)) else {
             return Ok(None);
         };

-        Ok(Some(I::from_bytes(ivec.to_vec())?))
+        Ok(Some(Arc::from(
+            std::str::from_utf8(&ivec[..])
+                .map_err(SledError::from)?
+                .to_string(),
+        )))
     }

     #[tracing::instrument(level = "debug", skip(self, hash), fields(hash = hex::encode(&hash)))]
-    async fn variants<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<Vec<(String, I)>, StoreError> {
+    async fn variants(&self, hash: Self::Bytes) -> Result<Vec<(String, Arc<str>)>, RepoError> {
         let vec = b!(
             self.hash_variant_identifiers,
             Ok(hash_variant_identifiers
                 .scan_prefix(&hash)
                 .filter_map(|res| res.ok())
                 .filter_map(|(key, ivec)| {
-                    let identifier = I::from_bytes(ivec.to_vec()).ok();
+                    let identifier = String::from_utf8(ivec.to_vec()).ok();
                     if identifier.is_none() {
                         tracing::warn!(
                             "Skipping an identifier: {}",
@@ -254,7 +252,7 @@ impl HashRepo for SledRepo
                         tracing::warn!("Skipping a variant: {}", String::from_utf8_lossy(&key));
                     }

-                    Some((variant?, identifier?))
+                    Some((variant?, Arc::from(identifier?)))
                 })
                 .collect::<Vec<_>>()) as Result<Vec<_>, SledError>
         );
@@ -263,16 +261,20 @@ impl HashRepo for SledRepo
     }

     #[tracing::instrument(level = "trace", skip(self, hash), fields(hash = hex::encode(&hash)))]
-    async fn motion_identifier<I: Identifier + 'static>(
-        &self,
-        hash: Self::Bytes,
-    ) -> Result<Option<I>, StoreError> {
+    async fn motion_identifier(&self, hash: Self::Bytes) -> Result<Option<Arc<str>>, RepoError> {
         let opt = b!(
             self.hash_motion_identifiers,
             hash_motion_identifiers.get(hash)
         );

-        opt.map(|ivec| I::from_bytes(ivec.to_vec())).transpose()
+        opt.map(|ivec| {
+            Ok(Arc::from(
+                std::str::from_utf8(&ivec[..])
+                    .map_err(SledError::from)?
+                    .to_string(),
+            ))
+        })
+        .transpose()
     }
 }
208	src/store.rs
@@ -1,10 +1,9 @@
 use actix_web::web::Bytes;
-use base64::{prelude::BASE64_STANDARD, Engine};
 use futures_core::Stream;
 use std::{fmt::Debug, sync::Arc};
 use tokio::io::{AsyncRead, AsyncWrite};

-use crate::error_code::ErrorCode;
+use crate::{error_code::ErrorCode, stream::LocalBoxStream};

 pub(crate) mod file_store;
 pub(crate) mod object_store;
@@ -40,9 +39,17 @@ impl StoreError {
             Self::FileNotFound(_) | Self::ObjectNotFound(_) => ErrorCode::NOT_FOUND,
         }
     }

     pub(crate) const fn is_not_found(&self) -> bool {
         matches!(self, Self::FileNotFound(_)) || matches!(self, Self::ObjectNotFound(_))
     }
+
+    pub(crate) const fn is_disconnected(&self) -> bool {
+        match self {
+            Self::Repo(e) => e.is_disconnected(),
+            _ => false,
+        }
+    }
 }

 impl From<crate::store::file_store::FileError> for StoreError {
@@ -70,32 +77,15 @@ impl From<crate::store::object_store::ObjectError> for StoreError {
     }
 }

-pub(crate) trait Identifier: Send + Sync + Debug {
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError>;
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized;
-
-    fn from_arc(arc: Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized;
-
-    fn string_repr(&self) -> String;
-}
-
 #[async_trait::async_trait(?Send)]
 pub(crate) trait Store: Clone + Debug {
-    type Identifier: Identifier + Clone + 'static;
-    type Stream: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static;
-
     async fn health_check(&self) -> Result<(), StoreError>;

     async fn save_async_read<Reader>(
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static;
@@ -103,7 +93,7 @@ pub(crate) trait Store: Clone + Debug {
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static;
@@ -111,28 +101,28 @@ pub(crate) trait Store: Clone + Debug {
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>;
+    ) -> Result<Arc<str>, StoreError>;

-    fn public_url(&self, _: &Self::Identifier) -> Option<url::Url>;
+    fn public_url(&self, _: &Arc<str>) -> Option<url::Url>;

     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError>;
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError>;

     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
         Writer: AsyncWrite + Unpin;

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError>;
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError>;

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError>;
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError>;
 }

 #[async_trait::async_trait(?Send)]
@@ -140,9 +130,6 @@ impl<T> Store for actix_web::web::Data<T>
 where
     T: Store,
 {
-    type Identifier = T::Identifier;
-    type Stream = T::Stream;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         T::health_check(self).await
     }
@@ -151,7 +138,7 @@ where
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -162,7 +149,7 @@ where
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -173,26 +160,26 @@ where
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         T::save_bytes(self, bytes, content_type).await
     }

-    fn public_url(&self, identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
         T::public_url(self, identifier)
     }

     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         T::to_stream(self, identifier, from_start, len).await
     }

     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -201,11 +188,83 @@ where
         T::read_into(self, identifier, writer).await
     }

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         T::len(self, identifier).await
     }

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
+        T::remove(self, identifier).await
+    }
+}
+
+#[async_trait::async_trait(?Send)]
+impl<T> Store for Arc<T>
+where
+    T: Store,
+{
+    async fn health_check(&self) -> Result<(), StoreError> {
+        T::health_check(self).await
+    }
+
+    async fn save_async_read<Reader>(
+        &self,
+        reader: Reader,
+        content_type: mime::Mime,
+    ) -> Result<Arc<str>, StoreError>
+    where
+        Reader: AsyncRead + Unpin + 'static,
+    {
+        T::save_async_read(self, reader, content_type).await
+    }
+
+    async fn save_stream<S>(
+        &self,
+        stream: S,
+        content_type: mime::Mime,
+    ) -> Result<Arc<str>, StoreError>
+    where
+        S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
+    {
+        T::save_stream(self, stream, content_type).await
+    }
+
+    async fn save_bytes(
+        &self,
+        bytes: Bytes,
+        content_type: mime::Mime,
+    ) -> Result<Arc<str>, StoreError> {
+        T::save_bytes(self, bytes, content_type).await
+    }
+
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
+        T::public_url(self, identifier)
+    }
+
+    async fn to_stream(
+        &self,
+        identifier: &Arc<str>,
+        from_start: Option<u64>,
+        len: Option<u64>,
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
+        T::to_stream(self, identifier, from_start, len).await
+    }
+
+    async fn read_into<Writer>(
+        &self,
+        identifier: &Arc<str>,
+        writer: &mut Writer,
+    ) -> Result<(), std::io::Error>
+    where
+        Writer: AsyncWrite + Unpin,
+    {
+        T::read_into(self, identifier, writer).await
+    }
+
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
+        T::len(self, identifier).await
+    }
+
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         T::remove(self, identifier).await
     }
 }
@@ -215,9 +274,6 @@ impl<'a, T> Store for &'a T
 where
     T: Store,
 {
-    type Identifier = T::Identifier;
-    type Stream = T::Stream;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         T::health_check(self).await
     }
@@ -226,7 +282,7 @@ where
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -237,7 +293,7 @@ where
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -248,26 +304,26 @@ where
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         T::save_bytes(self, bytes, content_type).await
     }

-    fn public_url(&self, identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
         T::public_url(self, identifier)
     }

     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         T::to_stream(self, identifier, from_start, len).await
     }

     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -276,59 +332,11 @@ where
         T::read_into(self, identifier, writer).await
     }

-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         T::len(self, identifier).await
     }

-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         T::remove(self, identifier).await
     }
 }
-
-impl Identifier for Vec<u8> {
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(bytes)
-    }
-
-    fn from_arc(arc: Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(Vec::from(&arc[..]))
-    }
-
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        Ok(self.clone())
-    }
-
-    fn string_repr(&self) -> String {
-        BASE64_STANDARD.encode(self.as_slice())
-    }
-}
-
-impl Identifier for Arc<[u8]> {
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(Arc::from(bytes))
-    }
-
-    fn from_arc(arc: Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Ok(arc)
-    }
-
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        Ok(Vec::from(&self[..]))
-    }
-
-    fn string_repr(&self) -> String {
-        BASE64_STANDARD.encode(&self[..])
-    }
-}
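Note: after this change every Store implementation hands out Arc<str> identifiers and a LocalBoxStream instead of per-store associated types, which is why the Vec<u8> and Arc<[u8]> Identifier impls and their base64 string_repr could be dropped. A small illustrative sketch of the assumption behind it; the alias mirrors the usual futures-style definition, and the path value is made up:

    use std::{pin::Pin, sync::Arc};

    use futures_core::Stream;

    // The usual shape of a locally-boxed stream alias; pict-rs' own
    // stream::LocalBoxStream is assumed to look like this.
    type LocalBoxStream<'a, T> = Pin<Box<dyn Stream<Item = T> + 'a>>;

    fn main() {
        // Identifiers are now human-readable strings (relative file paths or
        // object keys), so they can be logged directly instead of via base64.
        let identifier: Arc<str> = Arc::from("files/ab/cd/0123");
        println!("stored at {identifier}");

        // Use the alias once so the sketch compiles warning-free.
        let _stream: Option<LocalBoxStream<'static, std::io::Result<Vec<u8>>>> = None;
    }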
@@ -1,23 +1,17 @@
 use crate::{
-    error_code::ErrorCode,
-    file::File,
-    repo::{Repo, SettingsRepo},
-    store::Store,
+    error_code::ErrorCode, file::File, repo::ArcRepo, store::Store, stream::LocalBoxStream,
 };
 use actix_web::web::Bytes;
 use futures_core::Stream;
 use std::{
     path::{Path, PathBuf},
-    pin::Pin,
+    sync::Arc,
 };
 use storage_path_generator::Generator;
 use tokio::io::{AsyncRead, AsyncWrite};
 use tokio_util::io::StreamReader;
 use tracing::Instrument;

-mod file_id;
-pub(crate) use file_id::FileId;
-
 use super::StoreError;

 // - Settings Tree
@@ -33,12 +27,12 @@ pub(crate) enum FileError {
     #[error("Failed to generate path")]
     PathGenerator(#[from] storage_path_generator::PathError),

-    #[error("Error formatting file store ID")]
-    IdError,
-
-    #[error("Malformed file store ID")]
+    #[error("Couldn't strip root dir")]
     PrefixError,

+    #[error("Couldn't convert Path to String")]
+    StringError,
+
     #[error("Tried to save over existing file")]
     FileExists,
 }
@@ -49,7 +43,7 @@ impl FileError {
             Self::Io(_) => ErrorCode::FILE_IO_ERROR,
             Self::PathGenerator(_) => ErrorCode::PARSE_PATH_ERROR,
             Self::FileExists => ErrorCode::FILE_EXISTS,
-            Self::IdError | Self::PrefixError => ErrorCode::FORMAT_FILE_ID_ERROR,
+            Self::StringError | Self::PrefixError => ErrorCode::FORMAT_FILE_ID_ERROR,
         }
     }
 }
@@ -58,14 +52,12 @@ impl FileError {
 pub(crate) struct FileStore {
     path_gen: Generator,
     root_dir: PathBuf,
-    repo: Repo,
+    repo: ArcRepo,
 }

 #[async_trait::async_trait(?Send)]
 impl Store for FileStore {
-    type Identifier = FileId;
-    type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;
-
+    #[tracing::instrument(level = "DEBUG", skip(self))]
     async fn health_check(&self) -> Result<(), StoreError> {
         tokio::fs::metadata(&self.root_dir)
             .await
@@ -74,12 +66,12 @@ impl Store for FileStore {
         Ok(())
     }

-    #[tracing::instrument(skip(reader))]
+    #[tracing::instrument(skip(self, reader))]
     async fn save_async_read<Reader>(
         &self,
         mut reader: Reader,
         _content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -97,7 +89,7 @@ impl Store for FileStore {
         &self,
         stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -105,12 +97,12 @@ impl Store for FileStore {
         .await
     }

-    #[tracing::instrument(skip(bytes))]
+    #[tracing::instrument(skip(self, bytes))]
     async fn save_bytes(
         &self,
         bytes: Bytes,
         _content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         let path = self.next_file().await?;

         if let Err(e) = self.safe_save_bytes(&path, bytes).await {
@@ -121,17 +113,17 @@ impl Store for FileStore {
         Ok(self.file_id_from_path(path)?)
     }

-    fn public_url(&self, _identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, _identifier: &Arc<str>) -> Option<url::Url> {
         None
     }

-    #[tracing::instrument]
+    #[tracing::instrument(skip(self))]
     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         let path = self.path_from_file_id(identifier);

         let file_span = tracing::trace_span!(parent: None, "File Stream");
@@ -149,10 +141,10 @@ impl Store for FileStore {
         Ok(Box::pin(stream))
     }

-    #[tracing::instrument(skip(writer))]
+    #[tracing::instrument(skip(self, writer))]
     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -165,8 +157,8 @@ impl Store for FileStore {
         Ok(())
     }

-    #[tracing::instrument]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    #[tracing::instrument(skip(self))]
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         let path = self.path_from_file_id(identifier);

         let len = tokio::fs::metadata(path)
@@ -177,8 +169,8 @@ impl Store for FileStore {
         Ok(len)
     }

-    #[tracing::instrument]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    #[tracing::instrument(skip(self))]
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         let path = self.path_from_file_id(identifier);

         self.safe_remove_file(path).await?;
@@ -189,7 +181,7 @@ impl Store for FileStore {

 impl FileStore {
     #[tracing::instrument(skip(repo))]
-    pub(crate) async fn build(root_dir: PathBuf, repo: Repo) -> color_eyre::Result<Self> {
+    pub(crate) async fn build(root_dir: PathBuf, repo: ArcRepo) -> color_eyre::Result<Self> {
         let path_gen = init_generator(&repo).await?;

         tokio::fs::create_dir_all(&root_dir).await?;
@@ -201,16 +193,24 @@ impl FileStore {
         })
     }

+    fn file_id_from_path(&self, path: PathBuf) -> Result<Arc<str>, FileError> {
+        path.strip_prefix(&self.root_dir)
+            .map_err(|_| FileError::PrefixError)?
+            .to_str()
+            .ok_or(FileError::StringError)
+            .map(Into::into)
+    }
+
+    fn path_from_file_id(&self, file_id: &Arc<str>) -> PathBuf {
+        self.root_dir.join(file_id.as_ref())
+    }
+
     async fn next_directory(&self) -> Result<PathBuf, StoreError> {
         let path = self.path_gen.next();

-        match self.repo {
-            Repo::Sled(ref sled_repo) => {
-                sled_repo
+        self.repo
             .set(GENERATOR_KEY, path.to_be_bytes().into())
             .await?;
-            }
-        }

         let mut target_path = self.root_dir.clone();
         for dir in path.to_strings() {
@@ -227,6 +227,7 @@ impl FileStore {
         Ok(target_path.join(filename))
     }

+    #[tracing::instrument(level = "DEBUG", skip(self, path), fields(path = ?path.as_ref()))]
     async fn safe_remove_file<P: AsRef<Path>>(&self, path: P) -> Result<(), FileError> {
         tokio::fs::remove_file(&path).await?;
         self.try_remove_parents(path.as_ref()).await;
@@ -308,19 +309,14 @@ pub(crate) async fn safe_create_parent<P: AsRef<Path>>(path: P) -> Result<(), Fi
     Ok(())
 }

-async fn init_generator(repo: &Repo) -> Result<Generator, StoreError> {
-    match repo {
-        Repo::Sled(sled_repo) => {
-            if let Some(ivec) = sled_repo.get(GENERATOR_KEY).await? {
+async fn init_generator(repo: &ArcRepo) -> Result<Generator, StoreError> {
+    if let Some(ivec) = repo.get(GENERATOR_KEY).await? {
         Ok(Generator::from_existing(
-            storage_path_generator::Path::from_be_bytes(ivec.to_vec())
-                .map_err(FileError::from)?,
+            storage_path_generator::Path::from_be_bytes(ivec.to_vec()).map_err(FileError::from)?,
         ))
     } else {
         Ok(Generator::new())
     }
-        }
-    }
 }

 impl std::fmt::Debug for FileStore {
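Note: with FileId gone (the deletion follows below), a file identifier is simply the path relative to the store root, kept as an Arc<str>. A self-contained sketch of the round trip the two new helpers above perform; the root path here is only an example:

    use std::path::{Path, PathBuf};
    use std::sync::Arc;

    fn file_id_from_path(root: &Path, path: &Path) -> Option<Arc<str>> {
        // Strip the store root and keep the remainder as a UTF-8 string.
        path.strip_prefix(root).ok()?.to_str().map(Arc::from)
    }

    fn path_from_file_id(root: &Path, file_id: &Arc<str>) -> PathBuf {
        root.join(file_id.as_ref())
    }

    fn main() {
        let root = Path::new("/var/lib/pict-rs/files");
        let stored = root.join("ab/cd/0123");

        let id = file_id_from_path(root, &stored).expect("path lives under the root");
        assert_eq!(path_from_file_id(root, &id), stored);
    }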
@@ -1,57 +0,0 @@
-use crate::store::{
-    file_store::{FileError, FileStore},
-    Identifier, StoreError,
-};
-use std::path::PathBuf;
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct FileId(PathBuf);
-
-impl Identifier for FileId {
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        let vec = self
-            .0
-            .to_str()
-            .ok_or(FileError::IdError)?
-            .as_bytes()
-            .to_vec();
-
-        Ok(vec)
-    }
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        let string = String::from_utf8(bytes).map_err(|_| FileError::IdError)?;
-
-        let id = FileId(PathBuf::from(string));
-
-        Ok(id)
-    }
-
-    fn from_arc(arc: std::sync::Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Self::from_bytes(Vec::from(&arc[..]))
-    }
-
-    fn string_repr(&self) -> String {
-        self.0.to_string_lossy().into_owned()
-    }
-}
-
-impl FileStore {
-    pub(super) fn file_id_from_path(&self, path: PathBuf) -> Result<FileId, FileError> {
-        let stripped = path
-            .strip_prefix(&self.root_dir)
-            .map_err(|_| FileError::PrefixError)?;
-
-        Ok(FileId(stripped.to_path_buf()))
-    }
-
-    pub(super) fn path_from_file_id(&self, file_id: &FileId) -> PathBuf {
-        self.root_dir.join(&file_id.0)
-    }
-}
@@ -1,9 +1,9 @@
 use crate::{
     bytes_stream::BytesStream,
     error_code::ErrorCode,
-    repo::{Repo, SettingsRepo},
+    repo::ArcRepo,
     store::Store,
-    stream::{IntoStreamer, StreamMap},
+    stream::{IntoStreamer, LocalBoxStream, StreamMap},
 };
 use actix_rt::task::JoinError;
 use actix_web::{
@@ -19,16 +19,13 @@ use futures_core::Stream;
 use reqwest::{header::RANGE, Body, Response};
 use reqwest_middleware::{ClientWithMiddleware, RequestBuilder};
 use rusty_s3::{actions::S3Action, Bucket, BucketError, Credentials, UrlStyle};
-use std::{pin::Pin, string::FromUtf8Error, time::Duration};
+use std::{string::FromUtf8Error, sync::Arc, time::Duration};
 use storage_path_generator::{Generator, Path};
 use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
 use tokio_util::io::ReaderStream;
 use tracing::Instrument;
 use url::Url;

-mod object_id;
-pub(crate) use object_id::ObjectId;
-
 use super::StoreError;

 const CHUNK_SIZE: usize = 8_388_608; // 8 Mebibytes, min is 5 (5_242_880);
@@ -107,7 +104,7 @@ impl From<BlockingError> for ObjectError {
 #[derive(Clone)]
 pub(crate) struct ObjectStore {
     path_gen: Generator,
-    repo: Repo,
+    repo: ArcRepo,
     bucket: Bucket,
     credentials: Credentials,
     client: ClientWithMiddleware,
@@ -119,7 +116,7 @@ pub(crate) struct ObjectStore {
 #[derive(Clone)]
 pub(crate) struct ObjectStoreConfig {
     path_gen: Generator,
-    repo: Repo,
+    repo: ArcRepo,
     bucket: Bucket,
     credentials: Credentials,
     signature_expiration: u64,
@@ -189,9 +186,6 @@ async fn status_error(response: Response) -> StoreError {

 #[async_trait::async_trait(?Send)]
 impl Store for ObjectStore {
-    type Identifier = ObjectId;
-    type Stream = Pin<Box<dyn Stream<Item = std::io::Result<Bytes>>>>;
-
     async fn health_check(&self) -> Result<(), StoreError> {
         let response = self
             .head_bucket_request()
@@ -211,7 +205,7 @@ impl Store for ObjectStore {
         &self,
         reader: Reader,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         Reader: AsyncRead + Unpin + 'static,
     {
@@ -224,7 +218,7 @@ impl Store for ObjectStore {
         &self,
         mut stream: S,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError>
+    ) -> Result<Arc<str>, StoreError>
     where
         S: Stream<Item = std::io::Result<Bytes>> + Unpin + 'static,
     {
@@ -283,7 +277,7 @@ impl Store for ObjectStore {

         let object_id2 = object_id.clone();
         let upload_id2 = upload_id.clone();
-        let handle = actix_rt::spawn(
+        let handle = crate::sync::spawn(
             async move {
                 let response = this
                     .create_upload_part_request(
@@ -363,7 +357,7 @@ impl Store for ObjectStore {
         &self,
         bytes: Bytes,
         content_type: mime::Mime,
-    ) -> Result<Self::Identifier, StoreError> {
+    ) -> Result<Arc<str>, StoreError> {
         let (req, object_id) = self.put_object_request(bytes.len(), content_type).await?;

         let response = req.body(bytes).send().await.map_err(ObjectError::from)?;
@@ -375,9 +369,9 @@ impl Store for ObjectStore {
         Ok(object_id)
     }

-    fn public_url(&self, identifier: &Self::Identifier) -> Option<url::Url> {
+    fn public_url(&self, identifier: &Arc<str>) -> Option<url::Url> {
         self.public_endpoint.clone().map(|mut endpoint| {
-            endpoint.set_path(identifier.as_str());
+            endpoint.set_path(identifier.as_ref());
             endpoint
         })
     }
@@ -385,10 +379,10 @@ impl Store for ObjectStore {
     #[tracing::instrument(skip(self))]
     async fn to_stream(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
-    ) -> Result<Self::Stream, StoreError> {
+    ) -> Result<LocalBoxStream<'static, std::io::Result<Bytes>>, StoreError> {
         let response = self
             .get_object_request(identifier, from_start, len)
             .send()
@@ -409,7 +403,7 @@ impl Store for ObjectStore {
     #[tracing::instrument(skip(self, writer))]
     async fn read_into<Writer>(
         &self,
-        identifier: &Self::Identifier,
+        identifier: &Arc<str>,
         writer: &mut Writer,
     ) -> Result<(), std::io::Error>
     where
@@ -440,7 +434,7 @@ impl Store for ObjectStore {
     }

     #[tracing::instrument(skip(self))]
-    async fn len(&self, identifier: &Self::Identifier) -> Result<u64, StoreError> {
+    async fn len(&self, identifier: &Arc<str>) -> Result<u64, StoreError> {
         let response = self
             .head_object_request(identifier)
             .send()
@@ -464,7 +458,7 @@ impl Store for ObjectStore {
     }

     #[tracing::instrument(skip(self))]
-    async fn remove(&self, identifier: &Self::Identifier) -> Result<(), StoreError> {
+    async fn remove(&self, identifier: &Arc<str>) -> Result<(), StoreError> {
         let response = self
             .delete_object_request(identifier)
             .send()
@@ -493,7 +487,7 @@ impl ObjectStore {
         signature_expiration: u64,
         client_timeout: u64,
         public_endpoint: Option<Url>,
-        repo: Repo,
+        repo: ArcRepo,
     ) -> Result<ObjectStoreConfig, StoreError> {
         let path_gen = init_generator(&repo).await?;
@@ -523,7 +517,7 @@ impl ObjectStore {
         &self,
         length: usize,
         content_type: mime::Mime,
-    ) -> Result<(RequestBuilder, ObjectId), StoreError> {
+    ) -> Result<(RequestBuilder, Arc<str>), StoreError> {
         let path = self.next_file().await?;

         let mut action = self.bucket.put_object(Some(&self.credentials), &path);
@@ -535,13 +529,13 @@ impl ObjectStore {
             .headers_mut()
             .insert("content-length", length.to_string());

-        Ok((self.build_request(action), ObjectId::from_string(path)))
+        Ok((self.build_request(action), Arc::from(path)))
     }

     async fn create_multipart_request(
         &self,
         content_type: mime::Mime,
-    ) -> Result<(RequestBuilder, ObjectId), StoreError> {
+    ) -> Result<(RequestBuilder, Arc<str>), StoreError> {
         let path = self.next_file().await?;

         let mut action = self
@@ -552,13 +546,13 @@ impl ObjectStore {
             .headers_mut()
             .insert("content-type", content_type.as_ref());

-        Ok((self.build_request(action), ObjectId::from_string(path)))
+        Ok((self.build_request(action), Arc::from(path)))
     }

     async fn create_upload_part_request(
         &self,
         buf: BytesStream,
-        object_id: &ObjectId,
+        object_id: &Arc<str>,
         part_number: u16,
         upload_id: &str,
     ) -> Result<RequestBuilder, ObjectError> {
@@ -566,7 +560,7 @@ impl ObjectStore {

         let mut action = self.bucket.upload_part(
             Some(&self.credentials),
-            object_id.as_str(),
+            object_id.as_ref(),
             part_number,
             upload_id,
         );
@@ -601,13 +595,13 @@ impl ObjectStore {

     async fn send_complete_multipart_request<'a, I: Iterator<Item = &'a str>>(
         &'a self,
-        object_id: &'a ObjectId,
+        object_id: &'a Arc<str>,
         upload_id: &'a str,
         etags: I,
     ) -> Result<Response, reqwest_middleware::Error> {
         let mut action = self.bucket.complete_multipart_upload(
             Some(&self.credentials),
-            object_id.as_str(),
+            object_id.as_ref(),
             upload_id,
             etags,
         );
@@ -628,12 +622,12 @@ impl ObjectStore {

     fn create_abort_multipart_request(
         &self,
-        object_id: &ObjectId,
+        object_id: &Arc<str>,
         upload_id: &str,
     ) -> RequestBuilder {
         let action = self.bucket.abort_multipart_upload(
             Some(&self.credentials),
-            object_id.as_str(),
+            object_id.as_ref(),
             upload_id,
         );
@@ -671,13 +665,13 @@ impl ObjectStore {

     fn get_object_request(
         &self,
-        identifier: &ObjectId,
+        identifier: &Arc<str>,
         from_start: Option<u64>,
         len: Option<u64>,
     ) -> RequestBuilder {
         let action = self
             .bucket
-            .get_object(Some(&self.credentials), identifier.as_str());
+            .get_object(Some(&self.credentials), identifier.as_ref());

         let req = self.build_request(action);
@@ -695,18 +689,18 @@ impl ObjectStore {
         )
     }

-    fn head_object_request(&self, identifier: &ObjectId) -> RequestBuilder {
+    fn head_object_request(&self, identifier: &Arc<str>) -> RequestBuilder {
         let action = self
             .bucket
-            .head_object(Some(&self.credentials), identifier.as_str());
+            .head_object(Some(&self.credentials), identifier.as_ref());

         self.build_request(action)
     }

-    fn delete_object_request(&self, identifier: &ObjectId) -> RequestBuilder {
+    fn delete_object_request(&self, identifier: &Arc<str>) -> RequestBuilder {
         let action = self
             .bucket
-            .delete_object(Some(&self.credentials), identifier.as_str());
+            .delete_object(Some(&self.credentials), identifier.as_ref());

         self.build_request(action)
     }
@@ -714,13 +708,9 @@ impl ObjectStore {
     async fn next_directory(&self) -> Result<Path, StoreError> {
         let path = self.path_gen.next();

-        match self.repo {
-            Repo::Sled(ref sled_repo) => {
-                sled_repo
+        self.repo
             .set(GENERATOR_KEY, path.to_be_bytes().into())
             .await?;
-            }
-        }

         Ok(path)
     }
@@ -733,10 +723,8 @@ impl ObjectStore {
     }
 }

-async fn init_generator(repo: &Repo) -> Result<Generator, StoreError> {
-    match repo {
-        Repo::Sled(sled_repo) => {
-            if let Some(ivec) = sled_repo.get(GENERATOR_KEY).await? {
+async fn init_generator(repo: &ArcRepo) -> Result<Generator, StoreError> {
+    if let Some(ivec) = repo.get(GENERATOR_KEY).await? {
         Ok(Generator::from_existing(
             storage_path_generator::Path::from_be_bytes(ivec.to_vec())
                 .map_err(ObjectError::from)?,
@@ -744,8 +732,6 @@ async fn init_generator(repo: &Repo) -> Result<Generator, StoreError> {
     } else {
         Ok(Generator::new())
     }
-        }
-    }
 }

 impl std::fmt::Debug for ObjectStore {
@@ -1,37 +0,0 @@
-use crate::store::{object_store::ObjectError, Identifier, StoreError};
-
-#[derive(Debug, Clone)]
-pub(crate) struct ObjectId(String);
-
-impl Identifier for ObjectId {
-    fn to_bytes(&self) -> Result<Vec<u8>, StoreError> {
-        Ok(self.0.as_bytes().to_vec())
-    }
-
-    fn from_bytes(bytes: Vec<u8>) -> Result<Self, StoreError> {
-        Ok(ObjectId(
-            String::from_utf8(bytes).map_err(ObjectError::from)?,
-        ))
-    }
-
-    fn from_arc(arc: std::sync::Arc<[u8]>) -> Result<Self, StoreError>
-    where
-        Self: Sized,
-    {
-        Self::from_bytes(Vec::from(&arc[..]))
-    }
-
-    fn string_repr(&self) -> String {
-        self.0.clone()
-    }
-}
-
-impl ObjectId {
-    pub(super) fn from_string(string: String) -> Self {
-        ObjectId(string)
-    }
-
-    pub(super) fn as_str(&self) -> &str {
-        &self.0
-    }
-}
@@ -1,5 +1,6 @@
 use actix_rt::{task::JoinHandle, time::Sleep};
 use actix_web::web::Bytes;
+use flume::r#async::RecvStream;
 use futures_core::Stream;
 use std::{
     future::Future,
@@ -174,19 +175,25 @@ pin_project_lite::pin_project! {
     }
 }

-enum IterStreamState<I, T> {
+enum IterStreamState<I, T>
+where
+    T: 'static,
+{
     New {
         iterator: I,
         buffer: usize,
     },
     Running {
         handle: JoinHandle<()>,
-        receiver: tokio::sync::mpsc::Receiver<T>,
+        receiver: RecvStream<'static, T>,
     },
     Pending,
 }

-pub(crate) struct IterStream<I, T> {
+pub(crate) struct IterStream<I, T>
+where
+    T: 'static,
+{
     state: IterStreamState<I, T>,
 }
@@ -287,14 +294,13 @@ where

         match std::mem::replace(&mut this.state, IterStreamState::Pending) {
             IterStreamState::New { iterator, buffer } => {
-                let (sender, receiver) = tracing::trace_span!(parent: None, "Create channel")
-                    .in_scope(|| tokio::sync::mpsc::channel(buffer));
+                let (sender, receiver) = crate::sync::channel(buffer);

-                let mut handle = actix_rt::task::spawn_blocking(move || {
+                let mut handle = crate::sync::spawn_blocking(move || {
                     let iterator = iterator.into_iter();

                     for item in iterator {
-                        if sender.blocking_send(item).is_err() {
+                        if sender.send(item).is_err() {
                             break;
                         }
                     }
@@ -304,14 +310,17 @@ where
                     return Poll::Ready(None);
                 }

-                this.state = IterStreamState::Running { handle, receiver };
+                this.state = IterStreamState::Running {
+                    handle,
+                    receiver: receiver.into_stream(),
+                };

                 self.poll_next(cx)
             }
             IterStreamState::Running {
                 mut handle,
                 mut receiver,
-            } => match Pin::new(&mut receiver).poll_recv(cx) {
+            } => match Pin::new(&mut receiver).poll_next(cx) {
                 Poll::Ready(Some(item)) => {
                     this.state = IterStreamState::Running { handle, receiver };
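Note: the IterStream change above swaps the tokio mpsc receiver for a flume bounded channel, because flume offers a blocking send half for the spawn_blocking producer and a Stream-shaped receive half via into_stream(). A stand-alone sketch of the same idea, using a plain thread for the producer (pict-rs itself uses crate::sync::spawn_blocking instead):

    use futures_core::Stream;

    fn iter_to_stream<I>(iterator: I, buffer: usize) -> impl Stream<Item = I::Item>
    where
        I: IntoIterator + Send + 'static,
        I::Item: Send + 'static,
    {
        let (sender, receiver) = flume::bounded(buffer);

        std::thread::spawn(move || {
            for item in iterator {
                // send() blocks while the buffer is full and errors once the
                // receiver side is dropped, which stops the producer early.
                if sender.send(item).is_err() {
                    break;
                }
            }
        });

        receiver.into_stream()
    }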
38	src/sync.rs (new file)
@@ -0,0 +1,38 @@
+use std::sync::Arc;
+
+use tokio::sync::{Notify, Semaphore};
+
+pub(crate) fn channel<T>(bound: usize) -> (flume::Sender<T>, flume::Receiver<T>) {
+    tracing::trace_span!(parent: None, "make channel").in_scope(|| flume::bounded(bound))
+}
+
+pub(crate) fn notify() -> Arc<Notify> {
+    Arc::new(bare_notify())
+}
+
+pub(crate) fn bare_notify() -> Notify {
+    tracing::trace_span!(parent: None, "make notifier").in_scope(Notify::new)
+}
+
+pub(crate) fn bare_semaphore(permits: usize) -> Semaphore {
+    tracing::trace_span!(parent: None, "make semaphore").in_scope(|| Semaphore::new(permits))
+}
+
+pub(crate) fn spawn<F>(future: F) -> actix_rt::task::JoinHandle<F::Output>
+where
+    F: std::future::Future + 'static,
+    F::Output: 'static,
+{
+    tracing::trace_span!(parent: None, "spawn task").in_scope(|| actix_rt::spawn(future))
+}
+
+pub(crate) fn spawn_blocking<F, Out>(function: F) -> actix_rt::task::JoinHandle<Out>
+where
+    F: FnOnce() -> Out + Send + 'static,
+    Out: Send + 'static,
+{
+    let outer_span = tracing::Span::current();
+
+    tracing::trace_span!(parent: None, "spawn blocking task")
+        .in_scope(|| actix_rt::task::spawn_blocking(move || outer_span.in_scope(function)))
+}
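Note: the helpers in src/sync.rs above detach spawned work from the current trace span (fresh parent: None spans), while spawn_blocking re-enters the caller's span inside the closure so blocking work keeps its tracing context. An illustrative use from inside the crate; the surrounding function is made up:

    // Count entries in a sled tree without blocking the async executor.
    async fn count_tree_entries(
        tree: sled::Tree,
    ) -> Result<usize, actix_rt::task::JoinError> {
        // The closure runs on the blocking pool, but events it records still
        // land in the span that was current when spawn_blocking was called.
        crate::sync::spawn_blocking(move || tree.iter().filter_map(Result::ok).count()).await
    }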
@@ -13,8 +13,7 @@ struct TmpFile(PathBuf);

 impl Drop for TmpFile {
     fn drop(&mut self) {
-        tracing::trace_span!(parent: None, "Spawn task")
-            .in_scope(|| actix_rt::spawn(tokio::fs::remove_file(self.0.clone())));
+        crate::sync::spawn(tokio::fs::remove_file(self.0.clone()));
     }
 }