mirror of https://git.asonix.dog/asonix/pict-rs synced 2024-11-10 06:25:00 +00:00

Shorten pagination slugs

asonix 2023-08-29 12:53:29 -05:00
parent 1271ff2cc7
commit 64950bfe0e
5 changed files with 71 additions and 53 deletions
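
Summary of the change, as reflected in the diff below: a pagination slug was previously the base64url encoding of a 16-byte timestamp followed by the hash's byte representation, and that representation itself spent 8 bytes on the size plus a 5- or 6-byte string tag (e.g. b"i-png") on the format. After this commit the slug encodes only the hash's byte representation, now packed into 39 bytes (32-byte hash, 6-byte size, 1-byte format tag), so slugs shrink from 84 to 52 base64 characters. The timestamp used for ordering is no longer carried in the slug; instead it is looked up from the repo through a new HashRepo::bound method.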


@@ -79,35 +79,35 @@ impl InternalFormat
         }
     }
 
-    pub(crate) const fn to_bytes(self) -> &'static [u8] {
+    pub(crate) const fn to_byte(self) -> u8 {
         match self {
-            Self::Animation(AnimationFormat::Apng) => b"a-apng",
-            Self::Animation(AnimationFormat::Avif) => b"a-avif",
-            Self::Animation(AnimationFormat::Gif) => b"a-gif",
-            Self::Animation(AnimationFormat::Webp) => b"a-webp",
-            Self::Image(ImageFormat::Avif) => b"i-avif",
-            Self::Image(ImageFormat::Jpeg) => b"i-jpeg",
-            Self::Image(ImageFormat::Jxl) => b"i-jxl",
-            Self::Image(ImageFormat::Png) => b"i-png",
-            Self::Image(ImageFormat::Webp) => b"i-webp",
-            Self::Video(InternalVideoFormat::Mp4) => b"v-mp4",
-            Self::Video(InternalVideoFormat::Webm) => b"v-webm",
+            Self::Animation(AnimationFormat::Apng) => 0,
+            Self::Animation(AnimationFormat::Avif) => 1,
+            Self::Animation(AnimationFormat::Gif) => 2,
+            Self::Animation(AnimationFormat::Webp) => 3,
+            Self::Image(ImageFormat::Avif) => 4,
+            Self::Image(ImageFormat::Jpeg) => 5,
+            Self::Image(ImageFormat::Jxl) => 6,
+            Self::Image(ImageFormat::Png) => 7,
+            Self::Image(ImageFormat::Webp) => 8,
+            Self::Video(InternalVideoFormat::Mp4) => 9,
+            Self::Video(InternalVideoFormat::Webm) => 10,
         }
     }
 
-    pub(crate) const fn from_bytes(bytes: &[u8]) -> Option<Self> {
-        match bytes {
-            b"a-apng" => Some(Self::Animation(AnimationFormat::Apng)),
-            b"a-avif" => Some(Self::Animation(AnimationFormat::Avif)),
-            b"a-gif" => Some(Self::Animation(AnimationFormat::Gif)),
-            b"a-webp" => Some(Self::Animation(AnimationFormat::Webp)),
-            b"i-avif" => Some(Self::Image(ImageFormat::Avif)),
-            b"i-jpeg" => Some(Self::Image(ImageFormat::Jpeg)),
-            b"i-jxl" => Some(Self::Image(ImageFormat::Jxl)),
-            b"i-png" => Some(Self::Image(ImageFormat::Png)),
-            b"i-webp" => Some(Self::Image(ImageFormat::Webp)),
-            b"v-mp4" => Some(Self::Video(InternalVideoFormat::Mp4)),
-            b"v-webm" => Some(Self::Video(InternalVideoFormat::Webm)),
+    pub(crate) const fn from_byte(byte: u8) -> Option<Self> {
+        match byte {
+            0 => Some(Self::Animation(AnimationFormat::Apng)),
+            1 => Some(Self::Animation(AnimationFormat::Avif)),
+            2 => Some(Self::Animation(AnimationFormat::Gif)),
+            3 => Some(Self::Animation(AnimationFormat::Webp)),
+            4 => Some(Self::Image(ImageFormat::Avif)),
+            5 => Some(Self::Image(ImageFormat::Jpeg)),
+            6 => Some(Self::Image(ImageFormat::Jxl)),
+            7 => Some(Self::Image(ImageFormat::Png)),
+            8 => Some(Self::Image(ImageFormat::Webp)),
+            9 => Some(Self::Video(InternalVideoFormat::Mp4)),
+            10 => Some(Self::Video(InternalVideoFormat::Webm)),
             _ => None,
         }
     }
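
The two new constructors are exact inverses. A minimal test-style sketch of the round trip (assuming InternalFormat and the nested format enums derive Copy, PartialEq, and Debug, which is not shown in this diff):

#[test]
fn format_byte_round_trip() {
    // A few representative variants; the match arms above cover all eleven.
    let formats = [
        InternalFormat::Animation(AnimationFormat::Apng),
        InternalFormat::Image(ImageFormat::Jpeg),
        InternalFormat::Video(InternalVideoFormat::Webm),
    ];

    for format in formats {
        // from_byte must undo to_byte for every variant.
        assert_eq!(InternalFormat::from_byte(format.to_byte()), Some(format));
    }

    // Unknown tag bytes are rejected rather than misinterpreted.
    assert_eq!(InternalFormat::from_byte(11), None);
}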


@@ -612,7 +612,7 @@ async fn page(
 ) -> Result<HttpResponse, Error> {
     let limit = limit.unwrap_or(20);
 
-    let page = repo.hash_page(slug.clone(), limit).await?;
+    let page = repo.hash_page(slug, limit).await?;
 
     let mut hashes = Vec::with_capacity(page.hashes.len());
@@ -641,7 +641,7 @@ async fn page(
     let page = PageJson {
         limit: page.limit,
-        current: slug,
+        current: page.current(),
        prev: page.prev(),
        next: page.next(),
        hashes,
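
Note that current changes meaning slightly here: previously it echoed the slug from the request (so it was absent on the first page), whereas now it is derived from the first hash actually returned, via the new HashPage::current helper introduced below.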


@@ -508,34 +508,33 @@ pub(crate) struct OrderedHash {
 pub(crate) struct HashPage {
     pub(crate) limit: usize,
-    prev: Option<OrderedHash>,
-    next: Option<OrderedHash>,
+    prev: Option<Hash>,
+    next: Option<Hash>,
     pub(crate) hashes: Vec<Hash>,
 }
 
-fn ordered_hash_to_string(OrderedHash { timestamp, hash }: &OrderedHash) -> String {
-    let mut bytes: Vec<u8> = timestamp.unix_timestamp_nanos().to_be_bytes().into();
-    bytes.extend(hash.to_bytes());
-    base64::prelude::BASE64_URL_SAFE.encode(bytes)
+fn hash_to_slug(hash: &Hash) -> String {
+    base64::prelude::BASE64_URL_SAFE.encode(hash.to_bytes())
 }
 
-fn ordered_hash_from_string(s: &str) -> Option<OrderedHash> {
+fn hash_from_slug(s: &str) -> Option<Hash> {
     let bytes = base64::prelude::BASE64_URL_SAFE.decode(s).ok()?;
-    let timestamp: [u8; 16] = bytes[0..16].try_into().ok()?;
-    let timestamp = i128::from_be_bytes(timestamp);
-    let timestamp = time::OffsetDateTime::from_unix_timestamp_nanos(timestamp).ok()?;
-    let hash = Hash::from_bytes(&bytes[16..])?;
+    let hash = Hash::from_bytes(&bytes)?;
 
-    Some(OrderedHash { timestamp, hash })
+    Some(hash)
 }
 
 impl HashPage {
+    pub(crate) fn current(&self) -> Option<String> {
+        self.hashes.first().map(hash_to_slug)
+    }
+
     pub(crate) fn next(&self) -> Option<String> {
-        self.next.as_ref().map(ordered_hash_to_string)
+        self.next.as_ref().map(hash_to_slug)
     }
 
     pub(crate) fn prev(&self) -> Option<String> {
-        self.prev.as_ref().map(ordered_hash_to_string)
+        self.prev.as_ref().map(hash_to_slug)
     }
 }
@@ -546,11 +545,19 @@ pub(crate) trait HashRepo: BaseRepo {
     async fn hashes(&self) -> LocalBoxStream<'static, Result<Hash, RepoError>>;
 
     async fn hash_page(&self, slug: Option<String>, limit: usize) -> Result<HashPage, RepoError> {
-        let bound = slug.as_deref().and_then(ordered_hash_from_string);
+        let hash = slug.as_deref().and_then(hash_from_slug);
+
+        let bound = if let Some(hash) = hash {
+            self.bound(hash).await?
+        } else {
+            None
+        };
 
         self.hashes_ordered(bound, limit).await
     }
 
+    async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError>;
+
     async fn hashes_ordered(
         &self,
         bound: Option<OrderedHash>,
@@ -618,6 +625,10 @@ where
         T::hashes(self).await
     }
 
+    async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
+        T::bound(self, hash).await
+    }
+
     async fn hashes_ordered(
         &self,
         bound: Option<OrderedHash>,
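
The effect on slug length can be checked directly with the same base64 engine the code uses. A standalone sketch, assuming the base64 0.21 crate already used in this diff; the byte counts come from the old and new Hash::to_bytes layouts shown below, and the 16 extra bytes in the old slug were the big-endian i128 timestamp:

use base64::Engine;

fn slug_len(payload_bytes: usize) -> usize {
    base64::prelude::BASE64_URL_SAFE
        .encode(vec![0u8; payload_bytes])
        .len()
}

fn main() {
    // Old slug: 16-byte timestamp + 32-byte hash + 8-byte size + format tag
    // (5 or 6 bytes; both pad out to the same encoded length).
    assert_eq!(slug_len(16 + 32 + 8 + 5), 84);
    assert_eq!(slug_len(16 + 32 + 8 + 6), 84);

    // New slug: 32-byte hash + 6-byte size + 1-byte format tag.
    assert_eq!(slug_len(32 + 6 + 1), 52);
}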


@@ -31,13 +31,13 @@ impl Hash {
     }
 
     pub(super) fn to_bytes(&self) -> Vec<u8> {
-        let format = self.format.to_bytes();
+        let format_byte = self.format.to_byte();
 
-        let mut vec = Vec::with_capacity(32 + 8 + format.len());
+        let mut vec = Vec::with_capacity(32 + 6 + 1);
 
         vec.extend_from_slice(&self.hash[..]);
-        vec.extend(self.size.to_be_bytes());
-        vec.extend(format);
+        vec.extend_from_slice(&self.size.to_be_bytes()[2..]);
+        vec.push(format_byte);
 
         vec
     }
@@ -51,17 +51,18 @@ impl Hash {
     }
 
     pub(super) fn from_bytes(bytes: &[u8]) -> Option<Self> {
-        if bytes.len() < 32 + 8 + 5 {
+        if bytes.len() != 32 + 6 + 1 {
             return None;
         }
 
         let hash = &bytes[..32];
-        let size = &bytes[32..40];
-        let format = &bytes[40..];
+        let size_bytes = &bytes[32..38];
+        let format_byte = bytes[38];
 
         let hash: [u8; 32] = hash.try_into().expect("Correct length");
-        let size: [u8; 8] = size.try_into().expect("Correct length");
-        let format = InternalFormat::from_bytes(format)?;
+        let mut size = [0u8; 8];
+        size[2..].copy_from_slice(size_bytes);
+        let format = InternalFormat::from_byte(format_byte)?;
 
         Some(Self {
             hash: Arc::new(hash),
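
Two details of the new encoding: the length check is now exact (39 bytes) rather than a lower bound, and the size field keeps only the low six of the u64's eight big-endian bytes, so it can represent sizes up to 2^48 - 1 bytes (256 TiB). A small sketch of the truncation and restoration, mirroring the slicing above:

fn main() {
    let size: u64 = 12_345_678;

    // to_bytes keeps only the low 6 of the 8 big-endian bytes...
    let stored: [u8; 6] = size.to_be_bytes()[2..].try_into().unwrap();

    // ...and from_bytes zero-fills the two dropped high bytes before decoding.
    let mut restored = [0u8; 8];
    restored[2..].copy_from_slice(&stored);
    assert_eq!(u64::from_be_bytes(restored), size);
}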


@@ -1046,6 +1046,12 @@ impl HashRepo for SledRepo {
         Box::pin(from_iterator(iter, 8))
     }
 
+    async fn bound(&self, hash: Hash) -> Result<Option<OrderedHash>, RepoError> {
+        let opt = b!(self.hashes, hashes.get(hash.to_ivec()));
+
+        Ok(opt.and_then(parse_ordered_hash))
+    }
+
     async fn hashes_ordered(
         &self,
         bound: Option<OrderedHash>,
@@ -1091,8 +1097,8 @@ impl HashRepo for SledRepo {
        Ok(HashPage {
            limit,
-           prev,
-           next,
+           prev: prev.map(|OrderedHash { hash, .. }| hash),
+           next: next.map(|OrderedHash { hash, .. }| hash),
            hashes: hashes
                .into_iter()
                .map(|OrderedHash { hash, .. }| hash)