From 2b6ea97773542710423becab8c9af2ea7606de36 Mon Sep 17 00:00:00 2001
From: asonix <asonix@asonix.dog>
Date: Mon, 28 Aug 2023 18:43:24 -0500
Subject: [PATCH] Enable setting timestamp on hash creation, improve paging api

---
 src/details.rs      |  7 +++++++
 src/lib.rs          |  6 +++++-
 src/repo.rs         | 17 ++++++++++++++---
 src/repo/migrate.rs |  4 +++-
 src/repo/sled.rs    | 21 ++++++++++++++-------
 5 files changed, 43 insertions(+), 12 deletions(-)

diff --git a/src/details.rs b/src/details.rs
index 37942a0..8255aee 100644
--- a/src/details.rs
+++ b/src/details.rs
@@ -30,6 +30,13 @@ impl Details {
         self.content_type.type_() == "video"
     }
 
+    pub(crate) fn created_at(&self) -> time::OffsetDateTime {
+        match self.created_at {
+            MaybeHumanDate::OldDate(timestamp) => timestamp,
+            MaybeHumanDate::HumanDate(timestamp) => timestamp,
+        }
+    }
+
     pub(crate) async fn from_bytes(timeout: u64, input: web::Bytes) -> Result<Self, Error> {
         let DiscoveryLite {
             format,
diff --git a/src/lib.rs b/src/lib.rs
index caee89c..4d7beec 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -585,6 +585,9 @@ struct PageQuery {
 struct PageJson {
     limit: usize,
 
+    #[serde(skip_serializing_if = "Option::is_none")]
+    current: Option<String>,
+
     #[serde(skip_serializing_if = "Option::is_none")]
     prev: Option<String>,
 
@@ -609,7 +612,7 @@ async fn page(
 ) -> Result<HttpResponse, Error> {
     let limit = limit.unwrap_or(20);
 
-    let page = repo.hash_page(slug, limit).await?;
+    let page = repo.hash_page(slug.clone(), limit).await?;
 
     let mut hashes = Vec::with_capacity(page.hashes.len());
 
@@ -638,6 +641,7 @@ async fn page(
 
     let page = PageJson {
         limit: page.limit,
+        current: slug,
         prev: page.prev(),
         next: page.next(),
         hashes,
diff --git a/src/repo.rs b/src/repo.rs
index a96380c..6cde3b5 100644
--- a/src/repo.rs
+++ b/src/repo.rs
@@ -500,7 +500,7 @@ where
     }
 }
 
-#[derive(Clone)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub(crate) struct OrderedHash {
     timestamp: time::OffsetDateTime,
     hash: Hash,
@@ -561,6 +561,16 @@ pub(crate) trait HashRepo: BaseRepo {
         &self,
         hash: Hash,
         identifier: &dyn Identifier,
+    ) -> Result<Result<(), HashAlreadyExists>, StoreError> {
+        self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
+            .await
+    }
+
+    async fn create_hash_with_timestamp(
+        &self,
+        hash: Hash,
+        identifier: &dyn Identifier,
+        timestamp: time::OffsetDateTime,
     ) -> Result<Result<(), HashAlreadyExists>, StoreError>;
 
     async fn update_identifier(
@@ -616,12 +626,13 @@ where
         T::hashes_ordered(self, bound, limit).await
     }
 
-    async fn create_hash(
+    async fn create_hash_with_timestamp(
         &self,
         hash: Hash,
         identifier: &dyn Identifier,
+        timestamp: time::OffsetDateTime,
     ) -> Result<Result<(), HashAlreadyExists>, StoreError> {
-        T::create_hash(self, hash, identifier).await
+        T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
     }
 
     async fn update_identifier(
diff --git a/src/repo/migrate.rs b/src/repo/migrate.rs
index 0931ebd..408cc31 100644
--- a/src/repo/migrate.rs
+++ b/src/repo/migrate.rs
@@ -285,7 +285,9 @@ async fn do_migrate_hash_04(
 
     let hash = Hash::new(hash, size, hash_details.internal_format());
 
-    let _ = new_repo.create_hash(hash.clone(), &identifier).await?;
+    let _ = new_repo
+        .create_hash_with_timestamp(hash.clone(), &identifier, hash_details.created_at())
+        .await?;
 
     for alias in aliases {
         let delete_token = old_repo
diff --git a/src/repo/sled.rs b/src/repo/sled.rs
index fa30792..0c4ee66 100644
--- a/src/repo/sled.rs
+++ b/src/repo/sled.rs
@@ -1055,7 +1055,7 @@ impl HashRepo for SledRepo {
             Some(ordered_hash) => {
                 let hash_bytes = serialize_ordered_hash(ordered_hash);
                 (
-                    self.hashes_inverse.range(..hash_bytes.clone()),
+                    self.hashes_inverse.range(..=hash_bytes.clone()),
                     Some(self.hashes_inverse.range(hash_bytes..)),
                 )
             }
@@ -1067,21 +1067,27 @@
             .keys()
             .rev()
             .filter_map(|res| res.map(parse_ordered_hash).transpose())
-            .take(limit);
+            .take(limit + 1);
 
         let prev = prev_iter
             .and_then(|prev_iter| {
                 prev_iter
                     .keys()
                     .filter_map(|res| res.map(parse_ordered_hash).transpose())
-                    .take(limit)
+                    .take(limit + 1)
                     .last()
             })
             .transpose()?;
 
-        let hashes = page_iter.collect::<Result<Vec<_>, _>>()?;
+        let mut hashes = page_iter.collect::<Result<Vec<_>, _>>()?;
 
-        let next = hashes.last().cloned();
+        let next = if hashes.len() > limit {
+            hashes.pop()
+        } else {
+            None
+        };
+
+        let prev = if prev == bound { None } else { prev };
 
         Ok(HashPage {
             limit,
@@ -1099,10 +1105,11 @@
     }
 
     #[tracing::instrument(level = "trace", skip(self))]
-    async fn create_hash(
+    async fn create_hash_with_timestamp(
         &self,
         hash: Hash,
         identifier: &dyn Identifier,
+        timestamp: time::OffsetDateTime,
     ) -> Result<Result<(), HashAlreadyExists>, StoreError> {
         let identifier: sled::IVec = identifier.to_bytes()?.into();
 
@@ -1111,7 +1118,7 @@
         let hash_identifiers = self.hash_identifiers.clone();
 
         let created_key = serialize_ordered_hash(&OrderedHash {
-            timestamp: time::OffsetDateTime::now_utc(),
+            timestamp,
             hash: hash.clone(),
        });
 