Enable setting timestamp on hash creation, improve paging api
parent 24812afeba
commit 2b6ea97773
5 changed files with 43 additions and 12 deletions

@@ -30,6 +30,13 @@ impl Details {
         self.content_type.type_() == "video"
     }
 
+    pub(crate) fn created_at(&self) -> time::OffsetDateTime {
+        match self.created_at {
+            MaybeHumanDate::OldDate(timestamp) => timestamp,
+            MaybeHumanDate::HumanDate(timestamp) => timestamp,
+        }
+    }
+
     pub(crate) async fn from_bytes(timeout: u64, input: web::Bytes) -> Result<Self, Error> {
         let DiscoveryLite {
             format,

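The new created_at accessor collapses both MaybeHumanDate variants into a plain time::OffsetDateTime, so callers can format or compare it directly. A minimal usage sketch, assuming the time crate with its formatting feature; the helper function below is hypothetical and not part of pict-rs:

```rust
use time::{format_description::well_known::Rfc3339, OffsetDateTime};

// Hypothetical helper: render a creation timestamp for an API response.
// The value would come from the `Details::created_at()` accessor added above.
fn created_at_string(created_at: OffsetDateTime) -> Result<String, time::error::Format> {
    created_at.format(&Rfc3339)
}
```
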
@@ -585,6 +585,9 @@ struct PageQuery {
 struct PageJson {
     limit: usize,
 
+    #[serde(skip_serializing_if = "Option::is_none")]
+    current: Option<String>,
+
     #[serde(skip_serializing_if = "Option::is_none")]
     prev: Option<String>,
 

@@ -609,7 +612,7 @@ async fn page(
 ) -> Result<HttpResponse, Error> {
     let limit = limit.unwrap_or(20);
 
-    let page = repo.hash_page(slug, limit).await?;
+    let page = repo.hash_page(slug.clone(), limit).await?;
 
     let mut hashes = Vec::with_capacity(page.hashes.len());
 

@@ -638,6 +641,7 @@ async fn page(
 
     let page = PageJson {
         limit: page.limit,
+        current: slug,
         prev: page.prev(),
         next: page.next(),
         hashes,

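The optional current, prev, and next cursors use serde's skip_serializing_if, so an absent cursor simply disappears from the response body instead of showing up as null. A self-contained sketch of that behavior, assuming serde with the derive feature and serde_json, with the hash list simplified to plain strings (the real PageJson carries richer per-hash data):

```rust
use serde::Serialize;

// Simplified stand-in for the PageJson above: None cursors are omitted from
// the serialized JSON entirely rather than appearing as `null`.
#[derive(Serialize)]
struct PageJson {
    limit: usize,
    #[serde(skip_serializing_if = "Option::is_none")]
    current: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    prev: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    next: Option<String>,
    hashes: Vec<String>,
}

fn main() {
    let page = PageJson {
        limit: 20,
        current: Some("c2x1Zw".into()), // example cursor values, not real slugs
        prev: None,
        next: Some("bmV4dA".into()),
        hashes: vec!["deadbeef".into()],
    };
    // `prev` is absent from the output because it is None.
    println!("{}", serde_json::to_string_pretty(&page).unwrap());
}
```
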
src/repo.rs (17 changed lines)

@@ -500,7 +500,7 @@ where
     }
 }
 
-#[derive(Clone)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub(crate) struct OrderedHash {
     timestamp: time::OffsetDateTime,
     hash: Hash,

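Deriving the comparison traits on OrderedHash is what later allows the sled implementation to compare the previous-page cursor against the requested bound (prev == bound, in the sled hunk below). Derived ordering compares fields in declaration order, timestamp first; a tiny sketch with the hash field simplified to bytes:

```rust
// Field order matters for derived comparisons: timestamp is compared first,
// then hash, which lines up with the time-ordered paging keys.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
struct OrderedHash {
    timestamp: time::OffsetDateTime,
    hash: Vec<u8>, // stand-in for the repo's Hash type
}
```
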
@@ -561,6 +561,16 @@ pub(crate) trait HashRepo: BaseRepo {
         &self,
         hash: Hash,
         identifier: &dyn Identifier,
+    ) -> Result<Result<(), HashAlreadyExists>, StoreError> {
+        self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
+            .await
+    }
+
+    async fn create_hash_with_timestamp(
+        &self,
+        hash: Hash,
+        identifier: &dyn Identifier,
+        timestamp: time::OffsetDateTime,
     ) -> Result<Result<(), HashAlreadyExists>, StoreError>;
 
     async fn update_identifier(

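This keeps the trait backwards compatible: create_hash keeps its signature and gains a default body that forwards to the new create_hash_with_timestamp with the current time, so only implementors have to provide the new method. A stripped-down, synchronous sketch of that pattern (not the pict-rs trait, which is async and returns nested Results):

```rust
use time::OffsetDateTime;

// Sketch of the extension pattern: existing callers keep using `create_hash`,
// while code that knows the original creation time (e.g. a migration) calls
// the new method directly.
trait HashRepo {
    fn create_hash(&self, hash: u64) -> bool {
        self.create_hash_with_timestamp(hash, OffsetDateTime::now_utc())
    }

    fn create_hash_with_timestamp(&self, hash: u64, timestamp: OffsetDateTime) -> bool;
}
```
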
@@ -616,12 +626,13 @@ where
         T::hashes_ordered(self, bound, limit).await
     }
 
-    async fn create_hash(
+    async fn create_hash_with_timestamp(
         &self,
         hash: Hash,
         identifier: &dyn Identifier,
+        timestamp: time::OffsetDateTime,
     ) -> Result<Result<(), HashAlreadyExists>, StoreError> {
-        T::create_hash(self, hash, identifier).await
+        T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
     }
 
     async fn update_identifier(

@@ -285,7 +285,9 @@ async fn do_migrate_hash_04<S: Store>(
 
     let hash = Hash::new(hash, size, hash_details.internal_format());
 
-    let _ = new_repo.create_hash(hash.clone(), &identifier).await?;
+    let _ = new_repo
+        .create_hash_with_timestamp(hash.clone(), &identifier, hash_details.created_at())
+        .await?;
 
     for alias in aliases {
         let delete_token = old_repo

@@ -1055,7 +1055,7 @@ impl HashRepo for SledRepo {
             Some(ordered_hash) => {
                 let hash_bytes = serialize_ordered_hash(ordered_hash);
                 (
-                    self.hashes_inverse.range(..hash_bytes.clone()),
+                    self.hashes_inverse.range(..=hash_bytes.clone()),
                     Some(self.hashes_inverse.range(hash_bytes..)),
                 )
             }

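The upper bound of the first range becomes inclusive, so the entry at the cursor itself is part of the scanned range rather than being skipped. The difference between the two bounds, sketched on a std BTreeMap standing in for the sled tree:

```rust
use std::collections::BTreeMap;

// `..key` stops before the bound entry; `..=key` includes it.
fn main() {
    let tree = BTreeMap::from([(1u8, "a"), (2, "b"), (3, "c")]);

    let exclusive: Vec<u8> = tree.range(..2u8).map(|(k, _)| *k).collect();
    let inclusive: Vec<u8> = tree.range(..=2u8).map(|(k, _)| *k).collect();

    assert_eq!(exclusive, vec![1]);
    assert_eq!(inclusive, vec![1, 2]);
}
```
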
@@ -1067,21 +1067,27 @@ impl HashRepo for SledRepo {
             .keys()
             .rev()
             .filter_map(|res| res.map(parse_ordered_hash).transpose())
-            .take(limit);
+            .take(limit + 1);
 
         let prev = prev_iter
             .and_then(|prev_iter| {
                 prev_iter
                     .keys()
                     .filter_map(|res| res.map(parse_ordered_hash).transpose())
-                    .take(limit)
+                    .take(limit + 1)
                     .last()
             })
             .transpose()?;
 
-        let hashes = page_iter.collect::<Result<Vec<_>, _>>()?;
+        let mut hashes = page_iter.collect::<Result<Vec<_>, _>>()?;
 
-        let next = hashes.last().cloned();
+        let next = if hashes.len() > limit {
+            hashes.pop()
+        } else {
+            None
+        };
+
+        let prev = if prev == bound { None } else { prev };
 
         Ok(HashPage {
             limit,

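The page iterator now over-fetches by one: if a limit-plus-first item comes back, it is popped off and becomes the cursor for the next page; otherwise there is no next page. The same trick on a plain Vec, as a rough sketch:

```rust
// Over-fetch by one to detect whether another page exists; the popped extra
// element doubles as the cursor for the following page.
fn page_of(items: &[u64], limit: usize) -> (Vec<u64>, Option<u64>) {
    let mut hashes: Vec<u64> = items.iter().copied().take(limit + 1).collect();

    let next = if hashes.len() > limit {
        hashes.pop()
    } else {
        None
    };

    (hashes, next)
}

fn main() {
    let items: Vec<u64> = (0..10).collect();
    // A full page plus one extra: the extra item becomes the next cursor.
    assert_eq!(page_of(&items, 4), (vec![0, 1, 2, 3], Some(4)));
    // A short final page: no extra item, so no next cursor.
    assert_eq!(page_of(&items[8..], 4), (vec![8, 9], None));
}
```
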
@@ -1099,10 +1105,11 @@ impl HashRepo for SledRepo {
     }
 
     #[tracing::instrument(level = "trace", skip(self))]
-    async fn create_hash(
+    async fn create_hash_with_timestamp(
         &self,
         hash: Hash,
         identifier: &dyn Identifier,
+        timestamp: time::OffsetDateTime,
     ) -> Result<Result<(), HashAlreadyExists>, StoreError> {
         let identifier: sled::IVec = identifier.to_bytes()?.into();
 

@@ -1111,7 +1118,7 @@ impl HashRepo for SledRepo {
         let hash_identifiers = self.hash_identifiers.clone();
 
         let created_key = serialize_ordered_hash(&OrderedHash {
-            timestamp: time::OffsetDateTime::now_utc(),
+            timestamp,
            hash: hash.clone(),
         });
 

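The caller-supplied timestamp now flows into the OrderedHash key, so backfilled entries from the 0.4 migration sort by their original creation time rather than by when the migration ran. serialize_ordered_hash itself is not shown in this diff; a plausible sketch of a key that sorts lexicographically in time order (the layout below is an assumption, as is the post-1970 restriction):

```rust
use time::OffsetDateTime;

// Hypothetical key layout (not the actual serialize_ordered_hash): a
// big-endian nanosecond prefix makes byte-wise key order match time order,
// so sled range scans walk hashes in creation order.
fn ordered_key(timestamp: OffsetDateTime, hash: &[u8]) -> Vec<u8> {
    let nanos = timestamp.unix_timestamp_nanos() as u128; // assumes timestamp >= 1970
    let mut key = Vec::with_capacity(16 + hash.len());
    key.extend_from_slice(&nanos.to_be_bytes());
    key.extend_from_slice(hash);
    key
}
```
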