Mirror of https://git.asonix.dog/asonix/pict-rs (synced 2024-12-22 11:21:24 +00:00)
Make postgres work

commit d9d5ac5388 (parent 612e4017d5)
5 changed files with 128 additions and 55 deletions
@@ -69,7 +69,7 @@ pub(crate) async fn generate<S: Store + 'static>(
     let variant = thumbnail_path.to_string_lossy().to_string();
 
     let mut attempts = 0;
-    let (details, identifier) = loop {
+    let tup = loop {
         if attempts > 4 {
             todo!("return error");
         }
@@ -91,31 +91,44 @@ pub(crate) async fn generate<S: Store + 'static>(
                 )
                 .with_poll_timer("process-future");
 
-                let tuple = heartbeat(state, hash.clone(), variant.clone(), process_future)
+                let res = heartbeat(state, hash.clone(), variant.clone(), process_future)
                     .with_poll_timer("heartbeat-future")
-                    .await??;
+                    .await;
 
-                break tuple;
+                match res {
+                    Ok(Ok(tuple)) => break tuple,
+                    Ok(Err(e)) | Err(e) => {
+                        state
+                            .repo
+                            .fail_variant(hash.clone(), variant.clone())
+                            .await?;
+
+                        return Err(e);
+                    }
+                }
             }
-            Err(_) => match state
-                .repo
-                .await_variant(hash.clone(), variant.clone())
-                .await?
-            {
-                Some(identifier) => {
-                    let details = crate::ensure_details_identifier(state, &identifier).await?;
-
-                    break (details, identifier);
-                }
-                None => {
-                    attempts += 1;
-                    continue;
-                }
-            },
+            Err(_) => {
+                match state
+                    .repo
+                    .await_variant(hash.clone(), variant.clone())
+                    .await?
+                {
+                    Some(identifier) => {
+                        let details =
+                            crate::ensure_details_identifier(state, &identifier).await?;
+
+                        break (details, identifier);
+                    }
+                    None => {
+                        attempts += 1;
+                        continue;
+                    }
+                }
+            }
         }
     };
 
-    Ok((details, identifier))
+    Ok(tup)
 }
 }
 
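Note on the hunks above: when processing fails, either in the heartbeat wrapper or in the job itself, the variant's keyed notification is now cleared via fail_variant before the error is returned, so concurrent requests parked in await_variant wake up and retry instead of waiting out their timeout. A minimal sketch of that control flow, with stand-in types (Repo, ProcessError, try_claim, process_once are illustrative placeholders, not the real pict-rs API):

// Sketch only: stand-in types, not the real pict-rs signatures.
#[derive(Debug)]
struct ProcessError;

struct Repo;

impl Repo {
    async fn try_claim(&self, _key: &str) -> bool { true }
    async fn fail_variant(&self, _key: &str) {}
    async fn await_variant(&self, _key: &str) -> Option<String> { None }
}

async fn process_once(_key: &str) -> Result<String, ProcessError> {
    Ok("identifier".into())
}

async fn generate(repo: &Repo, key: &str) -> Result<String, ProcessError> {
    let mut attempts = 0;

    loop {
        if attempts > 4 {
            // The diff still has a todo!() here; the sketch just errors out.
            return Err(ProcessError);
        }

        if repo.try_claim(key).await {
            match process_once(key).await {
                Ok(identifier) => break Ok(identifier),
                Err(e) => {
                    // Wake up everyone blocked in await_variant so they retry
                    // instead of sleeping until their timeout elapses.
                    repo.fail_variant(key).await;
                    break Err(e);
                }
            }
        } else {
            match repo.await_variant(key).await {
                Some(identifier) => break Ok(identifier),
                None => {
                    attempts += 1;
                    continue;
                }
            }
        }
    }
}
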
@@ -121,6 +121,7 @@ async fn ensure_details<S: Store + 'static>(
     ensure_details_identifier(state, &identifier).await
 }
 
+#[tracing::instrument(skip(state))]
 async fn ensure_details_identifier<S: Store + 'static>(
     state: &State<S>,
     identifier: &Arc<str>,
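The only change in this hunk is the added #[tracing::instrument(skip(state))]. For reference, skip(...) keeps the listed arguments out of the span's recorded fields, which also means they do not need to implement Debug. A tiny illustration with made-up names (AppState and load are not from pict-rs):

use std::collections::HashMap;
use tracing::instrument;

// No `Debug` impl needed, because the argument is skipped below.
struct AppState {
    cache: HashMap<u64, String>,
}

// `skip(state)` records a span for `load` with the `id` field,
// but leaves `state` out of the span entirely.
#[instrument(skip(state))]
async fn load(state: &AppState, id: u64) -> Option<String> {
    state.cache.get(&id).cloned()
}
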
@@ -748,6 +748,8 @@ pub(crate) trait VariantRepo: BaseRepo {
 
     async fn variant_heartbeat(&self, hash: Hash, variant: String) -> Result<(), RepoError>;
 
+    async fn fail_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError>;
+
     async fn await_variant(
         &self,
         hash: Hash,
@@ -789,6 +791,10 @@ where
         T::variant_heartbeat(self, hash, variant).await
     }
 
+    async fn fail_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError> {
+        T::fail_variant(self, hash, variant).await
+    }
+
     async fn await_variant(
         &self,
         hash: Hash,
@@ -21,6 +21,7 @@ use diesel_async::{
         bb8::{Pool, PooledConnection, RunError},
         AsyncDieselConnectionManager, ManagerConfig, PoolError,
     },
+    scoped_futures::ScopedFutureExt,
    AsyncConnection, AsyncPgConnection, RunQueryDsl,
 };
 use futures_core::Stream;
@@ -133,7 +134,7 @@ pub(crate) enum PostgresError {
     Pool(#[source] RunError),
 
     #[error("Error in database")]
-    Diesel(#[source] diesel::result::Error),
+    Diesel(#[from] diesel::result::Error),
 
     #[error("Error deserializing hex value")]
     Hex(#[source] hex::FromHexError),
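Switching the Diesel variant from #[source] to #[from] keeps the source-error chaining but additionally derives From<diesel::result::Error> for PostgresError. That is exactly the bound diesel-async's transaction API (used later in this commit) places on its error type. A small self-contained illustration of what thiserror's #[from] buys (TinyError and parse are made up for the sketch):

use thiserror::Error;

#[derive(Debug, Error)]
enum TinyError {
    // `#[from]` implies `#[source]` and also generates
    // `impl From<std::num::ParseIntError> for TinyError`.
    #[error("Error parsing number")]
    Parse(#[from] std::num::ParseIntError),
}

fn parse(input: &str) -> Result<i64, TinyError> {
    // `?` works here because of the generated `From` impl.
    Ok(input.parse::<i64>()?)
}

fn main() {
    assert!(parse("42").is_ok());
    assert!(parse("not a number").is_err());
}
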
@@ -404,6 +405,16 @@ impl PostgresRepo {
 
         let mut conn = self.get_connection().await?;
 
+        let timestamp = to_primitive(time::OffsetDateTime::now_utc());
+
+        diesel::delete(keyed_notifications)
+            .filter(heartbeat.le(timestamp.saturating_sub(time::Duration::minutes(2))))
+            .execute(&mut conn)
+            .with_timeout(Duration::from_secs(5))
+            .await
+            .map_err(|_| PostgresError::DbTimeout)?
+            .map_err(PostgresError::Diesel)?;
+
         let res = diesel::insert_into(keyed_notifications)
             .values(key.eq(input_key))
             .execute(&mut conn)
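The new delete treats any keyed_notifications row whose heartbeat is more than two minutes old as an abandoned claim and removes it before the fresh insert is attempted. The cutoff computation looks roughly like this; to_primitive is pict-rs's own helper, and the conversion shown below is an assumption about what it does:

use time::{Duration, OffsetDateTime, PrimitiveDateTime};

fn main() {
    // Assumed equivalent of pict-rs's `to_primitive`: drop the UTC offset.
    let now = OffsetDateTime::now_utc();
    let timestamp = PrimitiveDateTime::new(now.date(), now.time());

    // Rows with `heartbeat <= cutoff` are considered abandoned.
    let cutoff = timestamp.saturating_sub(Duration::minutes(2));
    println!("deleting keyed notifications with heartbeat <= {cutoff}");
}
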
@@ -468,7 +479,7 @@ impl PostgresRepo {
     async fn register_interest(&self) -> Result<(), PostgresError> {
         let mut notifier_conn = self.get_notifier_connection().await?;
 
-        diesel::sql_query("LISTEN upload_completion_channel;")
+        diesel::sql_query("LISTEN keyed_notification_channel;")
             .execute(&mut notifier_conn)
             .with_timeout(Duration::from_secs(5))
             .await
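The notifier connection now listens on keyed_notification_channel instead of the upload-specific channel, matching the keyed_notifications table used above. For orientation, Postgres LISTEN/NOTIFY pairs up roughly as sketched below; whether pict-rs raises the notification explicitly like this or from a database trigger is not visible in this hunk, and the payload shape is an assumption:

use diesel_async::{AsyncPgConnection, RunQueryDsl};

// Waiter side: subscribe a dedicated connection to the channel.
async fn listen(conn: &mut AsyncPgConnection) -> Result<(), diesel::result::Error> {
    diesel::sql_query("LISTEN keyed_notification_channel;")
        .execute(conn)
        .await?;
    Ok(())
}

// Notifier side: wake listeners, carrying the key as the payload.
async fn notify(conn: &mut AsyncPgConnection, key: &str) -> Result<(), diesel::result::Error> {
    // NOTIFY does not take bind parameters, so pg_notify() is the safe form.
    diesel::sql_query("SELECT pg_notify('keyed_notification_channel', $1);")
        .bind::<diesel::sql_types::Text, _>(key)
        .execute(conn)
        .await?;
    Ok(())
}
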
@@ -478,7 +489,7 @@ impl PostgresRepo {
         Ok(())
     }
 
-    async fn clear_keyed_notifier(&self, input_key: &str) -> Result<(), PostgresError> {
+    async fn clear_keyed_notifier(&self, input_key: String) -> Result<(), PostgresError> {
         use schema::keyed_notifications::dsl::*;
 
         let mut conn = self.get_connection().await?;
@@ -650,8 +661,8 @@ impl<'a> KeyedNotifierState<'a> {
         if let Some(notification_entry) = self
             .inner
             .keyed_notifications
-            .remove(key)
-            .and_then(|(_, weak)| weak.upgrade())
+            .get(key)
+            .and_then(|weak| weak.upgrade())
         {
             notification_entry.notify.notify_waiters();
         }
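Two things change here: the entry is looked up with get instead of remove, so it stays registered after a wake-up (presumably cleaned up elsewhere when the last listener drops), and the closure adjusts because get yields just the weak reference rather than a (key, value) pair. The notify_waiters call itself only wakes tasks that are already registered and stores no permit for later arrivals, which is why a listener must be set up before the notification can fire. A small tokio illustration of that ordering requirement:

use std::sync::Arc;
use tokio::sync::Notify;

#[tokio::main]
async fn main() {
    let notify = Arc::new(Notify::new());

    // Register interest *before* the event can fire: the `Notified` future
    // must exist (and be enabled/polled) before `notify_waiters()` runs.
    let notified = notify.notified();
    tokio::pin!(notified);
    notified.as_mut().enable();

    // `notify_waiters` wakes everything currently waiting, but unlike
    // `notify_one` it does not store a permit for future waiters.
    notify.notify_waiters();

    notified.await;
    println!("woken");
}
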
@@ -1165,16 +1176,23 @@ impl VariantRepo for PostgresRepo {
             .map_err(Into::into)
     }
 
+    #[tracing::instrument(level = "trace", skip(self))]
+    async fn fail_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError> {
+        let key = format!("{}{variant}", hash.to_base64());
+
+        self.clear_keyed_notifier(key).await.map_err(Into::into)
+    }
+
     #[tracing::instrument(level = "debug", skip(self))]
     async fn await_variant(
         &self,
         hash: Hash,
         variant: String,
     ) -> Result<Option<Arc<str>>, RepoError> {
-        let key = Arc::from(format!("{}{}", hash.to_base64(), variant.clone()));
+        let key = Arc::from(format!("{}{variant}", hash.to_base64()));
 
         let listener = self.listen_on_key(key);
-        let notified = listener.notified_timeout(Duration::from_secs(10));
+        let notified = listener.notified_timeout(Duration::from_secs(5));
 
         self.register_interest().await?;
 
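The key construction change here is cosmetic: the positional format!("{}{}", hash.to_base64(), variant.clone()) becomes format!("{}{variant}", hash.to_base64()), using inline format-args capture and dropping the extra clone; the wait timeout also drops from 10 to 5 seconds. The two formatting forms produce identical strings:

fn main() {
    let base64_hash = "abc123";
    let variant = "thumb.webp";

    // Positional arguments vs. inline capture (Rust 1.58+): same output.
    let old_style = format!("{}{}", base64_hash, variant);
    let new_style = format!("{base64_hash}{variant}");

    assert_eq!(old_style, new_style);
}
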
@@ -1200,36 +1218,60 @@ impl VariantRepo for PostgresRepo {
         input_variant: String,
         input_identifier: &Arc<str>,
     ) -> Result<Result<(), VariantAlreadyExists>, RepoError> {
-        use schema::variants::dsl::*;
-
         let mut conn = self.get_connection().await?;
 
-        let res = diesel::insert_into(variants)
-            .values((
-                hash.eq(&input_hash),
-                variant.eq(&input_variant),
-                identifier.eq(input_identifier.as_ref()),
-            ))
-            .execute(&mut conn)
-            .with_metrics(crate::init_metrics::POSTGRES_VARIANTS_RELATE_VARIANT_IDENTIFIER)
-            .with_timeout(Duration::from_secs(5))
-            .await
-            .map_err(|_| PostgresError::DbTimeout)?;
-
-        let key = format!("{}{}", input_hash.to_base64(), input_variant.clone());
-        match self.clear_keyed_notifier(&key).await {
-            Ok(()) => {}
-            Err(e) => tracing::warn!("Failed to clear notifier: {e}"),
-        }
-
-        match res {
-            Ok(_) => Ok(Ok(())),
-            Err(diesel::result::Error::DatabaseError(
-                diesel::result::DatabaseErrorKind::UniqueViolation,
-                _,
-            )) => Ok(Err(VariantAlreadyExists)),
-            Err(e) => Err(PostgresError::Diesel(e).into()),
-        }
+        conn.transaction(|conn| {
+            async move {
+                let res = async {
+                    use schema::variants::dsl::*;
+
+                    diesel::insert_into(variants)
+                        .values((
+                            hash.eq(&input_hash),
+                            variant.eq(&input_variant),
+                            identifier.eq(input_identifier.to_string()),
+                        ))
+                        .execute(conn)
+                        .with_metrics(
+                            crate::init_metrics::POSTGRES_VARIANTS_RELATE_VARIANT_IDENTIFIER,
+                        )
+                        .with_timeout(Duration::from_secs(5))
+                        .await
+                        .map_err(|_| PostgresError::DbTimeout)
+                }
+                .await;
+
+                let notification_res = async {
+                    use schema::keyed_notifications::dsl::*;
+
+                    let input_key = format!("{}{input_variant}", input_hash.to_base64());
+                    diesel::delete(keyed_notifications)
+                        .filter(key.eq(input_key))
+                        .execute(conn)
+                        .with_timeout(Duration::from_secs(5))
+                        .await
+                        .map_err(|_| PostgresError::DbTimeout)
+                }
+                .await;
+
+                match notification_res? {
+                    Ok(_) => {}
+                    Err(e) => tracing::warn!("Failed to clear notifier: {e}"),
+                }
+
+                match res? {
+                    Ok(_) => Ok(Ok(())),
+                    Err(diesel::result::Error::DatabaseError(
+                        diesel::result::DatabaseErrorKind::UniqueViolation,
+                        _,
+                    )) => Ok(Err(VariantAlreadyExists)),
+                    Err(e) => Err(PostgresError::Diesel(e)),
+                }
+            }
+            .scope_boxed()
+        })
+        .await
+        .map_err(PostgresError::into)
     }
 
     #[tracing::instrument(level = "debug", skip(self))]
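relate_variant_identifier now runs the variant insert and the keyed-notification cleanup inside one database transaction, using diesel-async's transaction API with a scope_boxed future (hence the new ScopedFutureExt import earlier in the diff). Stripped of the pict-rs specifics, the pattern looks like this; the tables and queries below are placeholders, not pict-rs schema:

use diesel_async::{scoped_futures::ScopedFutureExt, AsyncConnection, AsyncPgConnection, RunQueryDsl};

// Run two statements atomically: if either fails, the transaction rolls back.
async fn two_statements(conn: &mut AsyncPgConnection) -> Result<(), diesel::result::Error> {
    conn.transaction(|conn| {
        async move {
            diesel::sql_query("INSERT INTO example_a DEFAULT VALUES")
                .execute(conn)
                .await?;

            diesel::sql_query("DELETE FROM example_b WHERE stale")
                .execute(conn)
                .await?;

            Ok(())
        }
        .scope_boxed()
    })
    .await
}

The scope_boxed call is what lets the returned future borrow the connection handed to the closure, which is why the queries inside run against `conn` rather than a freshly pulled pool connection.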
@@ -1500,16 +1542,22 @@ impl DetailsRepo for PostgresRepo {
         let value =
             serde_json::to_value(&input_details.inner).map_err(PostgresError::SerializeDetails)?;
 
-        diesel::insert_into(details)
+        let res = diesel::insert_into(details)
             .values((identifier.eq(input_identifier.as_ref()), json.eq(&value)))
             .execute(&mut conn)
             .with_metrics(crate::init_metrics::POSTGRES_DETAILS_RELATE)
             .with_timeout(Duration::from_secs(5))
             .await
-            .map_err(|_| PostgresError::DbTimeout)?
-            .map_err(PostgresError::Diesel)?;
+            .map_err(|_| PostgresError::DbTimeout)?;
 
-        Ok(())
+        match res {
+            Ok(_)
+            | Err(diesel::result::Error::DatabaseError(
+                diesel::result::DatabaseErrorKind::UniqueViolation,
+                _,
+            )) => Ok(()),
+            Err(e) => Err(PostgresError::Diesel(e).into()),
+        }
     }
 
     #[tracing::instrument(level = "debug", skip(self))]
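relate_details now treats a unique-constraint conflict as success (the details row already exists) instead of surfacing it as an error, and the same UniqueViolation pattern is matched in relate_variant_identifier above. A tiny helper of this shape (not part of the commit, purely illustrative) captures the check:

use diesel::result::{DatabaseErrorKind, Error};

// True when the error is Postgres reporting that the row already exists.
fn is_unique_violation(e: &Error) -> bool {
    matches!(
        e,
        Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _)
    )
}
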
@@ -1462,6 +1462,11 @@ impl VariantRepo for SledRepo {
         todo!()
     }
 
+    #[tracing::instrument(level = "trace", skip(self))]
+    async fn fail_variant(&self, hash: Hash, variant: String) -> Result<(), RepoError> {
+        todo!()
+    }
+
     #[tracing::instrument(level = "trace", skip(self))]
     async fn await_variant(
         &self,