Mirror of https://github.com/Nutomic/ibis.git
Nodeinfo with user, article and active stats, standard compliant
parent af09ee23bc
commit 9eb5f4cfb1
12 changed files with 285 additions and 15 deletions
Cargo.lock (generated): 10 changes
@@ -529,6 +529,15 @@ dependencies = [
 "inout",
]

+[[package]]
+name = "clokwerk"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd108d365fcb6d7eddf17a6718eb6a33db18ba4178f8cc6b667f480710f10d76"
+dependencies = [
+ "chrono",
+]
+
[[package]]
name = "codee"
version = "0.2.0"
@@ -1822,6 +1831,7 @@ dependencies = [
 "axum-macros",
 "bcrypt",
 "chrono",
+ "clokwerk",
 "codee",
 "config",
 "console_error_panic_hook",
Cargo.toml
@@ -105,6 +105,7 @@ env_logger = { version = "0.11.5", default-features = false }
anyhow = "1.0.94"
include_dir = "0.7.4"
mime_guess = "2.0.5"
+clokwerk = "0.4.0"

[dev-dependencies]
pretty_assertions = "1.4.1"
migrations/2024-12-18-214511_site-stats/down.sql (new file): 14 lines
@@ -0,0 +1,14 @@
DROP TABLE instance_stats;

DROP TRIGGER instance_stats_local_user_insert ON local_user;

DROP TRIGGER instance_stats_local_user_delete ON local_user;

DROP TRIGGER instance_stats_article_insert ON article;

DROP TRIGGER instance_stats_article_delete ON article;

DROP FUNCTION instance_stats_local_user_insert,
    instance_stats_local_user_delete, instance_stats_article_insert,
    instance_stats_article_delete, instance_stats_activity;
migrations/2024-12-18-214511_site-stats/up.sql (new file): 135 lines
@@ -0,0 +1,135 @@
CREATE TABLE instance_stats (
    id serial PRIMARY KEY,
    users int NOT NULL DEFAULT 0,
    users_active_month int NOT NULL DEFAULT 0,
    users_active_half_year int NOT NULL DEFAULT 0,
    articles int NOT NULL DEFAULT 0
);

INSERT INTO instance_stats (users, articles)
SELECT
    (SELECT count(*) FROM local_user) AS users,
    (SELECT count(*) FROM article WHERE local = TRUE) AS article
FROM instance;

CREATE FUNCTION instance_stats_local_user_insert ()
    RETURNS TRIGGER
    LANGUAGE plpgsql
    AS $$
BEGIN
    UPDATE
        instance_stats
    SET
        users = users + 1;
    RETURN NULL;
END
$$;

CREATE FUNCTION instance_stats_local_user_delete ()
    RETURNS TRIGGER
    LANGUAGE plpgsql
    AS $$
BEGIN
    UPDATE
        instance_stats sa
    SET
        users = users - 1
    FROM
        instance s
    WHERE
        sa.instance_id = s.id;
    RETURN NULL;
END
$$;

CREATE TRIGGER instance_stats_local_user_insert
    AFTER INSERT ON local_user
    FOR EACH ROW
    EXECUTE PROCEDURE instance_stats_local_user_insert ();

CREATE TRIGGER instance_stats_local_user_delete
    AFTER DELETE ON local_user
    FOR EACH ROW
    EXECUTE PROCEDURE instance_stats_local_user_delete ();

CREATE FUNCTION instance_stats_article_insert ()
    RETURNS TRIGGER
    LANGUAGE plpgsql
    AS $$
BEGIN
    UPDATE
        instance_stats
    SET
        articles = articles + 1;
    RETURN NULL;
END
$$;

CREATE FUNCTION instance_stats_article_delete ()
    RETURNS TRIGGER
    LANGUAGE plpgsql
    AS $$
BEGIN
    UPDATE
        instance_stats ia
    SET
        articles = articles - 1
    FROM
        instance i
    WHERE
        ia.instance_id = i.id;
    RETURN NULL;
END
$$;

CREATE TRIGGER instance_stats_article_insert
    AFTER INSERT ON article
    FOR EACH ROW
    WHEN (NEW.local = TRUE)
    EXECUTE PROCEDURE instance_stats_article_insert ();

CREATE TRIGGER instance_stats_article_delete
    AFTER DELETE ON article
    FOR EACH ROW
    WHEN (OLD.local = TRUE)
    EXECUTE PROCEDURE instance_stats_article_delete ();

CREATE OR REPLACE FUNCTION instance_stats_activity (i text)
    RETURNS int
    LANGUAGE plpgsql
    AS $$
DECLARE
    count_ integer;
BEGIN
    SELECT
        count(*) INTO count_
    FROM (
        SELECT
            e.creator_id
        FROM
            edit e
            INNER JOIN person p ON e.creator_id = p.id
        WHERE
            e.published > ('now'::timestamp - i::interval)
            AND p.local = TRUE);
    RETURN count_;
END;
$$;

UPDATE
    instance_stats
SET
    users_active_month = (
        SELECT
            *
        FROM
            instance_stats_activity ('1 month'));

UPDATE
    instance_stats
SET
    users_active_half_year = (
        SELECT
            *
        FROM
            instance_stats_activity ('6 months'));
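As a quick sanity check after the migration runs, the seeded counter row and the activity helper defined above can be queried directly. A minimal sketch; the returned numbers depend on the instance's existing data:

SELECT users, users_active_month, users_active_half_year, articles FROM instance_stats;

SELECT instance_stats_activity('1 month');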
src/backend/database/instance_stats.rs (new file): 21 lines
@@ -0,0 +1,21 @@
use super::schema::instance_stats;
use crate::backend::{IbisData, MyResult};
use diesel::{query_dsl::methods::FindDsl, Queryable, RunQueryDsl, Selectable};
use std::ops::DerefMut;

#[derive(Queryable, Selectable)]
#[diesel(table_name = instance_stats, check_for_backend(diesel::pg::Pg))]
pub struct InstanceStats {
    pub id: i32,
    pub users: i32,
    pub users_active_month: i32,
    pub users_active_half_year: i32,
    pub articles: i32,
}

impl InstanceStats {
    pub fn read(data: &IbisData) -> MyResult<Self> {
        let mut conn = data.db_pool.get()?;
        Ok(instance_stats::table.find(1).get_result(conn.deref_mut())?)
    }
}
src/backend/database/mod.rs
@@ -11,12 +11,15 @@ pub mod article;
pub mod conflict;
pub mod edit;
pub mod instance;
+pub mod instance_stats;
pub(crate) mod schema;
pub mod user;

+pub type DbPool = Pool<ConnectionManager<PgConnection>>;
+
#[derive(Clone)]
pub struct IbisData {
-    pub db_pool: Pool<ConnectionManager<PgConnection>>,
+    pub db_pool: DbPool,
    pub config: IbisConfig,
}
src/backend/database/schema.rs
@@ -72,6 +72,16 @@ diesel::table! {
    }
}

+diesel::table! {
+    instance_stats (id) {
+        id -> Int4,
+        users -> Int4,
+        users_active_month -> Int4,
+        users_active_half_year -> Int4,
+        articles -> Int4,
+    }
+}
+
diesel::table! {
    jwt_secret (id) {
        id -> Int4,

@@ -118,6 +128,7 @@ diesel::allow_tables_to_appear_in_same_query!(
    edit,
    instance,
    instance_follow,
+    instance_stats,
    jwt_secret,
    local_user,
    person,
src/backend/mod.rs
@@ -49,7 +49,7 @@ use federation::objects::{
use leptos::prelude::*;
use leptos_axum::{generate_route_list, LeptosRoutes};
use log::info;
-use std::net::SocketAddr;
+use std::{net::SocketAddr, thread};
use tokio::{net::TcpListener, sync::oneshot};
use tower_http::{compression::CompressionLayer, cors::CorsLayer};
use tower_layer::Layer;

@@ -62,6 +62,7 @@ pub mod database;
pub mod error;
pub mod federation;
mod nodeinfo;
+mod scheduled_tasks;
mod utils;

const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

@@ -97,6 +98,11 @@ pub async fn start(
        setup(&data.to_request_data()).await?;
    }

+    let db_pool = data.db_pool.clone();
+    thread::spawn(move || {
+        scheduled_tasks::start(db_pool);
+    });
+
    let leptos_options = get_config_from_str(include_str!("../../Cargo.toml"))?;
    let mut addr = leptos_options.site_addr;
    if let Some(override_hostname) = override_hostname {
src/backend/nodeinfo.rs
@@ -1,24 +1,25 @@
+use super::database::instance_stats::InstanceStats;
use crate::{
    backend::{database::IbisData, error::MyResult},
    common::utils::http_protocol_str,
};
use activitypub_federation::config::Data;
use axum::{routing::get, Json, Router};
-use serde::{Deserialize, Serialize};
+use serde::Serialize;
use url::Url;

pub fn config() -> Router<()> {
    Router::new()
-        .route("/nodeinfo/2.0.json", get(node_info))
+        .route("/nodeinfo/2.1.json", get(node_info))
        .route("/.well-known/nodeinfo", get(node_info_well_known))
}

async fn node_info_well_known(data: Data<IbisData>) -> MyResult<Json<NodeInfoWellKnown>> {
    Ok(Json(NodeInfoWellKnown {
        links: vec![NodeInfoWellKnownLinks {
-            rel: Url::parse("http://nodeinfo.diaspora.software/ns/schema/2.0")?,
+            rel: Url::parse("http://nodeinfo.diaspora.software/ns/schema/2.1")?,
            href: Url::parse(&format!(
-                "{}://{}/nodeinfo/2.0.json",
+                "{}://{}/nodeinfo/2.1.json",
                http_protocol_str(),
                data.domain()
            ))?,
@@ -27,40 +28,79 @@ async fn node_info_well_known(data: Data<IbisData>) -> MyResult<Json<NodeInfoWel
}

async fn node_info(data: Data<IbisData>) -> MyResult<Json<NodeInfo>> {
+    let stats = InstanceStats::read(&data)?;
    Ok(Json(NodeInfo {
-        version: "2.0".to_string(),
+        version: "2.1".to_string(),
        software: NodeInfoSoftware {
            name: "ibis".to_string(),
            version: env!("CARGO_PKG_VERSION").to_string(),
            repository: "https://github.com/Nutomic/ibis".to_string(),
            homepage: "https://ibis.wiki/".to_string(),
        },
        protocols: vec!["activitypub".to_string()],
        usage: NodeInfoUsage {
            users: NodeInfoUsers {
                total: stats.users,
                active_month: stats.users_active_month,
                active_halfyear: stats.users_active_half_year,
            },
            local_posts: stats.articles,
        },
        open_registrations: data.config.options.registration_open,
        services: Default::default(),
        metadata: vec![],
    }))
}

-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize)]
struct NodeInfoWellKnown {
    pub links: Vec<NodeInfoWellKnownLinks>,
}

-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize)]
struct NodeInfoWellKnownLinks {
    pub rel: Url,
    pub href: Url,
}

-#[derive(Serialize, Deserialize, Debug, Default)]
-#[serde(rename_all = "camelCase", default)]
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
pub struct NodeInfo {
    pub version: String,
    pub software: NodeInfoSoftware,
    pub protocols: Vec<String>,
    pub usage: NodeInfoUsage,
    pub open_registrations: bool,
    /// These fields are required by the spec for no reason
    pub services: NodeInfoServices,
    pub metadata: Vec<String>,
}

-#[derive(Serialize, Deserialize, Debug, Default)]
-#[serde(default)]
+#[derive(Serialize)]
pub struct NodeInfoSoftware {
    pub name: String,
    pub version: String,
    pub repository: String,
    pub homepage: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeInfoUsage {
    pub users: NodeInfoUsers,
    pub local_posts: i32,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NodeInfoUsers {
    pub total: i32,
    pub active_month: i32,
    pub active_halfyear: i32,
}

#[derive(Serialize, Default)]
pub struct NodeInfoServices {
    pub inbound: Vec<String>,
    pub outbound: Vec<String>,
}
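With these structs, a GET to the new /nodeinfo/2.1.json route would serialize to roughly the following shape. This is an illustrative sketch only; the software version string and all counts are placeholder values, not output from a real instance:

{
  "version": "2.1",
  "software": {
    "name": "ibis",
    "version": "0.2.0",
    "repository": "https://github.com/Nutomic/ibis",
    "homepage": "https://ibis.wiki/"
  },
  "protocols": ["activitypub"],
  "usage": {
    "users": {
      "total": 42,
      "activeMonth": 5,
      "activeHalfyear": 12
    },
    "localPosts": 17
  },
  "openRegistrations": true,
  "services": {
    "inbound": [],
    "outbound": []
  },
  "metadata": []
}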
src/backend/scheduled_tasks.rs (new file): 29 lines
@@ -0,0 +1,29 @@
use super::{database::DbPool, error::MyResult};
use clokwerk::{Scheduler, TimeUnits};
use diesel::{sql_query, RunQueryDsl};
use log::{error, info};
use std::time::Duration;

pub fn start(pool: DbPool) {
    let mut scheduler = Scheduler::new();

    active_counts(&pool).inspect_err(|e| error!("{e}")).ok();
    scheduler.every(1.hour()).run(move || {
        active_counts(&pool).inspect_err(|e| error!("{e}")).ok();
    });

    let _ = scheduler.watch_thread(Duration::from_secs(60));
}

fn active_counts(pool: &DbPool) -> MyResult<()> {
    info!("Updating active user count");
    let mut conn = pool.get()?;

    sql_query("update instance_stats set users_active_month = (select * from instance_stats_activity('1 month'))")
        .execute(&mut conn)?;
    sql_query("update instance_stats set users_active_half_year = (select * from instance_stats_activity('6 months'))")
        .execute(&mut conn)?;

    info!("Done with active user count");
    Ok(())
}
@@ -10,7 +10,7 @@ pub fn EditorView(
) -> impl IntoView {
    let (preview, set_preview) = signal(render_markdown(&content.get_untracked()));
    let cookie = use_cookie("editor_preview");
-    let show_preview = Signal::derive(move || cookie.0.get().unwrap_or_else(|| true));
+    let show_preview = Signal::derive(move || cookie.0.get().unwrap_or(true));

    // Prevent user from accidentally closing the page while editing. Doesn't prevent navigation
    // within Ibis.
@@ -119,7 +119,7 @@ impl InlineRule for ArticleLinkScanner {
        let content = &state.src[start..i];
        content.split_once('@').map(|(title, domain)| {
            // Handle custom link label if provided, otherwise use title as label
-            let (domain, label) = domain.split_once('|').unwrap_or((&domain, &title));
+            let (domain, label) = domain.split_once('|').unwrap_or((domain, title));
            let node = Node::new(ArticleLink {
                label: label.to_string(),
                title: title.to_string(),