Merge remote-tracking branch 'upstream/master'

Jose Francisco Martinez Salgado 2020-02-04 08:57:13 -06:00
commit 7382defa4c
212 changed files with 23523 additions and 7752 deletions

.dockerignore (1 change)

@@ -1,5 +1,4 @@
  ui/node_modules
  ui/dist
  server/target
- docs
  .git

.gitignore (4 changes)

@@ -1,2 +1,6 @@
  ansible/inventory
  ansible/passwords/
+ docker/lemmy_mine.hjson
+ docker/dev/env_deploy.sh
+ build/
+ .idea/

.travis.yml (25 changes)

@@ -6,17 +6,30 @@ matrix:
  - rust: nightly
  fast_finish: true
  cache: cargo
+ before_cache:
+ - rm -rfv target/debug/incremental/lemmy_server-*
+ - rm -rfv target/debug/.fingerprint/lemmy_server-*
+ - rm -rfv target/debug/build/lemmy_server-*
+ - rm -rfv target/debug/deps/lemmy_server-*
+ - rm -rfv target/debug/lemmy_server.d
+ - cargo clean
  before_script:
- - psql -c "create user rrr with password 'rrr' superuser;" -U postgres
- - psql -c 'create database rrr with owner rrr;' -U postgres
+ - psql -c "create user lemmy with password 'password' superuser;" -U postgres
+ - psql -c 'create database lemmy with owner lemmy;' -U postgres
+ - rustup component add clippy --toolchain stable-x86_64-unknown-linux-gnu
  before_install:
  - cd server
  script:
- - cargo install --force diesel_cli --no-default-features --features postgres
+ # Default checks, but fail if anything is detected
+ - cargo build
+ - cargo clippy -- -D clippy::style -D clippy::correctness -D clippy::complexity -D clippy::perf
+ - cargo install diesel_cli --no-default-features --features postgres --force
  - diesel migration run
- - cargo build --all
- - cargo test --all
+ - cargo test
  env:
- - DATABASE_URL=postgres://rrr:rrr@localhost/rrr
+ global:
+ - DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
+ - RUST_TEST_THREADS=1
  addons:
  postgresql: "9.4"
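
The same lint gate can be run locally before pushing; a minimal sketch using the exact flags from the new CI step, run from the `server/` directory:

```bash
# Mirror the new CI clippy step: any style/correctness/complexity/perf lint fails the build
cargo clippy -- -D clippy::style -D clippy::correctness -D clippy::complexity -D clippy::perf
```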

CODE_OF_CONDUCT.md (new file, 35 lines)

@@ -0,0 +1,35 @@
# Code of Conduct
- We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
- Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all.
- Please be kind and courteous. There's no need to be mean or rude.
- Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
- Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
- We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the Citizen Code of Conduct; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
- Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the Lemmy moderation team immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
- Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
[**Message the Moderation Team on Mastodon**](https://mastodon.social/@LemmyDev)
[**Email The Moderation Team**](mailto:contact@lemmy.ml)
## Moderation
These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the Lemmy moderation team.
1. Remarks that violate the Lemmy standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
3. Moderators will first respond to such remarks with a warning.
4. If the warning is unheeded, the user will be “kicked,” i.e., kicked out of the communication channel to cool off.
5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, in private. Complaints about bans in-channel are not allowed.
8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
In the Lemmy community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make others comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
The enforcement policies listed above apply to all official Lemmy venues; including git repositories under [github.com/dessalines/lemmy](https://github.com/dessalines/lemmy) and [yerbamate.dev/dessalines/lemmy](https://yerbamate.dev/dessalines/lemmy), the [Matrix channel](https://matrix.to/#/!BZVTUuEiNmRcbFeLeI:matrix.org?via=matrix.org&via=privacytools.io&via=permaweb.io); and all instances under lemmy.ml. For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
Adapted from the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct), which is based on the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).

CONTRIBUTING.md (new file, 4 lines)

@@ -0,0 +1,4 @@
# Contributing
See [here](https://dev.lemmy.ml/docs/contributing.html) for contributing instructions.

README.md (229 changes)

@@ -1,9 +1,14 @@
- <h1><img src="ui/assets/favicon.svg" width="50px" height="50px" style="vertical-align:bottom" /><span>Lemmy</span></h1>
+ <p align="center">
+ <a href="https://dev.lemmy.ml/" rel="noopener">
+ <img width=200px height=200px src="ui/assets/favicon.svg"></a>
+ </p>
+ <h3 align="center">Lemmy</h3>
+ <div align="center">
+ [![Github](https://img.shields.io/badge/-Github-blue)](https://github.com/dessalines/lemmy)
+ [![Gitlab](https://img.shields.io/badge/-Gitlab-yellowgreen)](https://gitlab.com/dessalines/lemmy)
- ![Mastodon Follow](https://img.shields.io/mastodon/follow/810572?domain=https%3A%2F%2Fmastodon.social&style=social)
  ![GitHub stars](https://img.shields.io/github/stars/dessalines/lemmy?style=social)
+ [![Mastodon Follow](https://img.shields.io/mastodon/follow/810572?domain=https%3A%2F%2Fmastodon.social&style=social)](https://mastodon.social/@LemmyDev)
  [![Matrix](https://img.shields.io/matrix/rust-reddit-fediverse:matrix.org.svg?label=matrix-chat)](https://riot.im/app/#/room/#rust-reddit-fediverse:matrix.org)
  ![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/dessalines/lemmy.svg)
  [![Build Status](https://travis-ci.org/dessalines/lemmy.svg?branch=master)](https://travis-ci.org/dessalines/lemmy)
@@ -13,39 +18,21 @@
  ![GitHub repo size](https://img.shields.io/github/repo-size/dessalines/lemmy.svg)
  [![License](https://img.shields.io/github/license/dessalines/lemmy.svg)](LICENSE)
  [![Patreon](https://img.shields.io/badge/-Support%20on%20Patreon-blueviolet.svg)](https://www.patreon.com/dessalines)
+ </div>
- A link aggregator / reddit clone for the fediverse.
+ ---
- [Lemmy Dev instance](https://dev.lemmy.ml) *for testing purposes only*
+ <p align="center">A link aggregator / reddit clone for the fediverse.
+ <br>
+ </p>
+ [Lemmy Dev instance](https://dev.lemmy.ml) *This data is being backed up, and once federation is working, it will be the basis for a main instance.*
  This is a **very early beta version**, and a lot of features are currently broken or in active development, such as federation.
  Front Page|Post
  ---|---
- ![main screen](https://i.imgur.com/y64BtXC.png)|![chat screen](https://i.imgur.com/vsOr87q.png)
+ ![main screen](https://i.imgur.com/kZSRcRu.png)|![chat screen](https://i.imgur.com/4XghNh6.png)
- ## Features
- - Open source, [AGPL License](/LICENSE).
- - Self hostable, easy to deploy.
- - Comes with [Docker](#docker), [Ansible](#ansible), [Kubernetes](#kubernetes).
- - Live-updating Comment threads.
- - Full vote scores `(+/-)` like old reddit.
- - Moderation abilities.
- - Public Moderation Logs.
- - Both site admins, and community moderators, who can appoint other moderators.
- - Can lock, remove, and restore posts and comments.
- - Can ban and unban users from communities and the site.
- - Clean, mobile-friendly interface.
- - i18n / internationalization support.
- - NSFW post / community support.
- - Cross-posting support.
- - A *similar post search* when creating new posts.
- - Can transfer site and communities to others.
- - High performance.
- - Server is written in rust.
- - Front end is `~80kB` gzipped.
+ ## About
  [Lemmy](https://github.com/dessalines/lemmy) is similar to sites like [Reddit](https://reddit.com), [Lobste.rs](https://lobste.rs), [Raddle](https://raddle.me), or [Hacker News](https://news.ycombinator.com/): you subscribe to forums you're interested in, post links and discussions, then vote, and comment on them. Behind the scenes, it is very different; anyone can easily run a server, and all these servers are federated (think email), and connected to the same universe, called the [Fediverse](https://en.wikipedia.org/wiki/Fediverse).
@@ -55,33 +42,78 @@ The overall goal is to create an easily self-hostable, decentralized alternative
  Each lemmy server can set its own moderation policy; appointing site-wide admins, and community moderators to keep out the trolls, and foster a healthy, non-toxic environment where all can feel comfortable contributing.
- Made with [Rust](https://www.rust-lang.org), [Actix](https://actix.rs/), [Inferno](https://infernojs.org), [Typescript](https://www.typescriptlang.org/) and [Diesel](http://diesel.rs/).
+ - [Documentation](https://dev.lemmy.ml/docs/index.html)
+ - [Releases / Changelog](/RELEASES.md)
+ - [Contributing](https://dev.lemmy.ml/docs/contributing.html)
+ ## Repository Mirrors
+ - [GitHub](https://github.com/dessalines/lemmy)
+ - [Gitea](https://yerbamate.dev/dessalines/lemmy)
+ - [GitLab](https://gitlab.com/dessalines/lemmy)
+ ## Features
+ - Open source, [AGPL License](/LICENSE).
+ - Self hostable, easy to deploy.
+ - Comes with [Docker](#docker), [Ansible](#ansible), [Kubernetes](#kubernetes).
+ - Clean, mobile-friendly interface.
+ - Only a minimum of a username and password is required to sign up!
+ - User avatar support.
+ - Live-updating Comment threads.
+ - Full vote scores `(+/-)` like old reddit.
+ - Themes, including light, dark, and solarized.
+ - Emojis with autocomplete support. Start typing `:`
+ - User tagging using `@`, Community tagging using `#`.
+ - Integrated image uploading in both posts and comments.
+ - A post can consist of a title and any combination of self text, a URL, or nothing else.
+ - Notifications, on comment replies and when you're tagged.
+ - Notifications can be sent via email.
+ - i18n / internationalization support.
+ - RSS / Atom feeds for `All`, `Subscribed`, `Inbox`, `User`, and `Community`.
+ - Cross-posting support.
+ - A *similar post search* when creating new posts. Great for question / answer communities.
+ - Moderation abilities.
+ - Public Moderation Logs.
+ - Can sticky posts to the top of communities.
+ - Both site admins, and community moderators, who can appoint other moderators.
+ - Can lock, remove, and restore posts and comments.
+ - Can ban and unban users from communities and the site.
+ - Can transfer site and communities to others.
+ - Can fully erase your data, replacing all posts and comments.
+ - NSFW post / community support.
+ - High performance.
+ - Server is written in rust.
+ - Front end is `~80kB` gzipped.
+ - Supports arm64 / Raspberry Pi.
  ## Why's it called Lemmy?
- - Lead singer from [motorhead](https://invidio.us/watch?v=pWB5JZRGl0U).
+ - Lead singer from [Motörhead](https://invidio.us/watch?v=pWB5JZRGl0U).
  - The old school [video game](<https://en.wikipedia.org/wiki/Lemmings_(video_game)>).
  - The [Koopa from Super Mario](https://www.mariowiki.com/Lemmy_Koopa).
  - The [furry rodents](http://sunchild.fpwc.org/lemming-the-little-giant-of-the-north/).
+ Made with [Rust](https://www.rust-lang.org), [Actix](https://actix.rs/), [Inferno](https://www.infernojs.org), [Typescript](https://www.typescriptlang.org/) and [Diesel](http://diesel.rs/).
  ## Install
  ### Docker
- Make sure you have both docker and docker-compose(>=`1.24.0`) installed.
+ Make sure you have both docker and docker-compose(>=`1.24.0`) installed:
  ```bash
  mkdir lemmy/
  cd lemmy/
  wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
- wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/.env
- # Edit the .env if you want custom passwords
+ wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/lemmy.hjson
+ # Edit lemmy.hjson to do more configuration
  docker-compose up -d
  ```
- and goto http://localhost:8536
+ and go to http://localhost:8536.
- [A sample nginx config](/ansible/templates/nginx.conf), could be setup with:
+ [A sample nginx config](/ansible/templates/nginx.conf) (Image uploading won't work without this), could be setup with:
  ```bash
  wget https://raw.githubusercontent.com/dessalines/lemmy/master/ansible/templates/nginx.conf
@@ -99,8 +131,7 @@ docker-compose up -d
  ### Ansible
- First, you need to [install Ansible on your local computer](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html),
- eg using `sudo apt install ansible`, or the equivalent for you platform.
+ First, you need to [install Ansible on your local computer](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) (e.g. using `sudo apt install ansible`) or the equivalent for you platform.
  Then run the following commands on your local computer:
@@ -112,103 +143,51 @@ nano inventory # enter your server, domain, contact email
  ansible-playbook lemmy.yml --become
  ```
- ### Kubernetes
- You'll need to have an existing Kubernetes cluster and [storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/).
- Setting this up will vary depending on your provider.
- To try it locally, you can use [MicroK8s](https://microk8s.io/) or [Minikube](https://kubernetes.io/docs/tasks/tools/install-minikube/).
- Once you have a working cluster, edit the environment variables and volume sizes in `docker/k8s/*.yml`.
- You may also want to change the service types to use `LoadBalancer`s depending on where you're running your cluster (add `type: LoadBalancer` to `ports)`, or `NodePort`s.
- By default they will use `ClusterIP`s, which will allow access only within the cluster. See the [docs](https://kubernetes.io/docs/concepts/services-networking/service/) for more on networking in Kubernetes.
- **Important** Running a database in Kubernetes will work, but is generally not recommended.
- If you're deploying on any of the common cloud providers, you should consider using their managed database service instead (RDS, Cloud SQL, Azure Databse, etc.).
- Now you can deploy:
- ```bash
- # Add `-n foo` if you want to deploy into a specific namespace `foo`;
- # otherwise your resources will be created in the `default` namespace.
- kubectl apply -f docker/k8s/db.yml
- kubectl apply -f docker/k8s/pictshare.yml
- kubectl apply -f docker/k8s/lemmy.yml
- ```
- If you used a `LoadBalancer`, you should see it in your cloud provider's console.
- ## Develop
- ### Docker Development
- ```bash
- git clone https://github.com/dessalines/lemmy
- cd lemmy/docker/dev
- ./docker_update.sh # This builds and runs it, updating for your changes
- ```
- and goto http://localhost:8536
- ### Local Development
- #### Requirements
- - [Rust](https://www.rust-lang.org/)
- - [Yarn](https://yarnpkg.com/en/)
- - [Postgres](https://www.postgresql.org/)
- #### Set up Postgres DB
- ```bash
- psql -c "create user lemmy with password 'password' superuser;" -U postgres
- psql -c 'create database lemmy with owner lemmy;' -U postgres
- export DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
- ```
- #### Running
- ```bash
- git clone https://github.com/dessalines/lemmy
- cd lemmy
- ./install.sh
- # For live coding, where both the front and back end, automagically reload on any save, do:
- # cd ui && yarn start
- # cd server && cargo watch -x run
- ```
- ## Documentation
- - [Websocket API for App developers](docs/api.md)
- - [ActivityPub API.md](docs/apub_api_outline.md)
- - [Goals](docs/goals.md)
- - [Ranking Algorithm](docs/ranking.md)
- ## Support
+ ## Support / Donate
  Lemmy is free, open-source software, meaning no advertising, monetizing, or venture capital, ever. Your donations directly support full-time development of the project.
  - [Support on Patreon](https://www.patreon.com/dessalines).
- - [Sponsor List](https://dev.lemmy.ml/sponsors).
+ - [List of Sponsors](https://dev.lemmy.ml/sponsors).
+ - Soon to add either liberapay or opencollective.
+ ### Crypto
  - bitcoin: `1Hefs7miXS5ff5Ck5xvmjKjXf5242KzRtK`
  - ethereum: `0x400c96c96acbC6E7B3B43B1dc1BB446540a88A01`
  - monero: `41taVyY6e1xApqKyMVDRVxJ76sPkfZhALLTjRvVKpaAh2pBd4wv9RgYj1tSPrx8wc6iE1uWUfjtQdTmTy2FGMeChGVKPQuV`
  ## Translations
- If you'd like to add translations, take a look a look at the [english translation file](ui/src/translations/en.ts).
+ If you'd like to add translations, take a look at the [English translation file](ui/src/translations/en.ts).
- - Languages supported: English (`en`), Chinese (`zh`), Dutch (`nl`), Esperanto (`eo`), French (`fr`), Spanish (`es`), Swedish (`sv`), German (`de`), Russian (`ru`).
+ - Languages supported: Catalan, (`ca`), Farsi (`fa`), English (`en`), Chinese (`zh`), Dutch (`nl`), Esperanto (`eo`), Finnish (`fi`), French (`fr`), Spanish (`es`), Swedish (`sv`), German (`de`), Russian (`ru`), Italian (`it`).
<!-- translations -->
lang | done | missing lang | done | missing
--- | --- | --- ---- | ---- | -------
de | 82% | cross_posts,cross_post,users,number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,settings,stickied,delete_account,delete_account_confirm,banned,creator,number_online,subscribed,expires,recent_comments,nsfw,show_nsfw,theme,crypto,monero,joined,by,to,transfer_community,transfer_site,are_you_sure,yes,no ca | 99% | old,time,action
eo | 91% | number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,stickied,delete_account,delete_account_confirm,banned,creator,number_online,theme,are_you_sure,yes,no de | 87% | create_private_message,send_secure_message,send_message,message,avatar,upload_avatar,show_avatars,old,docs,message_sent,messages,old_password,matrix_user_id,private_message_disclaimer,send_notifications_to_email,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,donate_to_lemmy,donate,from,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
es | 97% | delete_account,delete_account_confirm,creator,number_online,theme fa | 72% | cross_post,subscribed_to_communities,trending_communities,create_private_message,send_secure_message,send_message,message,mod,mods,moderates,remove_as_mod,appoint_as_mod,modlog,stickied,ban,ban_from_site,unban,unban_from_site,banned,number_of_subscribers,subscribers,both,saved,unsubscribe,subscribe,subscribed,old,api,docs,inbox,inbox_for,message_sent,notifications_error,messages,no_email_setup,matrix_user_id,private_message_disclaimer,url,body,copy_suggested_title,community,expand_here,subscribe_to_communities,theme,sponsor_message,general_sponsors,joined,by,to,from,landing_0,logged_in,community_moderator_already_exists,community_follower_already_exists,community_user_already_banned,no_slurs,admin_already_created,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
fr | 95% | view_source,sticky,unsticky,stickied,delete_account,delete_account_confirm,creator,number_online,theme eo | 75% | number_of_communities,create_private_message,send_secure_message,send_message,message,preview,upload_image,avatar,upload_avatar,show_avatars,formatting_help,view_source,sticky,unsticky,archive_link,stickied,delete_account,delete_account_confirm,banned,creator,number_online,old,docs,replies,mentions,message_sent,messages,old_password,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,matrix_user_id,private_message_disclaimer,send_notifications_to_email,language,browser_default,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,theme,donate_to_lemmy,donate,from,are_you_sure,yes,no,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
nl | 93% | preview,upload_image,formatting_help,view_source,sticky,unsticky,stickied,delete_account,delete_account_confirm,banned,creator,number_online,theme es | 99% | old,time,action
ru | 86% | cross_posts,cross_post,number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,stickied,delete_account,delete_account_confirm,banned,creator,number_online,recent_comments,theme,monero,by,to,transfer_community,transfer_site,are_you_sure,yes,no fi | 99% | old,time,action
sv | 100% | fr | 82% | create_private_message,send_secure_message,send_message,message,avatar,upload_avatar,show_avatars,archive_link,old,docs,replies,mentions,message_sent,messages,old_password,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,matrix_user_id,private_message_disclaimer,send_notifications_to_email,language,browser_default,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,donate_to_lemmy,donate,from,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
zh | 84% | cross_posts,cross_post,users,number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,settings,stickied,delete_account,delete_account_confirm,banned,creator,number_online,recent_comments,nsfw,show_nsfw,theme,monero,by,to,transfer_community,transfer_site,are_you_sure,yes,no it | 83% | create_private_message,send_secure_message,send_message,message,avatar,upload_avatar,show_avatars,archive_link,old,docs,message_sent,messages,old_password,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,matrix_user_id,private_message_disclaimer,send_notifications_to_email,language,browser_default,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,donate_to_lemmy,donate,from,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
nl | 99% | time,action
ru | 71% | cross_posts,cross_post,number_of_communities,create_private_message,send_secure_message,send_message,message,preview,upload_image,avatar,upload_avatar,show_avatars,formatting_help,view_source,sticky,unsticky,archive_link,stickied,delete_account,delete_account_confirm,banned,creator,number_online,old,docs,replies,mentions,message_sent,messages,old_password,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,matrix_user_id,private_message_disclaimer,send_notifications_to_email,language,browser_default,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,recent_comments,theme,donate_to_lemmy,donate,monero,by,to,from,transfer_community,transfer_site,are_you_sure,yes,no,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
sv | 82% | create_private_message,send_secure_message,send_message,message,avatar,upload_avatar,show_avatars,archive_link,old,docs,replies,mentions,message_sent,messages,old_password,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,matrix_user_id,private_message_disclaimer,send_notifications_to_email,language,browser_default,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,donate_to_lemmy,donate,from,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
zh | 69% | cross_posts,cross_post,users,number_of_communities,create_private_message,send_secure_message,send_message,message,preview,upload_image,avatar,upload_avatar,show_avatars,formatting_help,view_source,sticky,unsticky,archive_link,settings,stickied,delete_account,delete_account_confirm,banned,creator,number_online,old,docs,replies,mentions,message_sent,messages,old_password,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,matrix_user_id,private_message_disclaimer,send_notifications_to_email,language,browser_default,downvotes_disabled,enable_downvotes,open_registration,registration_closed,enable_nsfw,recent_comments,nsfw,show_nsfw,theme,donate_to_lemmy,donate,monero,by,to,from,transfer_community,transfer_site,are_you_sure,yes,no,logged_in,email_already_exists,couldnt_create_private_message,no_private_message_edit_allowed,couldnt_update_private_message,time,action
<!-- translationsstop -->
If you'd like to update this report, run:
```bash
cd ui
ts-node translation_report.ts
```
  ## Credits
- Logo made by Andy Cuccaro (@andycuccaro) under the CC-BY-SA 4.0 license
+ Logo made by Andy Cuccaro (@andycuccaro) under the CC-BY-SA 4.0 license.
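
As a quick smoke test after the Docker install steps above (`docker-compose up -d`), the instance should answer on port 8536 before you open it in a browser; a minimal sketch, assuming `curl` is installed:

```bash
# Expect an HTTP response from the lemmy container on the published port
curl -I http://localhost:8536
```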

RELEASES.md (new file, 22 lines)

@@ -0,0 +1,22 @@
# Lemmy v0.6.0 Release (2020-01-16)
`v0.6.0` is here, and we've closed [41 issues!](https://github.com/dessalines/lemmy/milestone/15?closed=1)
This is the biggest release by far:
- Avatars!
- Optional Email notifications for username mentions, post and comment replies.
- Ability to change your password and email address.
- Can set a custom language.
- Lemmy-wide settings to disable downvotes, and close registration.
- A better documentation system, hosted in lemmy itself.
- [Huge DB performance gains](https://github.com/dessalines/lemmy/issues/411) (everything down to < `30ms`) by using materialized views.
- Fixed major issue with similar post URL and title searching.
- Upgraded to Actix `2.0`
- Faster comment / post voting.
- Better small screen support.
- Lots of bug fixes, refactoring of back end code.
Another major announcement is that Lemmy now has another lead developer besides me, [@felix@radical.town](https://radical.town/@felix). They've created a better documentation system, implemented RSS feeds, simplified the docker and project configs, upgraded actix, and are working on federation, among a whole lot else.
https://dev.lemmy.ml

ansible/VERSION (new file, 1 line)

@@ -0,0 +1 @@
v0.6.10

ansible/lemmy.yml (24 changes)

@@ -29,23 +29,19 @@
  - { path: '/lemmy/' }
  - { path: '/lemmy/volumes/' }
- - name: add all template files
-   template: src={{item.src}} dest={{item.dest}}
-   with_items:
-   - { src: 'templates/env', dest: '/lemmy/.env' }
-   - { src: '../docker/prod/docker-compose.yml', dest: '/lemmy/docker-compose.yml' }
-   - { src: 'templates/nginx.conf', dest: '/etc/nginx/sites-enabled/lemmy.conf' }
+ - block:
+   - name: add template files
+     template: src={{item.src}} dest={{item.dest}} mode={{item.mode}}
+     with_items:
+     - { src: 'templates/docker-compose.yml', dest: '/lemmy/docker-compose.yml', mode: '0600' }
+     - { src: 'templates/nginx.conf', dest: '/etc/nginx/sites-enabled/lemmy.conf', mode: '0644' }
+   - name: add config file (only during initial setup)
+     template: src='templates/config.hjson' dest='/lemmy/lemmy.hjson' mode='0600' force='no' owner='1000' group='1000'
  vars:
  postgres_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/postgres chars=ascii_letters,digits') }}"
  jwt_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/jwt chars=ascii_letters,digits') }}"
+ lemmy_docker_image: "dessalines/lemmy:{{ lookup('file', 'VERSION') }}"
- - name: set env file permissions
-   file:
-   path: "/lemmy/.env"
-   state: touch
-   mode: 0600
-   access_time: preserve
-   modification_time: preserve
  - name: enable and start docker service
  systemd:

ansible/lemmy_dev.yml (new file, 100 lines)

@@ -0,0 +1,100 @@
---
- hosts: all
vars:
lemmy_docker_image: "lemmy:dev"
# Install python if required
# https://www.josharcher.uk/code/ansible-python-connection-failure-ubuntu-server-1604/
gather_facts: False
pre_tasks:
- name: install python for Ansible
raw: test -e /usr/bin/python || (apt -y update && apt install -y python-minimal python-setuptools)
args:
executable: /bin/bash
register: output
changed_when: output.stdout != ""
- setup: # gather facts
tasks:
- name: install dependencies
apt:
pkg: ['nginx', 'docker-compose', 'docker.io', 'certbot', 'python-certbot-nginx']
- name: request initial letsencrypt certificate
command: certbot certonly --nginx --agree-tos -d '{{ domain }}' -m '{{ letsencrypt_contact_email }}'
args:
creates: '/etc/letsencrypt/live/{{domain}}/privkey.pem'
- name: create lemmy folder
file: path={{item.path}} state=directory
with_items:
- { path: '/lemmy/' }
- { path: '/lemmy/volumes/' }
- block:
- name: add template files
template: src={{item.src}} dest={{item.dest}} mode={{item.mode}}
with_items:
- { src: 'templates/docker-compose.yml', dest: '/lemmy/docker-compose.yml', mode: '0600' }
- { src: 'templates/nginx.conf', dest: '/etc/nginx/sites-enabled/lemmy.conf', mode: '0644' }
- name: add config file (only during initial setup)
template: src='templates/config.hjson' dest='/lemmy/lemmy.hjson' mode='0600' force='no' owner='1000' group='1000'
vars:
postgres_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/postgres chars=ascii_letters,digits') }}"
jwt_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/jwt chars=ascii_letters,digits') }}"
- name: build the dev docker image
local_action: shell cd .. && sudo docker build . -f docker/dev/Dockerfile -t lemmy:dev
register: image_build
- name: find hash of the new docker image
set_fact:
image_hash: "{{ image_build.stdout | regex_search('(?<=Successfully built )[0-9a-f]{12}') }}"
# this does not use become so that the output file is written as non-root user and is easy to delete later
- name: save dev docker image to file
local_action: shell sudo docker save lemmy:dev > lemmy-dev.tar
- name: copy dev docker image to server
copy: src=lemmy-dev.tar dest=/lemmy/lemmy-dev.tar
- name: import docker image
docker_image:
name: lemmy
tag: dev
load_path: /lemmy/lemmy-dev.tar
source: load
force_source: yes
register: image_import
- name: delete remote image file
file: path=/lemmy/lemmy-dev.tar state=absent
- name: delete local image file
local_action: file path=lemmy-dev.tar state=absent
- name: enable and start docker service
systemd:
name: docker
enabled: yes
state: started
# can't pull here because that fails due to lemmy:dev (without dessalines/) not being on docker hub, but that shouldn't
# be a problem for testing
- name: start docker-compose
docker_compose:
project_src: /lemmy/
state: present
recreate: always
ignore_errors: yes
- name: reload nginx with new config
shell: nginx -s reload
- name: certbot renewal cronjob
cron:
special_time=daily
name=certbot-renew-lemmy
user=root
job="certbot certonly --nginx -d '{{ domain }}' --deploy-hook 'docker-compose -f /peertube/docker-compose.yml exec nginx nginx -s reload'"

ansible/templates/config.hjson (new file, 14 lines)

@@ -0,0 +1,14 @@
{
database: {
password: "{{ postgres_password }}"
host: "lemmy_db"
}
hostname: "{{ domain }}"
jwt_secret: "{{ jwt_password }}"
front_end_dir: "/app/dist"
email: {
smtp_server: "postfix:25"
smtp_from_address: "noreply@{{ domain }}"
use_tls: false
}
}

ansible/templates/docker-compose.yml (new file, 40 lines)

@@ -0,0 +1,40 @@
version: '3.3'
services:
lemmy:
image: {{ lemmy_docker_image }}
ports:
- "127.0.0.1:8536:8536"
restart: always
volumes:
- ./lemmy.hjson:/config/config.hjson:ro
depends_on:
- lemmy_db
- lemmy_pictshare
lemmy_db:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD={{ postgres_password }}
- POSTGRES_DB=lemmy
volumes:
- lemmy_db:/var/lib/postgresql/data
restart: always
lemmy_pictshare:
image: shtripok/pictshare:latest
ports:
- "127.0.0.1:8537:80"
volumes:
- lemmy_pictshare:/usr/share/nginx/html/data
restart: always
postfix:
image: mwader/postfix-relay
environment:
- POSTFIX_myhostname={{ domain }}
restart: "always"
volumes:
lemmy_db:
lemmy_pictshare:

ansible/templates/env (deleted, 10 lines)

@@ -1,10 +0,0 @@
DOMAIN={{ domain }}
DATABASE_PASSWORD={{ postgres_password }}
DATABASE_URL=postgres://lemmy:{{ postgres_password }}@lemmy_db:5432/lemmy
JWT_SECRET={{ jwt_password }}
RATE_LIMIT_MESSAGE=30
RATE_LIMIT_MESSAGE_PER_SECOND=60
RATE_LIMIT_POST=3
RATE_LIMIT_POST_PER_SECOND=600
RATE_LIMIT_REGISTER=1
RATE_LIMIT_REGISTER_PER_SECOND=3600


@@ -1,3 +1,5 @@
+ proxy_cache_path /var/cache/lemmy_frontend levels=1:2 keys_zone=lemmy_frontend_cache:10m max_size=100m use_temp_path=off;
  server {
  listen 80;
  server_name {{ domain }};
@@ -50,7 +52,6 @@ server {
  client_max_body_size 50M;
  location / {
- rewrite (\/(user|u\/|inbox|post|community|c\/|create_post|create_community|login|search|setup|sponsors|communities|modlog|home)+) /static/index.html break;
  proxy_pass http://0.0.0.0:8536;
  proxy_set_header X-Real-IP $remote_addr;
  proxy_set_header Host $host;
@@ -60,6 +61,13 @@ server {
  proxy_http_version 1.1;
  proxy_set_header Upgrade $http_upgrade;
  proxy_set_header Connection "upgrade";
+ # Proxy Cache
+ proxy_cache lemmy_frontend_cache;
+ proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504;
+ proxy_cache_revalidate on;
+ proxy_cache_lock on;
+ proxy_cache_min_uses 5;
  }
  location /pictshare/ {
@@ -69,8 +77,20 @@ server {
  proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
  if ($request_uri ~ \.(?:ico|gif|jpe?g|png|webp|bmp|mp4)$) {
- add_header Cache-Control "public";
+ add_header Cache-Control "public, max-age=31536000, immutable";
+ expires max;
  }
  }
  }
+ # Anonymize IP addresses
+ # https://www.supertechcrew.com/anonymizing-logs-nginx-apache/
+ map $remote_addr $remote_addr_anon {
+ ~(?P<ip>\d+\.\d+\.\d+)\. $ip.0;
+ ~(?P<ip>[^:]+:[^:]+): $ip::;
+ 127.0.0.1 $remote_addr;
+ ::1 $remote_addr;
+ default 0.0.0.0;
+ }
+ log_format main '$remote_addr_anon - $remote_user [$time_local] "$request" '
+ '$status $body_bytes_sent "$http_referer" "$http_user_agent"';
+ access_log /var/log/nginx/access.log main;
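
The playbooks reload nginx after templating this file into /etc/nginx/sites-enabled/lemmy.conf; a minimal sketch of doing the same by hand, with a syntax check first:

```bash
# Validate the rendered config, then reload the running nginx
nginx -t && nginx -s reload
```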

ansible/uninstall.yml (new file, 48 lines)

@@ -0,0 +1,48 @@
---
- hosts: all
vars_prompt:
- name: confirm_uninstall
prompt: "Do you really want to uninstall Lemmy? This will delete all data and can not be reverted [yes/no]"
private: no
- name: delete_certs
prompt: "Delete certificates? Select 'no' if you want to reinstall Lemmy [yes/no]"
private: no
tasks:
- name: end play if no confirmation was given
debug:
msg: "Uninstall cancelled, doing nothing"
when: not confirm_uninstall|bool
- meta: end_play
when: not confirm_uninstall|bool
- name: stop docker-compose
docker_compose:
project_src: /lemmy/
state: absent
- name: delete data
file: path={{item.path}} state=absent
with_items:
- { path: '/lemmy/' }
- { path: '/etc/nginx/sites-enabled/lemmy.conf' }
- name: Remove a volume
docker_volume: name={{item.name}} state=absent
with_items:
- { name: 'lemmy_lemmy_db' }
- { name: 'lemmy_lemmy_pictshare' }
- name: delete entire ecloud folder
file: path='/mnt/repo-base/' state=absent
when: delete_certs|bool
- name: remove certbot cronjob
cron:
name=certbot-renew-lemmy
state=absent
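
Presumably this playbook is invoked the same way as the install playbook; a minimal sketch, where the `-i inventory` flag and the inventory file name are assumptions:

```bash
# Prompts for confirmation before deleting the containers, volumes, and nginx config
ansible-playbook -i inventory uninstall.yml --become
```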

docker/dev/.env (deleted, 10 lines)

@@ -1,10 +0,0 @@
DOMAIN=my_domain
DATABASE_PASSWORD=password
DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
JWT_SECRET=changeme
RATE_LIMIT_MESSAGE=30
RATE_LIMIT_MESSAGE_PER_SECOND=60
RATE_LIMIT_POST=3
RATE_LIMIT_POST_PER_SECOND=600
RATE_LIMIT_REGISTER=1
RATE_LIMIT_REGISTER_PER_SECOND=3600

docker/dev/Dockerfile (28 changes)

@@ -10,39 +10,47 @@ RUN yarn install --pure-lockfile
  COPY ui /app/ui
  RUN yarn build
- FROM rust:1.38 as rust
+ FROM ekidd/rust-musl-builder:1.40.0-openssl11 as rust
- # Install musl
- RUN apt-get update
- RUN apt-get install musl-tools -y
- RUN rustup target add x86_64-unknown-linux-musl
  # Cache deps
  WORKDIR /app
+ RUN sudo chown -R rust:rust .
  RUN USER=root cargo new server
  WORKDIR /app/server
  COPY server/Cargo.toml server/Cargo.lock ./
+ RUN sudo chown -R rust:rust .
  RUN mkdir -p ./src/bin \
    && echo 'fn main() { println!("Dummy") }' > ./src/bin/main.rs
- RUN RUSTFLAGS=-Clinker=musl-gcc cargo build --release --target=x86_64-unknown-linux-musl
+ RUN cargo build --release
  RUN rm -f ./target/x86_64-unknown-linux-musl/release/deps/lemmy_server*
  COPY server/src ./src/
  COPY server/migrations ./migrations/
- # build for release
- RUN RUSTFLAGS=-Clinker=musl-gcc cargo build --frozen --release --target=x86_64-unknown-linux-musl
+ # Build for release
+ RUN cargo build --frozen --release
  # Get diesel-cli on there just in case
  # RUN cargo install diesel_cli --no-default-features --features postgres
+ FROM ekidd/rust-musl-builder:1.40.0-openssl11 as docs
+ WORKDIR /app
+ COPY docs ./docs
+ RUN sudo chown -R rust:rust .
+ RUN mdbook build docs/
  FROM alpine:3.10
  # Install libpq for postgres
  RUN apk add libpq
  # Copy resources
+ COPY server/config/defaults.hjson /config/defaults.hjson
  COPY --from=rust /app/server/target/x86_64-unknown-linux-musl/release/lemmy_server /app/lemmy
+ COPY --from=docs /app/docs/book/ /app/dist/documentation/
  COPY --from=node /app/ui/dist /app/dist
  RUN addgroup -g 1000 lemmy
  RUN adduser -D -s /bin/sh -u 1000 -G lemmy lemmy
  RUN chown lemmy:lemmy /app/lemmy

docker/dev/Dockerfile.aarch64 (new file, 79 lines)

@@ -0,0 +1,79 @@
FROM node:10-jessie as node
WORKDIR /app/ui
# Cache deps
COPY ui/package.json ui/yarn.lock ./
RUN yarn install --pure-lockfile
# Build
COPY ui /app/ui
RUN yarn build
# contains qemu-*-static for cross-compilation
FROM multiarch/qemu-user-static as qemu
FROM arm64v8/rust:1.40-buster as rust
COPY --from=qemu /usr/bin/qemu-aarch64-static /usr/bin
#COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin
# Install musl
#RUN apt-get update && apt-get install -y mc
#RUN apt-get install -y musl-tools mc
#libpq-dev mc
#RUN rustup target add ${TARGET}
# Cache deps
WORKDIR /app
RUN USER=root cargo new server
WORKDIR /app/server
COPY server/Cargo.toml server/Cargo.lock ./
RUN mkdir -p ./src/bin \
&& echo 'fn main() { println!("Dummy") }' > ./src/bin/main.rs
RUN cargo build --release
# RUN cargo build
COPY server/src ./src/
COPY server/migrations ./migrations/
RUN rm -f ./target/release/deps/lemmy_server* ; rm -f ./target/debug/deps/lemmy_server*
# build for release
RUN cargo build --frozen --release
# RUN cargo build --frozen
# Get diesel-cli on there just in case
# RUN cargo install diesel_cli --no-default-features --features postgres
# RUN cp /app/server/target/debug/lemmy_server /app/server/ready
RUN cp /app/server/target/release/lemmy_server /app/server/ready
#FROM alpine:3.10
# debian because build with dynamic linking with debian:buster
FROM arm64v8/debian:buster-slim as lemmy
#COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin
COPY --from=qemu /usr/bin/qemu-aarch64-static /usr/bin
# Install libpq for postgres
#RUN apk add libpq
RUN apt-get update && apt-get install -y libpq5
RUN addgroup --gid 1000 lemmy
# for alpine
#RUN adduser -D -s /bin/sh -u 1000 -G lemmy lemmy
# for debian
RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy
# Copy resources
COPY server/config/defaults.hjson /config/defaults.hjson
COPY --from=rust /app/server/ready /app/lemmy
COPY --from=node /app/ui/dist /app/dist
RUN chown lemmy:lemmy /app/lemmy
USER lemmy
EXPOSE 8536
CMD ["/app/lemmy"]


@@ -1,17 +1,31 @@
- FROM node:12-buster as node
+ FROM node:10-jessie as node
  WORKDIR /app/ui
  # Cache deps
  COPY ui/package.json ui/yarn.lock ./
- RUN yarn install --pure-lockfile --network-timeout 100000
+ RUN yarn install --pure-lockfile
  # Build
  COPY ui /app/ui
  RUN yarn build
- FROM rust:1.37 as rust
+ # contains qemu-*-static for cross-compilation
+ FROM multiarch/qemu-user-static as qemu
+ FROM arm32v7/rust:1.37-buster as rust
+ #COPY --from=qemu /usr/bin/qemu-aarch64-static /usr/bin
+ COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin
+ # Install musl
+ #RUN apt-get update && apt-get install -y mc
+ #RUN apt-get install -y musl-tools mc
+ #libpq-dev mc
+ #RUN rustup target add ${TARGET}
  # Cache deps
  WORKDIR /app
@@ -20,11 +34,13 @@ WORKDIR /app/server
  COPY server/Cargo.toml server/Cargo.lock ./
  RUN mkdir -p ./src/bin \
    && echo 'fn main() { println!("Dummy") }' > ./src/bin/main.rs
+ #RUN cargo build --release
- RUN cargo build
+ # RUN cargo build
- RUN rm -f ./target/debug/deps/lemmy_server*
+ RUN RUSTFLAGS='-Ccodegen-units=1' cargo build
  COPY server/src ./src/
  COPY server/migrations ./migrations/
+ RUN rm -f ./target/release/deps/lemmy_server* ; rm -f ./target/debug/deps/lemmy_server*
  # build for release
  #RUN cargo build --frozen --release
@@ -33,27 +49,30 @@ RUN cargo build --frozen
  # Get diesel-cli on there just in case
  # RUN cargo install diesel_cli --no-default-features --features postgres
+ # make result place always the same for lemmy container
  RUN cp /app/server/target/debug/lemmy_server /app/server/ready
+ #RUN cp /app/server/target/release/lemmy_server /app/server/ready
  #FROM alpine:3.10
  # debian because build with dynamic linking with debian:buster
- FROM debian:buster as lemmy
+ FROM arm32v7/debian:buster-slim as lemmy
+ COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin
  # Install libpq for postgres
  #RUN apk add libpq
  RUN apt-get update && apt-get install -y libpq5
- # Copy resources
- #COPY --from=rust /app/server/target/$TARGET/debug/lemmy_server /app/lemmy
- COPY --from=rust /app/server/ready /app/lemmy
- COPY --from=node /app/ui/dist /app/dist
  RUN addgroup --gid 1000 lemmy
  # for alpine
  #RUN adduser -D -s /bin/sh -u 1000 -G lemmy lemmy
  # for debian
  RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy
+ # Copy resources
+ COPY server/config/defaults.hjson /config/defaults.hjson
+ COPY --from=rust /app/server/ready /app/lemmy
+ COPY --from=node /app/ui/dist /app/dist
  RUN chown lemmy:lemmy /app/lemmy
  USER lemmy
  EXPOSE 8536

docker/dev/Dockerfile.libc (new file, 88 lines)

@@ -0,0 +1,88 @@
# can be build on x64, arm32, arm64 platforms
# to build on target platform run
# docker build -f Dockerfile.libc -t dessalines/lemmy:version ../..
#
# to use docker buildx run
# docker buildx build --platform linux/amd64,linux/arm64 -f Dockerfile.libc -t YOURNAME/lemmy --push ../..
FROM node:12-buster as node
# use this if use docker buildx
#FROM --platform=$BUILDPLATFORM node:12-buster as node
WORKDIR /app/ui
# Cache deps
COPY ui/package.json ui/yarn.lock ./
RUN yarn install --pure-lockfile --network-timeout 100000
# Build
COPY ui /app/ui
RUN yarn build
FROM rust:1.40 as rust
# Cache deps
WORKDIR /app
RUN USER=root cargo new server
WORKDIR /app/server
COPY server/Cargo.toml server/Cargo.lock ./
RUN mkdir -p ./src/bin \
&& echo 'fn main() { println!("Dummy") }' > ./src/bin/main.rs
RUN cargo build --release
#RUN cargo build && \
# rm -f ./target/release/deps/lemmy_server* ; rm -f ./target/debug/deps/lemmy_server*
COPY server/src ./src/
COPY server/migrations ./migrations/
# build for release
# workaround for https://github.com/rust-lang/rust/issues/62896
#RUN RUSTFLAGS='-Ccodegen-units=1' cargo build --release
RUN cargo build --release --frozen
#RUN cargo build --frozen
# Get diesel-cli on there just in case
# RUN cargo install diesel_cli --no-default-features --features postgres
# make result place always the same for lemmy container
RUN cp /app/server/target/release/lemmy_server /app/server/ready
#RUN cp /app/server/target/debug/lemmy_server /app/server/ready
FROM rust:1.40 as docs
WORKDIR /app
# Build docs
COPY docs ./docs
RUN cargo install mdbook
RUN mdbook build docs/
#FROM alpine:3.10
# debian because build with dynamic linking with debian:buster
FROM debian:buster as lemmy
# Install libpq for postgres
#RUN apk add libpq
RUN apt-get update && apt-get install -y libpq5
RUN addgroup --gid 1000 lemmy
# for alpine
#RUN adduser -D -s /bin/sh -u 1000 -G lemmy lemmy
# for debian
RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy
# Copy resources
COPY server/config/defaults.hjson /config/defaults.hjson
COPY --from=node /app/ui/dist /app/dist
COPY --from=docs /app/docs/book/ /app/dist/documentation/
COPY --from=rust /app/server/ready /app/lemmy
RUN chown lemmy:lemmy /app/lemmy
USER lemmy
EXPOSE 8536
CMD ["/app/lemmy"]

docker/dev/deploy.sh (60 changes)

@@ -5,26 +5,68 @@ git checkout master
  new_tag="$1"
  git tag $new_tag
+ third_semver=$(echo $new_tag | cut -d "." -f 3)
  # Setting the version on the front end
- pushd ../../ui/
- node set_version.js
- git add src/version.ts
- popd
+ cd ../../
+ echo "export const version: string = '$(git describe --tags)';" > "ui/src/version.ts"
+ git add "ui/src/version.ts"
+ # Setting the version on the backend
+ echo "pub const VERSION: &str = \"$(git describe --tags)\";" > "server/src/version.rs"
+ git add "server/src/version.rs"
+ # Setting the version for Ansible
+ git describe --tags > "ansible/VERSION"
+ git add "ansible/VERSION"
+ cd docker/dev
  # Changing the docker-compose prod
  sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../prod/docker-compose.yml
+ sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../../ansible/templates/docker-compose.yml
  git add ../prod/docker-compose.yml
+ git add ../../ansible/templates/docker-compose.yml
  # The commit
  git commit -m"Version $new_tag"
- git push origin $new_tag
- git push
  # Rebuilding docker
  docker-compose build
- docker tag dev_lemmy:latest dessalines/lemmy:$new_tag
- docker push dessalines/lemmy:$new_tag
+ docker tag dev_lemmy:latest dessalines/lemmy:x64-$new_tag
+ docker push dessalines/lemmy:x64-$new_tag
+ # Build for Raspberry Pi / other archs
+ # Arm currently not working
+ # docker build -t lemmy:armv7hf -f Dockerfile.armv7hf ../../
+ # docker tag lemmy:armv7hf dessalines/lemmy:armv7hf-$new_tag
+ # docker push dessalines/lemmy:armv7hf-$new_tag
+ # aarch64
+ # Only do this on major releases (IE the third semver is 0)
+ if [ $third_semver -eq 0 ]; then
+   # Registering qemu binaries
+   docker run --rm --privileged multiarch/qemu-user-static:register --reset
+   docker build -t lemmy:aarch64 -f Dockerfile.aarch64 ../../
+   docker tag lemmy:aarch64 dessalines/lemmy:arm64-$new_tag
+   docker push dessalines/lemmy:arm64-$new_tag
+ fi
+ # Creating the manifest for the multi-arch build
+ if [ $third_semver -eq 0 ]; then
+   docker manifest create dessalines/lemmy:$new_tag \
+     dessalines/lemmy:x64-$new_tag \
+     dessalines/lemmy:arm64-$new_tag
+ else
+   docker manifest create dessalines/lemmy:$new_tag \
+     dessalines/lemmy:x64-$new_tag
+ fi
+ docker manifest push dessalines/lemmy:$new_tag
+ # Push
+ git push origin $new_tag
+ git push
  # Pushing to any ansible deploys
  cd ../../ansible
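
Once the manifest is pushed, it can be checked that it references the expected per-arch images; a minimal sketch, assuming the tag naming used above:

```bash
# Should list the x64 image (and the arm64 image on major releases)
docker manifest inspect dessalines/lemmy:$new_tag
```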

docker/dev/dev_deploy.sh (new executable file, 12 lines)

@@ -0,0 +1,12 @@
#!/bin/sh
# Building from the dev branch for dev servers
git checkout dev
# Rebuilding dev docker
docker-compose build
docker tag dev_lemmy:latest dessalines/lemmy:dev
docker push dessalines/lemmy:dev
# SSH and pull it
ssh $LEMMY_USER@$LEMMY_HOST "cd ~/git/lemmy/docker/dev && docker pull dessalines/lemmy:dev && docker-compose up -d"


@@ -5,36 +5,29 @@ services:
  image: postgres:12-alpine
  environment:
  - POSTGRES_USER=lemmy
- - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
+ - POSTGRES_PASSWORD=password
  - POSTGRES_DB=lemmy
  volumes:
  - lemmy_db:/var/lib/postgresql/data
+ restart: always
  lemmy:
  build:
  context: ../../
  dockerfile: docker/dev/Dockerfile
  ports:
  - "127.0.0.1:8536:8536"
- environment:
- - LEMMY_FRONT_END_DIR=/app/dist
- - DATABASE_URL=${DATABASE_URL}
- - JWT_SECRET=${JWT_SECRET}
- - HOSTNAME=${DOMAIN}
- - RATE_LIMIT_MESSAGE=${RATE_LIMIT_MESSAGE}
- - RATE_LIMIT_MESSAGE_PER_SECOND=${RATE_LIMIT_MESSAGE_PER_SECOND}
- - RATE_LIMIT_POST=${RATE_LIMIT_POST}
- - RATE_LIMIT_POST_PER_SECOND=${RATE_LIMIT_POST_PER_SECOND}
- - RATE_LIMIT_REGISTER=${RATE_LIMIT_REGISTER}
- - RATE_LIMIT_REGISTER_PER_SECOND=${RATE_LIMIT_REGISTER_PER_SECOND}
  restart: always
+ volumes:
+ - ../lemmy.hjson:/config/config.hjson:ro
  depends_on:
  - lemmy_db
  lemmy_pictshare:
- image: hascheksolutions/pictshare:latest
+ image: shtripok/pictshare:latest
  ports:
  - "127.0.0.1:8537:80"
  volumes:
  - lemmy_pictshare:/usr/share/nginx/html/data
+ restart: always
  volumes:
  lemmy_db:
  lemmy_pictshare:


@@ -14,13 +14,13 @@ spec:
  spec:
  containers:
  - env:
- - name: DATABASE_URL
+ - name: LEMMY_DATABASE_URL
  # example: 'postgres://lemmy:password@db:5432/lemmy'
  value: CHANGE_ME
- - name: HOSTNAME
+ - name: LEMMY_HOSTNAME
  # example: 'lemmy.example.com'
  value: CHANGE_ME
- - name: JWT_SECRET
+ - name: LEMMY_JWT_SECRET
  # example: 'very-super-good-secret'
  value: CHANGE_ME
  - name: LEMMY_FRONT_END_DIR

docker/lemmy.hjson (new file, 56 lines)

@@ -0,0 +1,56 @@
{
database: {
# username to connect to postgres
user: "lemmy"
# password to connect to postgres
password: "password"
# host where postgres is running
host: "lemmy_db"
# port where postgres can be accessed
port: 5432
# name of the postgres database for lemmy
database: "lemmy"
# maximum number of active sql connections
pool_size: 5
}
# the domain name of your instance (eg "dev.lemmy.ml")
hostname: "my_domain"
# address where lemmy should listen for incoming requests
bind: "0.0.0.0"
# port where lemmy should listen for incoming requests
port: 8536
# json web token for authorization between server and client
jwt_secret: "changeme"
# The dir for the front end
front_end_dir: "/app/dist"
  # whether to enable activitypub federation. this feature is in alpha, do not enable in production, as it might
  # cause problems like remote instances fetching and permanently storing bad data.
federation_enabled: false
# rate limits for various user actions, by user ip
rate_limit: {
# maximum number of messages created in interval
message: 30
# interval length for message limit
message_per_second: 60
# maximum number of posts created in interval
post: 6
# interval length for post limit
post_per_second: 600
# maximum number of registrations in interval
register: 3
# interval length for registration limit
register_per_second: 3600
}
# # email sending configuration
# email: {
# # hostname of the smtp server
# smtp_server: ""
# # login name for smtp server
# smtp_login: ""
# # password to login to the smtp server
# smtp_password: ""
# # address to send emails from, eg "info@your-instance.com"
# smtp_from_address: ""
# }
}

docker/nocross/.env vendored

@ -1,4 +0,0 @@
DOMAIN=my_domain
DATABASE_PASSWORD=password
DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
JWT_SECRET=changeme

docker/nocross/docker-compose.yml

@ -1,26 +0,0 @@
version: '3.3'
services:
lemmy_db:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=${DATABASE_PASSWORD}
- POSTGRES_DB=lemmy
volumes:
- lemmy_db:/var/lib/postgresql/data
lemmy:
build:
context: ../../
dockerfile: docker/nocross/Dockerfile
ports:
- "127.0.0.1:8536:8536"
environment:
- LEMMY_FRONT_END_DIR=/app/dist
- DATABASE_URL=${DATABASE_URL}
- JWT_SECRET=${JWT_SECRET}
- HOSTNAME=${DOMAIN}
depends_on:
- lemmy_db
volumes:
lemmy_db:

docker/prod/.env vendored

@ -1,10 +0,0 @@
DOMAIN=my_domain
DATABASE_PASSWORD=password
DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
JWT_SECRET=changeme
RATE_LIMIT_MESSAGE=30
RATE_LIMIT_MESSAGE_PER_SECOND=60
RATE_LIMIT_POST=3
RATE_LIMIT_POST_PER_SECOND=600
RATE_LIMIT_REGISTER=1
RATE_LIMIT_REGISTER_PER_SECOND=3600

docker/prod/docker-compose.yml

@ -5,34 +5,27 @@ services:
     image: postgres:12-alpine
     environment:
       - POSTGRES_USER=lemmy
-      - POSTGRES_PASSWORD=${DATABASE_PASSWORD}
+      - POSTGRES_PASSWORD=password
       - POSTGRES_DB=lemmy
     volumes:
       - lemmy_db:/var/lib/postgresql/data
+    restart: always
   lemmy:
-    image: dessalines/lemmy:v0.3.0.1
+    image: dessalines/lemmy:v0.6.10
     ports:
       - "127.0.0.1:8536:8536"
-    environment:
-      - LEMMY_FRONT_END_DIR=/app/dist
-      - DATABASE_URL=${DATABASE_URL}
-      - JWT_SECRET=${JWT_SECRET}
-      - HOSTNAME=${DOMAIN}
-      - RATE_LIMIT_MESSAGE=${RATE_LIMIT_MESSAGE}
-      - RATE_LIMIT_MESSAGE_PER_SECOND=${RATE_LIMIT_MESSAGE_PER_SECOND}
-      - RATE_LIMIT_POST=${RATE_LIMIT_POST}
-      - RATE_LIMIT_POST_PER_SECOND=${RATE_LIMIT_POST_PER_SECOND}
-      - RATE_LIMIT_REGISTER=${RATE_LIMIT_REGISTER}
-      - RATE_LIMIT_REGISTER_PER_SECOND=${RATE_LIMIT_REGISTER_PER_SECOND}
     restart: always
+    volumes:
+      - ./lemmy.hjson:/config/config.hjson:ro
     depends_on:
       - lemmy_db
   lemmy_pictshare:
-    image: hascheksolutions/pictshare:latest
+    image: shtripok/pictshare:latest
     ports:
       - "127.0.0.1:8537:80"
     volumes:
       - lemmy_pictshare:/usr/share/nginx/html/data
+    restart: always
 volumes:
   lemmy_db:
   lemmy_pictshare:

docs/.gitignore vendored Normal file

@ -0,0 +1 @@
book

docs/api.md vendored

@ -1,852 +0,0 @@
# Lemmy WebSocket API
*Note: this may lag behind the actual API endpoints [here](../server/src/api).*
## Data types
- `i16`, `i32` and `i64` are respectively [16-bit](https://en.wikipedia.org/wiki/16-bit), [32-bit](https://en.wikipedia.org/wiki/32-bit) and [64-bit](https://en.wikipedia.org/wiki/64-bit_computing) integers.
- <code>Option<***SomeType***></code> designates an option which may be omitted in requests and not be present in responses. It will be of type ***SomeType***.
- <code>Vec<***SomeType***></code> is a list which contains objects of type ***SomeType***.
- `chrono::NaiveDateTime` is a timestamp string in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format. Timestamps will be UTC.
- Other data types are listed [here](../server/src/db).
## Basic usage
Request and response strings are in [JSON format](https://www.json.org).
### Endpoint
Connect to <code>ws://***host***/api/v1/ws</code> to get started.
If the ***`host`*** supports secure connections, you can use <code>wss://***host***/api/v1/ws</code>.
### Testing with [Websocat](https://github.com/vi/websocat)
`websocat ws://127.0.0.1:8536/api/v1/ws -nt`
A simple test command:
`{"op": "ListCategories"}`
### Testing with the [WebSocket JavaScript API](https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API)
```javascript
var ws = new WebSocket("ws://" + host + "/api/v1/ws");
ws.onopen = function () {
console.log("Connection succeed!");
ws.send(JSON.stringify({
op: "ListCategories"
}));
};
```
## Rate limits
- 1 per hour for signups and community creation.
- 1 per 10 minutes for post creation.
- 30 actions per minute for post voting and comment creation.
- Everything else is not rate-limited.
## Errors
```rust
{
op: String,
message: String,
}
```
## API documentation
### Sort Types
These go wherever there is a `sort` field. The available sort types are:
- `Hot` - the hottest posts/communities, depending on votes, views, comments and publish date
- `New` - the newest posts/communities
- `TopDay` - the most upvoted posts/communities of the current day.
- `TopWeek` - the most upvoted posts/communities of the current week.
- `TopMonth` - the most upvoted posts/communities of the current month.
- `TopYear` - the most upvoted posts/communities of the current year.
- `TopAll` - the most upvoted posts/communities on the current instance.
### User / Authentication / Admin actions
#### Login
The `jwt` string should be stored and used anywhere `auth` is called for.
##### Request
```rust
{
op: "Login",
data: {
username_or_email: String,
password: String
}
}
```
##### Response
```rust
{
op: String,
jwt: String
}
```
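The flow can be exercised end to end with the Websocat client shown above; a sketch with placeholder credentials (the returned `jwt` is what later requests expect in their `auth` field):

```bash
websocat ws://127.0.0.1:8536/api/v1/ws -nt
# Then type one request per line, e.g. (placeholder credentials):
#   {"op": "Login", "data": {"username_or_email": "admin", "password": "example-password"}}
# and reuse the returned jwt in any request that takes an auth field:
#   {"op": "GetFollowedCommunities", "data": {"auth": "<jwt from the Login response>"}}
```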
#### Register
Only the first user to register is able to become the admin.
##### Request
```rust
{
op: "Register",
data: {
username: String,
email: Option<String>,
password: String,
password_verify: String,
admin: bool
}
}
```
##### Response
```rust
{
op: String,
jwt: String
}
```
#### Get User Details
##### Request
```rust
{
op: "GetUserDetails",
data: {
user_id: Option<i32>,
username: Option<String>,
sort: String,
page: Option<i64>,
limit: Option<i64>,
community_id: Option<i32>,
saved_only: bool,
auth: Option<String>,
}
}
```
##### Response
```rust
{
op: String,
user: UserView,
follows: Vec<CommunityFollowerView>,
moderates: Vec<CommunityModeratorView>,
comments: Vec<CommentView>,
posts: Vec<PostView>,
}
```
#### Save User Settings
##### Request
```rust
{
op: "SaveUserSettings",
data: {
show_nsfw: bool,
theme: String, // Default 'darkly'
auth: String
}
}
```
##### Response
```rust
{
op: String,
jwt: String
}
```
#### Get Replies / Inbox
##### Request
```rust
{
op: "GetReplies",
data: {
sort: String,
page: Option<i64>,
limit: Option<i64>,
unread_only: bool,
auth: String
}
}
```
##### Response
```rust
{
op: String,
replies: Vec<ReplyView>,
}
```
#### Mark all replies as read
##### Request
```rust
{
op: "MarkAllAsRead",
data: {
auth: String
}
}
```
##### Response
```rust
{
op: String,
replies: Vec<ReplyView>,
}
```
#### Delete Account
*Permanently deletes your posts and comments*
##### Request
```rust
{
op: "DeleteAccount",
data: {
auth: String
}
}
```
##### Response
```rust
{
op: String,
jwt: String,
}
```
#### Add admin
##### Request
```rust
{
op: "AddAdmin",
data: {
user_id: i32,
added: bool,
auth: String
}
}
```
##### Response
```rust
{
op: String,
admins: Vec<UserView>,
}
```
#### Ban user
##### Request
```rust
{
op: "BanUser",
data: {
user_id: i32,
ban: bool,
reason: Option<String>,
expires: Option<i64>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
user: UserView,
banned: bool,
}
```
### Site
#### List Categories
##### Request
```rust
{
op: "ListCategories"
}
```
##### Response
```rust
{
op: String,
categories: Vec<Category>
}
```
#### Search
Search types are `Both, Comments, Posts`.
##### Request
```rust
{
op: "Search",
data: {
q: String,
type_: String,
community_id: Option<i32>,
sort: String,
page: Option<i64>,
limit: Option<i64>,
}
}
```
##### Response
```rust
{
op: String,
comments: Vec<CommentView>,
posts: Vec<PostView>,
}
```
#### Get Modlog
##### Request
```rust
{
op: "GetModlog",
data: {
mod_user_id: Option<i32>,
community_id: Option<i32>,
page: Option<i64>,
limit: Option<i64>,
}
}
```
##### Response
```rust
{
op: String,
removed_posts: Vec<ModRemovePostView>,
locked_posts: Vec<ModLockPostView>,
removed_comments: Vec<ModRemoveCommentView>,
removed_communities: Vec<ModRemoveCommunityView>,
banned_from_community: Vec<ModBanFromCommunityView>,
banned: Vec<ModBanView>,
added_to_community: Vec<ModAddCommunityView>,
added: Vec<ModAddView>,
}
```
#### Create Site
##### Request
```rust
{
op: "CreateSite",
data: {
name: String,
description: Option<String>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
site: SiteView,
}
```
#### Edit Site
##### Request
```rust
{
op: "EditSite",
data: {
name: String,
description: Option<String>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
site: SiteView,
}
```
#### Get Site
##### Request
```rust
{
op: "GetSite"
}
```
##### Response
```rust
{
op: String,
site: Option<SiteView>,
admins: Vec<UserView>,
banned: Vec<UserView>,
}
```
#### Transfer Site
##### Request
```rust
{
op: "TransferSite",
data: {
user_id: i32,
auth: String
}
}
```
##### Response
```rust
{
op: String,
site: Option<SiteView>,
admins: Vec<UserView>,
banned: Vec<UserView>,
}
```
### Community
#### Get Community
##### Request
```rust
{
op: "GetCommunity",
data: {
id: Option<i32>,
name: Option<String>,
auth: Option<String>
}
}
```
##### Response
```rust
{
op: String,
community: CommunityView,
moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>,
}
```
#### Create Community
##### Request
```rust
{
op: "CreateCommunity",
data: {
name: String,
title: String,
description: Option<String>,
category_id: i32 ,
auth: String
}
}
```
##### Response
```rust
{
op: String,
community: CommunityView
}
```
#### List Communities
##### Request
```rust
{
op: "ListCommunities",
data: {
sort: String,
page: Option<i64>,
limit: Option<i64>,
auth: Option<String>
}
}
```
##### Response
```rust
{
op: String,
communities: Vec<CommunityView>
}
```
#### Ban from Community
##### Request
```rust
{
op: "BanFromCommunity",
data: {
community_id: i32,
user_id: i32,
ban: bool,
reason: Option<String>,
expires: Option<i64>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
user: UserView,
banned: bool,
}
```
#### Add Mod to Community
##### Request
```rust
{
op: "AddModToCommunity",
data: {
community_id: i32,
user_id: i32,
added: bool,
auth: String
}
}
```
##### Response
```rust
{
op: String,
moderators: Vec<CommunityModeratorView>,
}
```
#### Edit Community
Mods and admins can remove and lock a community; creators can delete it.
##### Request
```rust
{
op: "EditCommunity",
data: {
edit_id: i32,
name: String,
title: String,
description: Option<String>,
category_id: i32,
removed: Option<bool>,
deleted: Option<bool>,
reason: Option<String>,
expires: Option<i64>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
community: CommunityView
}
```
#### Follow Community
##### Request
```rust
{
op: "FollowCommunity",
data: {
community_id: i32,
follow: bool,
auth: String
}
}
```
##### Response
```rust
{
op: String,
community: CommunityView
}
```
#### Get Followed Communities
##### Request
```rust
{
op: "GetFollowedCommunities",
data: {
auth: String
}
}
```
##### Response
```rust
{
op: String,
communities: Vec<CommunityFollowerView>
}
```
#### Transfer Community
##### Request
```rust
{
op: "TransferCommunity",
data: {
community_id: i32,
user_id: i32,
auth: String
}
}
```
##### Response
```rust
{
op: String,
community: CommunityView,
moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>,
}
```
### Post
#### Create Post
##### Request
```rust
{
op: "CreatePost",
data: {
name: String,
url: Option<String>,
body: Option<String>,
community_id: i32,
auth: String
}
}
```
##### Response
```rust
{
op: String,
post: PostView
}
```
#### Get Post
##### Request
```rust
{
op: "GetPost",
data: {
id: i32,
auth: Option<String>
}
}
```
##### Response
```rust
{
op: String,
post: PostView,
comments: Vec<CommentView>,
community: CommunityView,
moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>,
}
```
#### Get Posts
Post listing types are `All, Subscribed, Community`
##### Request
```rust
{
op: "GetPosts",
data: {
type_: String,
sort: String,
page: Option<i64>,
limit: Option<i64>,
community_id: Option<i32>,
auth: Option<String>
}
}
```
##### Response
```rust
{
op: String,
posts: Vec<PostView>,
}
```
#### Create Post Like
`score` can be 0, -1, or 1
##### Request
```rust
{
op: "CreatePostLike",
data: {
post_id: i32,
score: i16,
auth: String
}
}
```
##### Response
```rust
{
op: String,
post: PostView
}
```
#### Edit Post
Mods and admins can remove and lock a post; creators can delete it.
##### Request
```rust
{
op: "EditPost",
data: {
edit_id: i32,
creator_id: i32,
community_id: i32,
name: String,
url: Option<String>,
body: Option<String>,
removed: Option<bool>,
deleted: Option<bool>,
locked: Option<bool>,
reason: Option<String>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
post: PostView
}
```
#### Save Post
##### Request
```rust
{
op: "SavePost",
data: {
post_id: i32,
save: bool,
auth: String
}
}
```
##### Response
```rust
{
op: String,
post: PostView
}
```
### Comment
#### Create Comment
##### Request
```rust
{
op: "CreateComment",
data: {
content: String,
parent_id: Option<i32>,
edit_id: Option<i32>,
post_id: i32,
auth: String
}
}
```
##### Response
```rust
{
op: String,
comment: CommentView
}
```
#### Edit Comment
Mods and admins can remove a comment; creators can delete it.
##### Request
```rust
{
op: "EditComment",
data: {
content: String,
parent_id: Option<i32>,
edit_id: i32,
creator_id: i32,
post_id: i32,
removed: Option<bool>,
deleted: Option<bool>,
reason: Option<String>,
read: Option<bool>,
auth: String
}
}
```
##### Response
```rust
{
op: String,
comment: CommentView
}
```
#### Save Comment
##### Request
```rust
{
op: "SaveComment",
data: {
comment_id: i32,
save: bool,
auth: String
}
}
```
##### Response
```rust
{
op: String,
comment: CommentView
}
```
#### Create Comment Like
`score` can be 0, -1, or 1
##### Request
```rust
{
op: "CreateCommentLike",
data: {
comment_id: i32,
post_id: i32,
score: i16,
auth: String
}
}
```
##### Response
```rust
{
op: String,
comment: CommentView
}
```

docs/book.toml vendored Normal file

@ -0,0 +1,6 @@
[book]
authors = ["Felix Ableitner"]
language = "en"
multilingual = false
src = "src"
title = "Lemmy Documentation"

docs/src/SUMMARY.md vendored Normal file

@ -0,0 +1,16 @@
# Summary
- [About](about.md)
- [Features](about_features.md)
- [Goals](about_goals.md)
- [Post and Comment Ranking](about_ranking.md)
- [Administration](administration.md)
- [Install with Docker](administration_install_docker.md)
- [Install with Ansible](administration_install_ansible.md)
- [Install with Kubernetes](administration_install_kubernetes.md)
- [Configuration](administration_configuration.md)
- [Contributing](contributing.md)
- [Docker Development](contributing_docker_development.md)
- [Local Development](contributing_local_development.md)
- [Websocket/HTTP API](contributing_websocket_http_api.md)
- [ActivityPub API Outline](contributing_apub_api_outline.md)

docs/src/about.md vendored Normal file

@ -0,0 +1,20 @@
# Lemmy - A link aggregator / reddit clone for the fediverse.
[Lemmy Dev instance](https://dev.lemmy.ml) *for testing purposes only*
[Lemmy](https://github.com/dessalines/lemmy) is similar to sites like [Reddit](https://reddit.com), [Lobste.rs](https://lobste.rs), [Raddle](https://raddle.me), or [Hacker News](https://news.ycombinator.com/): you subscribe to forums you're interested in, post links and discussions, then vote, and comment on them. Behind the scenes, it is very different; anyone can easily run a server, and all these servers are federated (think email), and connected to the same universe, called the [Fediverse](https://en.wikipedia.org/wiki/Fediverse).
For a link aggregator, this means a user registered on one server can subscribe to forums on any other server, and can have discussions with users registered elsewhere.
The overall goal is to create an easily self-hostable, decentralized alternative to reddit and other link aggregators, outside of their corporate control and meddling.
Each lemmy server can set its own moderation policy; appointing site-wide admins, and community moderators to keep out the trolls, and foster a healthy, non-toxic environment where all can feel comfortable contributing.
### Why's it called Lemmy?
- Lead singer from [Motörhead](https://invidio.us/watch?v=pWB5JZRGl0U).
- The old school [video game](<https://en.wikipedia.org/wiki/Lemmings_(video_game)>).
- The [Koopa from Super Mario](https://www.mariowiki.com/Lemmy_Koopa).
- The [furry rodents](http://sunchild.fpwc.org/lemming-the-little-giant-of-the-north/).
Made with [Rust](https://www.rust-lang.org), [Actix](https://actix.rs/), [Inferno](https://infernojs.org), [Typescript](https://www.typescriptlang.org/) and [Diesel](http://diesel.rs/).

docs/src/about_features.md vendored Normal file

@ -0,0 +1,27 @@
# Features
- Open source, [AGPL License](/LICENSE).
- Self hostable, easy to deploy.
- Comes with [Docker](#docker), [Ansible](#ansible), [Kubernetes](#kubernetes).
- Clean, mobile-friendly interface.
- Live-updating Comment threads.
- Full vote scores `(+/-)` like old reddit.
- Themes, including light, dark, and solarized.
- Emojis with autocomplete support. Start typing `:`
- User tagging using `@`, Community tagging using `#`.
- Notifications, on comment replies and when you're tagged.
- i18n / internationalization support.
- RSS / Atom feeds for `All`, `Subscribed`, `Inbox`, `User`, and `Community`.
- Cross-posting support.
- A *similar post search* when creating new posts. Great for question / answer communities.
- Moderation abilities.
- Public Moderation Logs.
- Both site admins, and community moderators, who can appoint other moderators.
- Can lock, remove, and restore posts and comments.
- Can ban and unban users from communities and the site.
- Can transfer site and communities to others.
- Can fully erase your data, replacing all posts and comments.
- NSFW post / community support.
- High performance.
- Server is written in rust.
- Front end is `~80kB` gzipped.
- Supports arm64 / Raspberry Pi.


@ -23,7 +23,6 @@
 - [Activitypub main](https://www.w3.org/TR/activitypub/)
 - [Diesel to Postgres data types](https://kotiri.com/2018/01/31/postgresql-diesel-rust-types.html)
 - [helpful diesel examples](http://siciarz.net/24-days-rust-diesel/)
-- [Mastodon public key server example](https://blog.joinmastodon.org/2018/06/how-to-implement-a-basic-activitypub-server/)
 - [Recursive query for adjacency list for nested comments](https://stackoverflow.com/questions/192220/what-is-the-most-efficient-elegant-way-to-parse-a-flat-table-into-a-tree/192462#192462)
 - https://github.com/sparksuite/simplemde-markdown-editor
 - [Markdown-it](https://github.com/markdown-it/markdown-it)

docs/src/administration.md vendored Normal file

@ -0,0 +1 @@
Information for Lemmy instance admins, and those who want to start an instance.

docs/src/administration_configuration.md

@ -0,0 +1,6 @@
The configuration is based on the file [defaults.hjson](server/config/defaults.hjson). This file also contains documentation for all the available options. To override the defaults, you can copy the options you want to change into your local `config.hjson` file.
Additionally, you can override any config option with environment variables. These have the same name as the config options and are prefixed with `LEMMY_`. For example, you can override `database.pool_size` with
`LEMMY__DATABASE__POOL_SIZE=10`.
An additional option `LEMMY_DATABASE_URL` is available, which can be used with a PostgreSQL connection string like `postgres://lemmy:password@lemmy_db:5432/lemmy`, passing all connection details at once.
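Both styles can simply be exported before starting the server; the values below are illustrative only:

```bash
# Override a single nested option (double underscores separate the config sections)
export LEMMY__DATABASE__POOL_SIZE=10
# Or pass the whole database connection in one go
export LEMMY_DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
```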

docs/src/administration_install_ansible.md

@ -0,0 +1,11 @@
First, you need to [install Ansible on your local computer](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html), e.g. using `sudo apt install ansible` or the equivalent for your platform.
Then run the following commands on your local computer:
```bash
git clone https://github.com/dessalines/lemmy.git
cd lemmy/ansible/
cp inventory.example inventory
nano inventory # enter your server, domain, contact email
ansible-playbook lemmy.yml --become
```
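Updating an instance deployed this way should only require pulling the latest playbook and re-running it against the same inventory (a sketch):

```bash
cd lemmy/ansible/
git pull
ansible-playbook lemmy.yml --become
```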

docs/src/administration_install_docker.md

@ -0,0 +1,28 @@
Make sure you have both docker and docker-compose (>= `1.24.0`) installed:
```bash
mkdir lemmy/
cd lemmy/
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/lemmy.hjson
# Edit lemmy.hjson to do more configuration
docker-compose up -d
```
and go to http://localhost:8536.
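If the page does not come up, the containers can be inspected with standard docker-compose commands, for example:

```bash
docker-compose ps            # are lemmy, lemmy_db and lemmy_pictshare running?
docker-compose logs -f lemmy # follow the server logs
```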
[A sample nginx config](/ansible/templates/nginx.conf) can be set up with:
```bash
wget https://raw.githubusercontent.com/dessalines/lemmy/master/ansible/templates/nginx.conf
# Replace the {{ vars }}
sudo mv nginx.conf /etc/nginx/sites-enabled/lemmy.conf
```
#### Updating
To update to the newest version, run:
```bash
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
docker-compose up -d
```

docs/src/administration_install_kubernetes.md

@ -0,0 +1,22 @@
You'll need to have an existing Kubernetes cluster and [storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/).
Setting this up will vary depending on your provider.
To try it locally, you can use [MicroK8s](https://microk8s.io/) or [Minikube](https://kubernetes.io/docs/tasks/tools/install-minikube/).
Once you have a working cluster, edit the environment variables and volume sizes in `docker/k8s/*.yml`.
You may also want to change the service types to use `LoadBalancer`s, depending on where you're running your cluster (add `type: LoadBalancer` next to `ports`), or `NodePort`s.
By default they will use `ClusterIP`s, which will allow access only within the cluster. See the [docs](https://kubernetes.io/docs/concepts/services-networking/service/) for more on networking in Kubernetes.
**Important** Running a database in Kubernetes will work, but is generally not recommended.
If you're deploying on any of the common cloud providers, you should consider using their managed database service instead (RDS, Cloud SQL, Azure Database, etc.).
Now you can deploy:
```bash
# Add `-n foo` if you want to deploy into a specific namespace `foo`;
# otherwise your resources will be created in the `default` namespace.
kubectl apply -f docker/k8s/db.yml
kubectl apply -f docker/k8s/pictshare.yml
kubectl apply -f docker/k8s/lemmy.yml
```
If you used a `LoadBalancer`, you should see it in your cloud provider's console.
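You can also confirm the rollout from the command line (add the same `-n foo` you used with `kubectl apply`, if any):

```bash
kubectl get pods      # all pods should reach the Running state
kubectl get services  # LoadBalancer services list their external address here
```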

docs/src/contributing.md vendored Normal file

@ -0,0 +1,32 @@
# Contributing
Information about contributing to Lemmy, whether it is translating, testing, designing or programming.
## Translating
Go [here](https://github.com/dessalines/lemmy#translations) for translation instructions.
## Architecture
### Front end
- The front end is written in `typescript`, using a react-like framework called [inferno](https://infernojs.org/). All UI elements are reusable `.tsx` components.
- The main page and routing are in `ui/src/index.tsx`.
- The components are located in `ui/src/components`.
### Back end
- The back end is written in `rust`, using `diesel`, and `actix`.
- The server source code is split into main sections in `server/src`. These include:
- `db` - The low level database actions.
- Database additions are done using diesel migrations. Run `diesel migration generate xxxxx` to add new things.
- `api` - The high level user interactions (things like `CreateComment`)
- `routes` - The server endpoints.
- `apub` - The activitypub conversions.
- `websocket` - Creates the websocket server.
## Linting / Formatting
- Every front and back end commit is automatically formatted then linted using `husky` and `lint-staged`.
- Rust with `cargo fmt` and `cargo clippy` (these can also be run manually, as sketched below).
- Typescript with `prettier` and `eslint`.
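A minimal manual run of the Rust checks, from the `server/` directory (the Typescript side runs through the `husky`/`lint-staged` hooks in `ui/`):

```bash
cd server
cargo fmt
cargo clippy
```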

docs/src/contributing_docker_development.md

@ -0,0 +1,11 @@
Run:
```bash
git clone https://github.com/dessalines/lemmy
cd lemmy/docker/dev
./docker_update.sh # This builds and runs it, updating for your changes
```
and go to http://localhost:8536.
Note that compile times are relatively long with Docker, because builds can't be properly cached. If this is a problem for you, you should use [Local Development](contributing_local_development.md).

docs/src/contributing_local_development.md

@ -0,0 +1,24 @@
#### Requirements
- [Rust](https://www.rust-lang.org/)
- [Yarn](https://yarnpkg.com/en/)
- [Postgres](https://www.postgresql.org/)
#### Set up Postgres DB
```bash
psql -c "create user lemmy with password 'password' superuser;" -U postgres
psql -c 'create database lemmy with owner lemmy;' -U postgres
export LEMMY_DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
```
#### Running
```bash
git clone https://github.com/dessalines/lemmy
cd lemmy
./install.sh
# For live coding, where both the front and back end automagically reload on any save, do:
# cd ui && yarn start
# cd server && cargo watch -x run
```

File diff suppressed because it is too large

install.sh vendored

@ -1,7 +1,7 @@
 #!/bin/sh
 set -e
-export DATABASE_URL=postgres://rrr:rrr@localhost/rrr
+export LEMMY_DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
 export JWT_SECRET=changeme
 export HOSTNAME=rrr

server/.gitignore vendored

@ -1,3 +1,6 @@
 /target
 .env
 .idea
+env_setup.sh
+query_testing/*.json
+query_testing/*.json.old


@ -1 +1,2 @@
 tab_spaces = 2
+edition="2018"

server/Cargo.lock generated vendored

File diff suppressed because it is too large

server/Cargo.toml vendored

@ -5,24 +5,31 @@ authors = ["Dessalines <happydooby@gmail.com>"]
 edition = "2018"
 [dependencies]
-diesel = { version = "1.4.2", features = ["postgres","chrono"] }
+diesel = { version = "1.4.2", features = ["postgres","chrono", "r2d2"] }
 diesel_migrations = "1.4.0"
-dotenv = "0.14.1"
-bcrypt = "0.5.0"
-activitypub = "0.1.5"
+dotenv = "0.15.0"
+bcrypt = "0.6.1"
+activitypub = "0.2.0"
 chrono = { version = "0.4.7", features = ["serde"] }
 failure = "0.1.5"
-serde_json = "1.0.40"
+serde_json = { version = "1.0.45", features = ["preserve_order"]}
 serde = { version = "1.0.94", features = ["derive"] }
-actix = "0.8.3"
-actix-web = "1.0"
-actix-files = "0.1.3"
-actix-web-actors = "1.0"
-env_logger = "0.6.2"
-rand = "0.7.0"
-strum = "0.15.0"
-strum_macros = "0.15.0"
-jsonwebtoken = "6.0.1"
-regex = "1.1.9"
+actix = "0.9.0"
+actix-web = "2.0.0"
+actix-files = "0.2.1"
+actix-web-actors = "2.0.0"
+actix-rt = "1.0.0"
+env_logger = "0.7.1"
+rand = "0.7.3"
+strum = "0.17.1"
+strum_macros = "0.17.1"
+jsonwebtoken = "7.0.1"
+regex = "1.3.4"
 lazy_static = "1.3.0"
+lettre = "0.9.2"
+lettre_email = "0.9.2"
+sha2 = "0.8.1"
+rss = "1.9.0"
+htmlescape = "0.3.1"
+config = "0.10.1"
+hjson = "0.8.2"

server/clean.sh vendored Executable file

@ -0,0 +1,7 @@
#!/bin/sh
cargo update
cargo fmt
cargo check
cargo clippy
cargo outdated -R
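The script assumes the `rustfmt` and `clippy` components plus the `cargo-outdated` plugin are installed; if they are not, a one-time setup:

```bash
rustup component add rustfmt clippy
cargo install cargo-outdated
```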

server/config/defaults.hjson vendored Normal file

@ -0,0 +1,56 @@
{
# settings related to the postgresql database
database: {
# username to connect to postgres
user: "lemmy"
# password to connect to postgres
password: "password"
# host where postgres is running
host: "localhost"
# port where postgres can be accessed
port: 5432
# name of the postgres database for lemmy
database: "lemmy"
# maximum number of active sql connections
pool_size: 5
}
# the domain name of your instance (eg "dev.lemmy.ml")
hostname: "my_domain"
# address where lemmy should listen for incoming requests
bind: "0.0.0.0"
# port where lemmy should listen for incoming requests
port: 8536
# json web token for authorization between server and client
jwt_secret: "changeme"
# The dir for the front end
front_end_dir: "../ui/dist"
  # whether to enable activitypub federation. this feature is in alpha, do not enable in production, as it might
  # cause problems like remote instances fetching and permanently storing bad data.
federation_enabled: false
# rate limits for various user actions, by user ip
rate_limit: {
# maximum number of messages created in interval
message: 30
# interval length for message limit
message_per_second: 60
# maximum number of posts created in interval
post: 6
# interval length for post limit
post_per_second: 600
# maximum number of registrations in interval
register: 3
# interval length for registration limit
register_per_second: 3600
}
# # email sending configuration
# email: {
# # hostname of the smtp server
# smtp_server: ""
# # login name for smtp server
# smtp_login: ""
# # password to login to the smtp server
# smtp_password: ""
# # address to send emails from, eg "info@your-instance.com"
# smtp_from_address: ""
# }
}


@ -0,0 +1,2 @@
drop view user_mention_view;
drop table user_mention;


@ -0,0 +1,35 @@
create table user_mention (
id serial primary key,
recipient_id int references user_ on update cascade on delete cascade not null,
comment_id int references comment on update cascade on delete cascade not null,
read boolean default false not null,
published timestamp not null default now(),
unique(recipient_id, comment_id)
);
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;


@ -0,0 +1,2 @@
alter table user_ drop column default_sort_type;
alter table user_ drop column default_listing_type;


@ -0,0 +1,2 @@
alter table user_ add column default_sort_type smallint default 0 not null;
alter table user_ add column default_listing_type smallint default 1 not null;


@ -0,0 +1 @@
drop table password_reset_request;


@ -0,0 +1,6 @@
create table password_reset_request (
id serial primary key,
user_id int references user_ on update cascade on delete cascade not null,
token_encrypted text not null,
published timestamp not null default now()
);


@ -0,0 +1 @@
alter table user_ drop column lang;


@ -0,0 +1 @@
alter table user_ add column lang varchar(20) default 'browser' not null;


@ -0,0 +1,16 @@
-- Drop the columns
drop view site_view;
alter table site drop column enable_downvotes;
alter table site drop column open_registration;
alter table site drop column enable_nsfw;
-- Rebuild the views
create view site_view as
select *,
(select name from user_ u where s.creator_id = u.id) as creator_name,
(select count(*) from user_) as number_of_users,
(select count(*) from post) as number_of_posts,
(select count(*) from comment) as number_of_comments,
(select count(*) from community) as number_of_communities
from site s;


@ -0,0 +1,16 @@
-- Add the column
alter table site add column enable_downvotes boolean default true not null;
alter table site add column open_registration boolean default true not null;
alter table site add column enable_nsfw boolean default true not null;
-- Reload the view
drop view site_view;
create view site_view as
select *,
(select name from user_ u where s.creator_id = u.id) as creator_name,
(select count(*) from user_) as number_of_users,
(select count(*) from post) as number_of_posts,
(select count(*) from comment) as number_of_comments,
(select count(*) from community) as number_of_communities
from site s;


@ -0,0 +1,224 @@
-- the views
drop view user_mention_view;
drop view reply_view;
drop view comment_view;
drop view user_view;
-- user
create view user_view as
select id,
name,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- post
-- Recreate the view
drop view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- community
drop view community_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- Reply and comment view
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- community tables
drop view community_moderator_view;
drop view community_follower_view;
drop view community_user_ban_view;
drop view site_view;
create view community_moderator_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select name from community c where cm.community_id = c.id) as community_name
from community_moderator cm;
create view community_follower_view as
select *,
(select name from user_ u where cf.user_id = u.id) as user_name,
(select name from community c where cf.community_id = c.id) as community_name
from community_follower cf;
create view community_user_ban_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select name from community c where cm.community_id = c.id) as community_name
from community_user_ban cm;
create view site_view as
select *,
(select name from user_ u where s.creator_id = u.id) as creator_name,
(select count(*) from user_) as number_of_users,
(select count(*) from post) as number_of_posts,
(select count(*) from comment) as number_of_comments,
(select count(*) from community) as number_of_communities
from site s;
alter table user_ rename column avatar to icon;
alter table user_ alter column icon type bytea using icon::bytea;


@ -0,0 +1,234 @@
-- Rename to avatar
alter table user_ rename column icon to avatar;
alter table user_ alter column avatar type text;
-- Rebuild nearly all the views, to include the creator avatars
-- user
drop view user_view;
create view user_view as
select id,
name,
avatar,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- post
-- Recreate the view
drop view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- community
drop view community_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- community views
drop view community_moderator_view;
drop view community_follower_view;
drop view community_user_ban_view;
drop view site_view;
create view community_moderator_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select avatar from user_ u where cm.user_id = u.id),
(select name from community c where cm.community_id = c.id) as community_name
from community_moderator cm;
create view community_follower_view as
select *,
(select name from user_ u where cf.user_id = u.id) as user_name,
(select avatar from user_ u where cf.user_id = u.id),
(select name from community c where cf.community_id = c.id) as community_name
from community_follower cf;
create view community_user_ban_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select avatar from user_ u where cm.user_id = u.id),
(select name from community c where cm.community_id = c.id) as community_name
from community_user_ban cm;
create view site_view as
select *,
(select name from user_ u where s.creator_id = u.id) as creator_name,
(select avatar from user_ u where s.creator_id = u.id) as creator_avatar,
(select count(*) from user_) as number_of_users,
(select count(*) from post) as number_of_posts,
(select count(*) from comment) as number_of_comments,
(select count(*) from community) as number_of_communities
from site s;


@ -0,0 +1,15 @@
-- user
drop view user_view;
create view user_view as
select id,
name,
avatar,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@ -0,0 +1,16 @@
-- user
drop view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@ -0,0 +1,20 @@
-- Drop the columns
drop view user_view;
alter table user_ drop column show_avatars;
alter table user_ drop column send_notifications_to_email;
-- Rebuild the view
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@ -0,0 +1,22 @@
-- Add columns
alter table user_ add column show_avatars boolean default true not null;
alter table user_ add column send_notifications_to_email boolean default false not null;
-- Rebuild the user_view
drop view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@ -0,0 +1,16 @@
drop index idx_post_creator;
drop index idx_post_community;
drop index idx_post_like_post;
drop index idx_post_like_user;
drop index idx_comment_creator;
drop index idx_comment_parent;
drop index idx_comment_post;
drop index idx_comment_like_comment;
drop index idx_comment_like_user;
drop index idx_comment_like_post;
drop index idx_community_creator;
drop index idx_community_category;


@ -0,0 +1,17 @@
-- Go through all the tables joins, optimize every view, CTE, etc.
create index idx_post_creator on post (creator_id);
create index idx_post_community on post (community_id);
create index idx_post_like_post on post_like (post_id);
create index idx_post_like_user on post_like (user_id);
create index idx_comment_creator on comment (creator_id);
create index idx_comment_parent on comment (parent_id);
create index idx_comment_post on comment (post_id);
create index idx_comment_like_comment on comment_like (comment_id);
create index idx_comment_like_user on comment_like (user_id);
create index idx_comment_like_post on comment_like (post_id);
create index idx_community_creator on community (creator_id);
create index idx_community_category on community (category_id);


@ -0,0 +1,223 @@
-- functions and triggers
drop trigger refresh_user on user_;
drop function refresh_user();
drop trigger refresh_post on post;
drop function refresh_post();
drop trigger refresh_post_like on post_like;
drop function refresh_post_like();
drop trigger refresh_community on community;
drop function refresh_community();
drop trigger refresh_community_follower on community_follower;
drop function refresh_community_follower();
drop trigger refresh_community_user_ban on community_user_ban;
drop function refresh_community_user_ban();
drop trigger refresh_comment on comment;
drop function refresh_comment();
drop trigger refresh_comment_like on comment_like;
drop function refresh_comment_like();
-- post
-- Recreate the view
drop view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
-- user
drop materialized view user_mview;
drop view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- community
drop view community_mview;
drop materialized view community_aggregates_mview;
drop view community_view;
drop view community_aggregates_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
drop view comment_mview;
drop materialized view comment_aggregates_mview;
drop view comment_aggregates_view;
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;

@ -0,0 +1,437 @@
-- post
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id;
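-- Note: hot_rank(score, published) is a SQL function created in an earlier migration and is not part of this diff.
-- Purely as an illustrative sketch (not the actual definition), a logarithmic-score / time-decay ranking
-- function with that signature could look roughly like:
--
-- create or replace function hot_rank_sketch(score numeric, published timestamp without time zone)
-- returns integer language plpgsql as $$
-- begin
--   -- log dampens large scores; the denominator decays the rank as the item ages (hours since publication, plus an offset)
--   return floor(
--     10000 * log(greatest(1, score + 3)) /
--     power((extract(epoch from (now() - published)) / 3600 + 2)::numeric, 1.8)
--   )::integer;
-- end $$;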
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
drop view post_view;
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- user_view
drop view user_view;
create view user_view as
select
u.id,
u.name,
u.avatar,
u.email,
u.fedi_name,
u.admin,
u.banned,
u.show_avatars,
u.send_notifications_to_email,
u.published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create materialized view user_mview as select * from user_view;
create unique index idx_user_mview_id on user_mview (id);
-- community
create view community_aggregates_view as
select c.*,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c;
create materialized view community_aggregates_mview as select * from community_aggregates_view;
create unique index idx_community_aggregates_mview_id on community_aggregates_mview (id);
drop view community_view;
create view community_view as
with all_community as
(
select
ca.*
from community_aggregates_view ca
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
create view community_mview as
with all_community as
(
select
ca.*
from community_aggregates_mview ca
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
create view comment_aggregates_view as
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id;
create materialized view comment_aggregates_mview as select * from comment_aggregates_view;
create unique index idx_comment_aggregates_mview_id on comment_aggregates_mview (id);
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
create view comment_view as
with all_comment as
(
select
ca.*
from comment_aggregates_view ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view comment_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- user
create or replace function refresh_user()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently user_mview;
refresh materialized view concurrently comment_aggregates_mview; -- cause of bans
refresh materialized view concurrently post_aggregates_mview;
return null;
end $$;
create trigger refresh_user
after insert or update or delete or truncate
on user_
for each statement
execute procedure refresh_user();
-- post
create or replace function refresh_post()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_post
after insert or update or delete or truncate
on post
for each statement
execute procedure refresh_post();
-- post_like
create or replace function refresh_post_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_post_like
after insert or update or delete or truncate
on post_like
for each statement
execute procedure refresh_post_like();
-- community
create or replace function refresh_community()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently community_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_community
after insert or update or delete or truncate
on community
for each statement
execute procedure refresh_community();
-- community_follower
create or replace function refresh_community_follower()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently community_aggregates_mview;
refresh materialized view concurrently post_aggregates_mview;
return null;
end $$;
create trigger refresh_community_follower
after insert or update or delete or truncate
on community_follower
for each statement
execute procedure refresh_community_follower();
-- community_user_ban
create or replace function refresh_community_user_ban()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_aggregates_mview;
refresh materialized view concurrently post_aggregates_mview;
return null;
end $$;
create trigger refresh_community_user_ban
after insert or update or delete or truncate
on community_user_ban
for each statement
execute procedure refresh_community_user_ban();
-- comment
create or replace function refresh_comment()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently comment_aggregates_mview;
refresh materialized view concurrently community_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_comment
after insert or update or delete or truncate
on comment
for each statement
execute procedure refresh_comment();
-- comment_like
create or replace function refresh_comment_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_comment_like
after insert or update or delete or truncate
on comment_like
for each statement
execute procedure refresh_comment_like();
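-- Because these triggers are statement-level ("for each statement") and refresh the materialized views
-- concurrently, a write is reflected in the dependent *_mview relations as soon as the statement finishes.
-- A hypothetical smoke test (the row ids are made up) could look like:
--
-- insert into comment_like (user_id, comment_id, post_id, score) values (1, 1, 1, 1);
-- -- refresh_comment_like has already run, so the aggregate row is up to date:
-- select score, upvotes, downvotes from comment_aggregates_mview where id = 1;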

@ -0,0 +1,34 @@
-- Drop the triggers
drop trigger refresh_private_message on private_message;
drop function refresh_private_message();
-- Drop the view and table
drop view private_message_view cascade;
drop table private_message;
-- Rebuild the old views
drop view user_view cascade;
create view user_view as
select
u.id,
u.name,
u.avatar,
u.email,
u.fedi_name,
u.admin,
u.banned,
u.show_avatars,
u.send_notifications_to_email,
u.published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create materialized view user_mview as select * from user_view;
create unique index idx_user_mview_id on user_mview (id);
-- Drop the columns
alter table user_ drop column matrix_user_id;

@ -0,0 +1,90 @@
-- Creating private message
create table private_message (
id serial primary key,
creator_id int references user_ on update cascade on delete cascade not null,
recipient_id int references user_ on update cascade on delete cascade not null,
content text not null,
deleted boolean default false not null,
read boolean default false not null,
published timestamp not null default now(),
updated timestamp
);
-- Create the view and materialized view, which have the creator and recipient names and avatars
create view private_message_view as
select
pm.*,
u.name as creator_name,
u.avatar as creator_avatar,
u2.name as recipient_name,
u2.avatar as recipient_avatar
from private_message pm
inner join user_ u on u.id = pm.creator_id
inner join user_ u2 on u2.id = pm.recipient_id;
create materialized view private_message_mview as select * from private_message_view;
create unique index idx_private_message_mview_id on private_message_mview (id);
-- Create the triggers
create or replace function refresh_private_message()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently private_message_mview;
return null;
end $$;
create trigger refresh_private_message
after insert or update or delete or truncate
on private_message
for each statement
execute procedure refresh_private_message();
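-- With the trigger above, an insert into private_message is immediately visible in private_message_mview.
-- A hypothetical usage example (user ids 1 and 2 are made up):
--
-- insert into private_message (creator_id, recipient_id, content) values (1, 2, 'hello');
-- select creator_name, recipient_name, content, read from private_message_mview where recipient_id = 2;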
-- Update user to include matrix id
alter table user_ add column matrix_user_id text unique;
drop view user_view cascade;
create view user_view as
select
u.id,
u.name,
u.avatar,
u.email,
u.matrix_user_id,
u.fedi_name,
u.admin,
u.banned,
u.show_avatars,
u.send_notifications_to_email,
u.published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create materialized view user_mview as select * from user_view;
create unique index idx_user_mview_id on user_mview (id);
-- This is what a group pm table would look like
-- Not going to do it now because of the complications
--
-- create table private_message (
-- id serial primary key,
-- creator_id int references user_ on update cascade on delete cascade not null,
-- content text not null,
-- deleted boolean default false not null,
-- published timestamp not null default now(),
-- updated timestamp
-- );
--
-- create table private_message_recipient (
-- id serial primary key,
-- private_message_id int references private_message on update cascade on delete cascade not null,
-- recipient_id int references user_ on update cascade on delete cascade not null,
-- read boolean default false not null,
-- published timestamp not null default now(),
-- unique(private_message_id, recipient_id)
-- )

@ -0,0 +1,25 @@
-- Drop the materialized / built views
drop view reply_view;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;

@ -0,0 +1,27 @@
-- https://github.com/dessalines/lemmy/issues/197
drop view reply_view;
-- Do the reply_view referencing the comment_mview
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_mview cv, closereply
where closereply.id = cv.id
;

@ -0,0 +1 @@
drop view user_mention_mview;

@ -0,0 +1,67 @@
create view user_mention_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved,
um.recipient_id
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
left join user_mention um on um.comment_id = ac.id
union all
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
null as user_id,
null as my_vote,
null as saved,
um.recipient_id
from all_comment ac
left join user_mention um on um.comment_id = ac.id
;
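-- Example of how this materialized view can be queried for a logged-in user's unread mentions
-- (user id 34 mirrors the query_testing scripts; any id works):
--
-- select * from user_mention_mview
-- where user_id = 34 and recipient_id = 34 and read = false
-- order by published desc;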

View file

@ -0,0 +1,2 @@
drop index idx_user_name_lower;
drop index idx_user_email_lower;

@ -0,0 +1,29 @@
-- Add case insensitive username and email uniqueness
-- An example of showing the dupes:
-- select
-- max(id) as id,
-- lower(name) as lname,
-- count(*)
-- from user_
-- group by lower(name)
-- having count(*) > 1;
-- Delete username dupes, keeping the first one
delete
from user_
where id not in (
select min(id)
from user_
group by lower(name), lower(fedi_name)
);
-- The user index
create unique index idx_user_name_lower on user_ (lower(name));
-- Email lower
create unique index idx_user_email_lower on user_ (lower(email));
-- Set empty emails properly to null
update user_ set email = null where email = '';
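-- The functional indexes above both enforce case-insensitive uniqueness and can serve
-- case-insensitive lookups, e.g. (illustrative query, not part of the migration):
--
-- select id from user_ where lower(name) = lower('SomeUser');
-- -- a second account whose name differs only in case now fails with a unique violation on idx_user_name_lower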

@ -0,0 +1,211 @@
-- functions and triggers
drop trigger refresh_user on user_;
drop function refresh_user();
drop trigger refresh_post on post;
drop function refresh_post();
drop trigger refresh_post_like on post_like;
drop function refresh_post_like();
drop trigger refresh_community on community;
drop function refresh_community();
drop trigger refresh_community_follower on community_follower;
drop function refresh_community_follower();
drop trigger refresh_comment on comment;
drop function refresh_comment();
drop trigger refresh_comment_like on comment_like;
drop function refresh_comment_like();
-- post
-- Recreate the view
drop materialized view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
drop materialized view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- community
drop materialized view community_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop materialized view comment_view;
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;

@ -0,0 +1,324 @@
-- post
drop view post_view;
create materialized view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
with data
;
create unique index idx_post_view_unique on post_view (id, user_id);
create index idx_post_view_user_id on post_view (user_id);
create index idx_post_view_hot_rank_published on post_view (hot_rank desc, published desc);
create index idx_post_view_published on post_view (published desc);
create index idx_post_view_score on post_view (score desc);
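-- These indexes target the common listing query shapes, e.g. the anonymous "hot" front page
-- (illustrative query; the limit is arbitrary):
--
-- select * from post_view where user_id is null order by hot_rank desc, published desc limit 40;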
-- user_view
drop view user_view;
create materialized view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create unique index idx_user_view_unique on user_view (id);
create index idx_user_view_comment_published on user_view (comment_score desc, published desc);
create index idx_user_view_admin on user_view (admin);
create index idx_user_view_banned on user_view (banned);
-- community
drop view community_view;
create materialized view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
create unique index idx_community_view_unique on community_view (id, user_id);
create index idx_community_view_user_id on community_view (user_id);
create index idx_community_view_hot_rank_subscribed on community_view (hot_rank desc, number_of_subscribers desc);
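-- e.g. the anonymous community list ordered by hot rank (illustrative query):
--
-- select * from community_view where user_id is null
-- order by hot_rank desc, number_of_subscribers desc limit 25;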
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
create materialized view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create unique index idx_comment_view_unique on comment_view (id, user_id);
create index idx_comment_view_user_id on comment_view (user_id);
create index idx_comment_view_creator_id on comment_view (creator_id);
create index idx_comment_view_post_id on comment_view (post_id);
create index idx_comment_view_score on comment_view (score desc);
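-- e.g. the comments of one post for a logged-in user (ids are made up):
--
-- select * from comment_view where post_id = 7 and user_id = 3 order by score desc;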
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- user
create or replace function refresh_user()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_view; -- cause of bans
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_user
after insert or update or delete or truncate
on user_
for each statement
execute procedure refresh_user();
-- post
create or replace function refresh_post()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_post
after insert or update or delete or truncate
on post
for each statement
execute procedure refresh_post();
-- post_like
create or replace function refresh_post_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_post_like
after insert or update or delete or truncate
on post_like
for each statement
execute procedure refresh_post_like();
-- community
create or replace function refresh_community()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
refresh materialized view concurrently community_view;
return null;
end $$;
create trigger refresh_community
after insert or update or delete or truncate
on community
for each statement
execute procedure refresh_community();
-- community_follower
create or replace function refresh_community_follower()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently community_view;
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_community_follower
after insert or update or delete or truncate
on community_follower
for each statement
execute procedure refresh_community_follower();
-- comment
create or replace function refresh_comment()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
refresh materialized view concurrently comment_view;
return null;
end $$;
create trigger refresh_comment
after insert or update or delete or truncate
on comment
for each statement
execute procedure refresh_comment();
-- comment_like
create or replace function refresh_comment_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_view;
return null;
end $$;
create trigger refresh_comment_like
after insert or update or delete or truncate
on comment_like
for each statement
execute procedure refresh_comment_like();

26
server/query_testing/apache_bench_report.sh vendored Executable file
@ -0,0 +1,26 @@
#!/bin/bash
set -e
declare -a arr=(
"https://mastodon.social/"
"https://peertube.social/"
"https://dev.lemmy.ml/"
"https://dev.lemmy.ml/feeds/all.xml"
"https://dev.lemmy.ml/.well-known/nodeinfo"
"https://fediverse.blog/.well-known/nodeinfo"
"https://torrents-csv.ml/service/search?q=wheel&page=1&type_=torrent"
)
## now loop through the above array
for i in "${arr[@]}"
do
ab -c 10 -t 10 "$i" > out.abtest
grep "Server Hostname:" out.abtest
grep "Document Path:" out.abtest
grep "Requests per second" out.abtest
grep "(mean, across all concurrent requests)" out.abtest
grep "Transfer rate:" out.abtest
echo "---"
done
rm *.abtest

34
server/query_testing/api_benchmark.sh vendored Executable file
@ -0,0 +1,34 @@
#!/bin/bash
set -e
# By default, this script runs against `http://127.0.0.1:8536`, but you can pass a different Lemmy instance,
# e.g. `./api_benchmark.sh "https://example.com"`.
DOMAIN=${1:-"http://127.0.0.1:8536"}
declare -a arr=(
"/api/v1/site"
"/api/v1/categories"
"/api/v1/modlog"
"/api/v1/search?q=test&type_=Posts&sort=Hot"
"/api/v1/community"
"/api/v1/community/list?sort=Hot"
"/api/v1/post/list?sort=Hot&type_=All"
)
## now loop through the above array
for path in "${arr[@]}"
do
URL="$DOMAIN$path"
printf "\n\n\n"
echo "testing $URL"
curl --show-error --fail --silent "$URL" >/dev/null
ab -c 64 -t 10 "$URL" > out.abtest
grep "Server Hostname:" out.abtest
grep "Document Path:" out.abtest
grep "Requests per second" out.abtest
grep "(mean, across all concurrent requests)" out.abtest
grep "Transfer rate:" out.abtest
echo "---"
done
rm *.abtest

@ -0,0 +1,32 @@
#!/bin/bash
set -e
# Do the views first
echo "explain (analyze, format json) select * from user_mview" > explain.sql
psql -qAt -U lemmy -f explain.sql > user_view.json
echo "explain (analyze, format json) select * from post_mview where user_id is null order by hot_rank desc, published desc" > explain.sql
psql -qAt -U lemmy -f explain.sql > post_view.json
echo "explain (analyze, format json) select * from comment_mview where user_id is null" > explain.sql
psql -qAt -U lemmy -f explain.sql > comment_view.json
echo "explain (analyze, format json) select * from community_mview where user_id is null order by hot_rank desc" > explain.sql
psql -qAt -U lemmy -f explain.sql > community_view.json
echo "explain (analyze, format json) select * from site_view limit 1" > explain.sql
psql -qAt -U lemmy -f explain.sql > site_view.json
echo "explain (analyze, format json) select * from reply_view where user_id = 34 and recipient_id = 34" > explain.sql
psql -qAt -U lemmy -f explain.sql > reply_view.json
echo "explain (analyze, format json) select * from user_mention_view where user_id = 34 and recipient_id = 34" > explain.sql
psql -qAt -U lemmy -f explain.sql > user_mention_view.json
echo "explain (analyze, format json) select * from user_mention_mview where user_id = 34 and recipient_id = 34" > explain.sql
psql -qAt -U lemmy -f explain.sql > user_mention_mview.json
grep "Execution Time" *.json
rm explain.sql

@ -1,10 +1,13 @@
use super::*; use super::*;
use crate::send_email;
use crate::settings::Settings;
use diesel::PgConnection;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct CreateComment { pub struct CreateComment {
content: String, content: String,
parent_id: Option<i32>, parent_id: Option<i32>,
edit_id: Option<i32>, edit_id: Option<i32>, // TODO this isn't used
pub post_id: i32, pub post_id: i32,
auth: String, auth: String,
} }
@ -12,7 +15,7 @@ pub struct CreateComment {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct EditComment { pub struct EditComment {
content: String, content: String,
parent_id: Option<i32>, parent_id: Option<i32>, // TODO why are the parent_id, creator_id, post_id, etc fields required? They aren't going to change
edit_id: i32, edit_id: i32,
creator_id: i32, creator_id: i32,
pub post_id: i32, pub post_id: i32,
@ -32,8 +35,8 @@ pub struct SaveComment {
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct CommentResponse { pub struct CommentResponse {
op: String,
pub comment: CommentView, pub comment: CommentView,
pub recipient_ids: Vec<i32>,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -45,26 +48,27 @@ pub struct CreateCommentLike {
} }
impl Perform<CommentResponse> for Oper<CreateComment> { impl Perform<CommentResponse> for Oper<CreateComment> {
fn perform(&self) -> Result<CommentResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &CreateComment = &self.data; let data: &CreateComment = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let hostname = &format!("https://{}", Settings::get().hostname);
// Check for a community ban // Check for a community ban
let post = Post::read(&conn, data.post_id)?; let post = Post::read(&conn, data.post_id)?;
if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
let content_slurs_removed = remove_slurs(&data.content.to_owned()); let content_slurs_removed = remove_slurs(&data.content.to_owned());
@ -82,39 +86,140 @@ impl Perform<CommentResponse> for Oper<CreateComment> {
let inserted_comment = match Comment::create(&conn, &comment_form) { let inserted_comment = match Comment::create(&conn, &comment_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_create_comment"))?, Err(_e) => return Err(APIError::err("couldnt_create_comment").into()),
};
let mut recipient_ids = Vec::new();
// Scan the comment for user mentions, add those rows
let extracted_usernames = extract_usernames(&comment_form.content);
for username_mention in &extracted_usernames {
if let Ok(mention_user) = User_::read_from_name(&conn, (*username_mention).to_string()) {
// You can't mention yourself
// At some point, make it so you can't tag the parent creator either
// This can cause two notifications, one for reply and the other for mention
if mention_user.id != user_id {
recipient_ids.push(mention_user.id);
let user_mention_form = UserMentionForm {
recipient_id: mention_user.id,
comment_id: inserted_comment.id,
read: None,
};
// Allow this to fail softly, since comment edits might re-update or replace it
// Let the uniqueness handle this fail
match UserMention::create(&conn, &user_mention_form) {
Ok(_mention) => (),
Err(_e) => eprintln!("{}", &_e),
};
// Send an email to those users that have notifications on
if mention_user.send_notifications_to_email {
if let Some(mention_email) = mention_user.email {
let subject = &format!(
"{} - Mentioned by {}",
Settings::get().hostname,
claims.username
);
let html = &format!(
"<h1>User Mention</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
claims.username, comment_form.content, hostname
);
match send_email(subject, &mention_email, &mention_user.name, html) {
Ok(_o) => _o,
Err(e) => eprintln!("{}", e),
};
}
}
}
}
}
// Send notifs to the parent commenter / poster
match data.parent_id {
Some(parent_id) => {
let parent_comment = Comment::read(&conn, parent_id)?;
if parent_comment.creator_id != user_id {
let parent_user = User_::read(&conn, parent_comment.creator_id)?;
recipient_ids.push(parent_user.id);
if parent_user.send_notifications_to_email {
if let Some(comment_reply_email) = parent_user.email {
let subject = &format!(
"{} - Reply from {}",
Settings::get().hostname,
claims.username
);
let html = &format!(
"<h1>Comment Reply</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
claims.username, comment_form.content, hostname
);
match send_email(subject, &comment_reply_email, &parent_user.name, html) {
Ok(_o) => _o,
Err(e) => eprintln!("{}", e),
};
}
}
}
}
// It's a post
None => {
if post.creator_id != user_id {
let parent_user = User_::read(&conn, post.creator_id)?;
recipient_ids.push(parent_user.id);
if parent_user.send_notifications_to_email {
if let Some(post_reply_email) = parent_user.email {
let subject = &format!(
"{} - Reply from {}",
Settings::get().hostname,
claims.username
);
let html = &format!(
"<h1>Post Reply</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
claims.username, comment_form.content, hostname
);
match send_email(subject, &post_reply_email, &parent_user.name, html) {
Ok(_o) => _o,
Err(e) => eprintln!("{}", e),
};
}
}
}
}
}; };
// You like your own comment by default // You like your own comment by default
let like_form = CommentLikeForm { let like_form = CommentLikeForm {
comment_id: inserted_comment.id, comment_id: inserted_comment.id,
post_id: data.post_id, post_id: data.post_id,
user_id: user_id, user_id,
score: 1, score: 1,
}; };
let _inserted_like = match CommentLike::like(&conn, &like_form) { let _inserted_like = match CommentLike::like(&conn, &like_form) {
Ok(like) => like, Ok(like) => like,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_comment"))?, Err(_e) => return Err(APIError::err("couldnt_like_comment").into()),
}; };
let comment_view = CommentView::read(&conn, inserted_comment.id, Some(user_id))?; let comment_view = CommentView::read(&conn, inserted_comment.id, Some(user_id))?;
Ok(CommentResponse { Ok(CommentResponse {
op: self.op.to_string(),
comment: comment_view, comment: comment_view,
recipient_ids,
}) })
} }
} }
impl Perform<CommentResponse> for Oper<EditComment> { impl Perform<CommentResponse> for Oper<EditComment> {
fn perform(&self) -> Result<CommentResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &EditComment = &self.data; let data: &EditComment = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -134,17 +239,17 @@ impl Perform<CommentResponse> for Oper<EditComment> {
editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect()); editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect());
if !editors.contains(&user_id) { if !editors.contains(&user_id) {
return Err(APIError::err(&self.op, "no_comment_edit_allowed"))?; return Err(APIError::err("no_comment_edit_allowed").into());
} }
// Check for a community ban // Check for a community ban
if CommunityUserBanView::get(&conn, user_id, orig_comment.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, orig_comment.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
} }
@ -167,9 +272,57 @@ impl Perform<CommentResponse> for Oper<EditComment> {
let _updated_comment = match Comment::update(&conn, data.edit_id, &comment_form) { let _updated_comment = match Comment::update(&conn, data.edit_id, &comment_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_comment"))?, Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
}; };
let mut recipient_ids = Vec::new();
// Scan the comment for user mentions, add those rows
let extracted_usernames = extract_usernames(&comment_form.content);
for username_mention in &extracted_usernames {
let mention_user = User_::read_from_name(&conn, (*username_mention).to_string());
if mention_user.is_ok() {
let mention_user_id = mention_user?.id;
// You can't mention yourself
// At some point, make it so you can't tag the parent creator either
// This can cause two notifications, one for reply and the other for mention
if mention_user_id != user_id {
recipient_ids.push(mention_user_id);
let user_mention_form = UserMentionForm {
recipient_id: mention_user_id,
comment_id: data.edit_id,
read: None,
};
// Allow this to fail softly, since comment edits might re-update or replace it
// Let the uniqueness handle this fail
match UserMention::create(&conn, &user_mention_form) {
Ok(_mention) => (),
Err(_e) => eprintln!("{}", &_e),
}
}
}
}
// Add to recipient ids
match data.parent_id {
Some(parent_id) => {
let parent_comment = Comment::read(&conn, parent_id)?;
if parent_comment.creator_id != user_id {
let parent_user = User_::read(&conn, parent_comment.creator_id)?;
recipient_ids.push(parent_user.id);
}
}
None => {
let post = Post::read(&conn, data.post_id)?;
recipient_ids.push(post.creator_id);
}
}
// Mod tables // Mod tables
if let Some(removed) = data.removed.to_owned() { if let Some(removed) = data.removed.to_owned() {
let form = ModRemoveCommentForm { let form = ModRemoveCommentForm {
@ -184,77 +337,101 @@ impl Perform<CommentResponse> for Oper<EditComment> {
let comment_view = CommentView::read(&conn, data.edit_id, Some(user_id))?; let comment_view = CommentView::read(&conn, data.edit_id, Some(user_id))?;
Ok(CommentResponse { Ok(CommentResponse {
op: self.op.to_string(),
comment: comment_view, comment: comment_view,
recipient_ids,
}) })
} }
} }
impl Perform<CommentResponse> for Oper<SaveComment> { impl Perform<CommentResponse> for Oper<SaveComment> {
fn perform(&self) -> Result<CommentResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &SaveComment = &self.data; let data: &SaveComment = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let comment_saved_form = CommentSavedForm { let comment_saved_form = CommentSavedForm {
comment_id: data.comment_id, comment_id: data.comment_id,
user_id: user_id, user_id,
}; };
if data.save { if data.save {
match CommentSaved::save(&conn, &comment_saved_form) { match CommentSaved::save(&conn, &comment_saved_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_comment"))?, Err(_e) => return Err(APIError::err("couldnt_save_comment").into()),
}; };
} else { } else {
match CommentSaved::unsave(&conn, &comment_saved_form) { match CommentSaved::unsave(&conn, &comment_saved_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_comment"))?, Err(_e) => return Err(APIError::err("couldnt_save_comment").into()),
}; };
} }
let comment_view = CommentView::read(&conn, data.comment_id, Some(user_id))?; let comment_view = CommentView::read(&conn, data.comment_id, Some(user_id))?;
Ok(CommentResponse { Ok(CommentResponse {
op: self.op.to_string(),
comment: comment_view, comment: comment_view,
recipient_ids: Vec::new(),
}) })
} }
} }
impl Perform<CommentResponse> for Oper<CreateCommentLike> { impl Perform<CommentResponse> for Oper<CreateCommentLike> {
fn perform(&self) -> Result<CommentResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &CreateCommentLike = &self.data; let data: &CreateCommentLike = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let mut recipient_ids = Vec::new();
// Don't do a downvote if site has downvotes disabled
if data.score == -1 {
let site = SiteView::read(&conn)?;
if !site.enable_downvotes {
return Err(APIError::err("downvotes_disabled").into());
}
}
// Check for a community ban // Check for a community ban
let post = Post::read(&conn, data.post_id)?; let post = Post::read(&conn, data.post_id)?;
if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
}
let comment = Comment::read(&conn, data.comment_id)?;
// Add to recipient ids
match comment.parent_id {
Some(parent_id) => {
let parent_comment = Comment::read(&conn, parent_id)?;
if parent_comment.creator_id != user_id {
let parent_user = User_::read(&conn, parent_comment.creator_id)?;
recipient_ids.push(parent_user.id);
}
}
None => {
recipient_ids.push(post.creator_id);
}
} }
let like_form = CommentLikeForm { let like_form = CommentLikeForm {
comment_id: data.comment_id, comment_id: data.comment_id,
post_id: data.post_id, post_id: data.post_id,
user_id: user_id, user_id,
score: data.score, score: data.score,
}; };
@ -262,11 +439,11 @@ impl Perform<CommentResponse> for Oper<CreateCommentLike> {
CommentLike::remove(&conn, &like_form)?; CommentLike::remove(&conn, &like_form)?;
// Only add the like if the score isn't 0 // Only add the like if the score isn't 0
let do_add = &like_form.score != &0 && (&like_form.score == &1 || &like_form.score == &-1); let do_add = like_form.score != 0 && (like_form.score == 1 || like_form.score == -1);
if do_add { if do_add {
let _inserted_like = match CommentLike::like(&conn, &like_form) { let _inserted_like = match CommentLike::like(&conn, &like_form) {
Ok(like) => like, Ok(like) => like,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_comment"))?, Err(_e) => return Err(APIError::err("couldnt_like_comment").into()),
}; };
} }
@ -274,8 +451,8 @@ impl Perform<CommentResponse> for Oper<CreateCommentLike> {
let liked_comment = CommentView::read(&conn, data.comment_id, Some(user_id))?; let liked_comment = CommentView::read(&conn, data.comment_id, Some(user_id))?;
Ok(CommentResponse { Ok(CommentResponse {
op: self.op.to_string(),
comment: liked_comment, comment: liked_comment,
recipient_ids,
}) })
} }
} }

@ -1,4 +1,5 @@
use super::*; use super::*;
use diesel::PgConnection;
use std::str::FromStr; use std::str::FromStr;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -10,10 +11,10 @@ pub struct GetCommunity {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetCommunityResponse { pub struct GetCommunityResponse {
op: String, pub community: CommunityView,
community: CommunityView,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>, admins: Vec<UserView>,
pub online: usize,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -28,7 +29,6 @@ pub struct CreateCommunity {
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct CommunityResponse { pub struct CommunityResponse {
op: String,
pub community: CommunityView, pub community: CommunityView,
} }
@ -42,7 +42,6 @@ pub struct ListCommunities {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct ListCommunitiesResponse { pub struct ListCommunitiesResponse {
op: String,
communities: Vec<CommunityView>, communities: Vec<CommunityView>,
} }
@ -58,7 +57,6 @@ pub struct BanFromCommunity {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct BanFromCommunityResponse { pub struct BanFromCommunityResponse {
op: String,
user: UserView, user: UserView,
banned: bool, banned: bool,
} }
@ -73,7 +71,6 @@ pub struct AddModToCommunity {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct AddModToCommunityResponse { pub struct AddModToCommunityResponse {
op: String,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
} }
@ -106,7 +103,6 @@ pub struct GetFollowedCommunities {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetFollowedCommunitiesResponse { pub struct GetFollowedCommunitiesResponse {
op: String,
communities: Vec<CommunityFollowerView>, communities: Vec<CommunityFollowerView>,
} }
@ -118,9 +114,8 @@ pub struct TransferCommunity {
} }
impl Perform<GetCommunityResponse> for Oper<GetCommunity> { impl Perform<GetCommunityResponse> for Oper<GetCommunity> {
fn perform(&self) -> Result<GetCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetCommunityResponse, Error> {
let data: &GetCommunity = &self.data; let data: &GetCommunity = &self.data;
let conn = establish_connection();
let user_id: Option<i32> = match &data.auth { let user_id: Option<i32> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -136,18 +131,24 @@ impl Perform<GetCommunityResponse> for Oper<GetCommunity> {
let community_id = match data.id { let community_id = match data.id {
Some(id) => id, Some(id) => id,
None => { None => {
Community::read_from_name(&conn, data.name.to_owned().unwrap_or("main".to_string()))?.id match Community::read_from_name(
&conn,
data.name.to_owned().unwrap_or_else(|| "main".to_string()),
) {
Ok(community) => community.id,
Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}
} }
}; };
let community_view = match CommunityView::read(&conn, community_id, user_id) { let community_view = match CommunityView::read(&conn, community_id, user_id) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
let moderators = match CommunityModeratorView::for_community(&conn, community_id) { let moderators = match CommunityModeratorView::for_community(&conn, community_id) {
Ok(moderators) => moderators, Ok(moderators) => moderators,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
let site_creator_id = Site::read(&conn, 1)?.creator_id; let site_creator_id = Site::read(&conn, 1)?.creator_id;
@ -158,36 +159,42 @@ impl Perform<GetCommunityResponse> for Oper<GetCommunity> {
// Return the jwt // Return the jwt
Ok(GetCommunityResponse { Ok(GetCommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
moderators: moderators, moderators,
admins: admins, admins,
online: 0,
}) })
} }
} }
impl Perform<CommunityResponse> for Oper<CreateCommunity> { impl Perform<CommunityResponse> for Oper<CreateCommunity> {
fn perform(&self) -> Result<CommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommunityResponse, Error> {
let data: &CreateCommunity = &self.data; let data: &CreateCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) if let Err(slurs) = slur_check(&data.name) {
|| has_slurs(&data.title) return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
|| (data.description.is_some() && has_slurs(&data.description.to_owned().unwrap())) }
{
return Err(APIError::err(&self.op, "no_slurs"))?; if let Err(slurs) = slur_check(&data.title) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
// When you create a community, make sure the user becomes a moderator and a follower // When you create a community, make sure the user becomes a moderator and a follower
@ -205,65 +212,67 @@ impl Perform<CommunityResponse> for Oper<CreateCommunity> {
let inserted_community = match Community::create(&conn, &community_form) { let inserted_community = match Community::create(&conn, &community_form) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "community_already_exists"))?, Err(_e) => return Err(APIError::err("community_already_exists").into()),
}; };
let community_moderator_form = CommunityModeratorForm { let community_moderator_form = CommunityModeratorForm {
community_id: inserted_community.id, community_id: inserted_community.id,
user_id: user_id, user_id,
}; };
let _inserted_community_moderator = let _inserted_community_moderator =
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
let community_follower_form = CommunityFollowerForm { let community_follower_form = CommunityFollowerForm {
community_id: inserted_community.id, community_id: inserted_community.id,
user_id: user_id, user_id,
}; };
let _inserted_community_follower = let _inserted_community_follower =
match CommunityFollower::follow(&conn, &community_follower_form) { match CommunityFollower::follow(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
let community_view = CommunityView::read(&conn, inserted_community.id, Some(user_id))?; let community_view = CommunityView::read(&conn, inserted_community.id, Some(user_id))?;
Ok(CommunityResponse { Ok(CommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
}) })
} }
} }
impl Perform<CommunityResponse> for Oper<EditCommunity> { impl Perform<CommunityResponse> for Oper<EditCommunity> {
fn perform(&self) -> Result<CommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommunityResponse, Error> {
let data: &EditCommunity = &self.data; let data: &EditCommunity = &self.data;
if has_slurs(&data.name) || has_slurs(&data.title) { if let Err(slurs) = slur_check(&data.name) {
return Err(APIError::err(&self.op, "no_slurs"))?; return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
} }
let conn = establish_connection(); if let Err(slurs) = slur_check(&data.title) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
}
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
// Verify its a mod // Verify its a mod
@ -276,7 +285,7 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
); );
editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect()); editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect());
if !editors.contains(&user_id) { if !editors.contains(&user_id) {
return Err(APIError::err(&self.op, "no_community_edit_allowed"))?; return Err(APIError::err("no_community_edit_allowed").into());
} }
let community_form = CommunityForm { let community_form = CommunityForm {
@ -293,7 +302,7 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
let _updated_community = match Community::update(&conn, data.edit_id, &community_form) { let _updated_community = match Community::update(&conn, data.edit_id, &community_form) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_community"))?, Err(_e) => return Err(APIError::err("couldnt_update_community").into()),
}; };
// Mod tables // Mod tables
@ -307,7 +316,7 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
community_id: data.edit_id, community_id: data.edit_id,
removed: Some(removed), removed: Some(removed),
reason: data.reason.to_owned(), reason: data.reason.to_owned(),
expires: expires, expires,
}; };
ModRemoveCommunity::create(&conn, &form)?; ModRemoveCommunity::create(&conn, &form)?;
} }
@ -315,16 +324,14 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
let community_view = CommunityView::read(&conn, data.edit_id, Some(user_id))?; let community_view = CommunityView::read(&conn, data.edit_id, Some(user_id))?;
Ok(CommunityResponse { Ok(CommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
}) })
} }
} }
impl Perform<ListCommunitiesResponse> for Oper<ListCommunities> { impl Perform<ListCommunitiesResponse> for Oper<ListCommunities> {
fn perform(&self) -> Result<ListCommunitiesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<ListCommunitiesResponse, Error> {
let data: &ListCommunities = &self.data; let data: &ListCommunities = &self.data;
let conn = establish_connection();
let user_claims: Option<Claims> = match &data.auth { let user_claims: Option<Claims> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -346,64 +353,62 @@ impl Perform<ListCommunitiesResponse> for Oper<ListCommunities> {
let sort = SortType::from_str(&data.sort)?; let sort = SortType::from_str(&data.sort)?;
let communities: Vec<CommunityView> = CommunityView::list( let communities = CommunityQueryBuilder::create(&conn)
&conn, &sort, user_id, show_nsfw, None, data.page, data.limit, .sort(&sort)
)?; .for_user(user_id)
.show_nsfw(show_nsfw)
.page(data.page)
.limit(data.limit)
.list()?;
// Return the jwt // Return the jwt
Ok(ListCommunitiesResponse { Ok(ListCommunitiesResponse { communities })
op: self.op.to_string(),
communities: communities,
})
} }
} }
impl Perform<CommunityResponse> for Oper<FollowCommunity> { impl Perform<CommunityResponse> for Oper<FollowCommunity> {
fn perform(&self) -> Result<CommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommunityResponse, Error> {
let data: &FollowCommunity = &self.data; let data: &FollowCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let community_follower_form = CommunityFollowerForm { let community_follower_form = CommunityFollowerForm {
community_id: data.community_id, community_id: data.community_id,
user_id: user_id, user_id,
}; };
if data.follow { if data.follow {
match CommunityFollower::follow(&conn, &community_follower_form) { match CommunityFollower::follow(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
} else { } else {
match CommunityFollower::ignore(&conn, &community_follower_form) { match CommunityFollower::ignore(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
} }
let community_view = CommunityView::read(&conn, data.community_id, Some(user_id))?; let community_view = CommunityView::read(&conn, data.community_id, Some(user_id))?;
Ok(CommunityResponse { Ok(CommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
}) })
} }
} }
impl Perform<GetFollowedCommunitiesResponse> for Oper<GetFollowedCommunities> { impl Perform<GetFollowedCommunitiesResponse> for Oper<GetFollowedCommunities> {
fn perform(&self) -> Result<GetFollowedCommunitiesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetFollowedCommunitiesResponse, Error> {
let data: &GetFollowedCommunities = &self.data; let data: &GetFollowedCommunities = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -411,25 +416,21 @@ impl Perform<GetFollowedCommunitiesResponse> for Oper<GetFollowedCommunities> {
let communities: Vec<CommunityFollowerView> = let communities: Vec<CommunityFollowerView> =
match CommunityFollowerView::for_user(&conn, user_id) { match CommunityFollowerView::for_user(&conn, user_id) {
Ok(communities) => communities, Ok(communities) => communities,
Err(_e) => return Err(APIError::err(&self.op, "system_err_login"))?, Err(_e) => return Err(APIError::err("system_err_login").into()),
}; };
// Return the jwt // Return the jwt
Ok(GetFollowedCommunitiesResponse { Ok(GetFollowedCommunitiesResponse { communities })
op: self.op.to_string(),
communities: communities,
})
} }
} }
impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> { impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
fn perform(&self) -> Result<BanFromCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<BanFromCommunityResponse, Error> {
let data: &BanFromCommunity = &self.data; let data: &BanFromCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -442,12 +443,12 @@ impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
if data.ban { if data.ban {
match CommunityUserBan::ban(&conn, &community_user_ban_form) { match CommunityUserBan::ban(&conn, &community_user_ban_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_user_already_banned"))?, Err(_e) => return Err(APIError::err("community_user_already_banned").into()),
}; };
} else { } else {
match CommunityUserBan::unban(&conn, &community_user_ban_form) { match CommunityUserBan::unban(&conn, &community_user_ban_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_user_already_banned"))?, Err(_e) => return Err(APIError::err("community_user_already_banned").into()),
}; };
} }
@ -463,14 +464,13 @@ impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
community_id: data.community_id, community_id: data.community_id,
reason: data.reason.to_owned(), reason: data.reason.to_owned(),
banned: Some(data.ban), banned: Some(data.ban),
expires: expires, expires,
}; };
ModBanFromCommunity::create(&conn, &form)?; ModBanFromCommunity::create(&conn, &form)?;
let user_view = UserView::read(&conn, data.user_id)?; let user_view = UserView::read(&conn, data.user_id)?;
Ok(BanFromCommunityResponse { Ok(BanFromCommunityResponse {
op: self.op.to_string(),
user: user_view, user: user_view,
banned: data.ban, banned: data.ban,
}) })
@ -478,13 +478,12 @@ impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
} }
impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> { impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> {
fn perform(&self) -> Result<AddModToCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<AddModToCommunityResponse, Error> {
let data: &AddModToCommunity = &self.data; let data: &AddModToCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -497,22 +496,12 @@ impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> {
if data.added { if data.added {
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
} else { } else {
match CommunityModerator::leave(&conn, &community_moderator_form) { match CommunityModerator::leave(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
} }
@ -527,21 +516,17 @@ impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> {
let moderators = CommunityModeratorView::for_community(&conn, data.community_id)?; let moderators = CommunityModeratorView::for_community(&conn, data.community_id)?;
Ok(AddModToCommunityResponse { Ok(AddModToCommunityResponse { moderators })
op: self.op.to_string(),
moderators: moderators,
})
} }
} }
impl Perform<GetCommunityResponse> for Oper<TransferCommunity> { impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
fn perform(&self) -> Result<GetCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetCommunityResponse, Error> {
let data: &TransferCommunity = &self.data; let data: &TransferCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -555,14 +540,8 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
admins.insert(0, creator_user); admins.insert(0, creator_user);
// Make sure user is the creator, or an admin // Make sure user is the creator, or an admin
if user_id != read_community.creator_id if user_id != read_community.creator_id && !admins.iter().map(|a| a.id).any(|x| x == user_id) {
&& !admins return Err(APIError::err("not_an_admin").into());
.iter()
.map(|a| a.id)
.collect::<Vec<i32>>()
.contains(&user_id)
{
return Err(APIError::err(&self.op, "not_an_admin"))?;
} }
let community_form = CommunityForm { let community_form = CommunityForm {
@ -579,7 +558,7 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
let _updated_community = match Community::update(&conn, data.community_id, &community_form) { let _updated_community = match Community::update(&conn, data.community_id, &community_form) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_community"))?, Err(_e) => return Err(APIError::err("couldnt_update_community").into()),
}; };
// You also have to re-do the community_moderator table, reordering it. // You also have to re-do the community_moderator table, reordering it.
@ -602,12 +581,7 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
let _inserted_community_moderator = let _inserted_community_moderator =
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
} }
@ -622,20 +596,20 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
let community_view = match CommunityView::read(&conn, data.community_id, Some(user_id)) { let community_view = match CommunityView::read(&conn, data.community_id, Some(user_id)) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
let moderators = match CommunityModeratorView::for_community(&conn, data.community_id) { let moderators = match CommunityModeratorView::for_community(&conn, data.community_id) {
Ok(moderators) => moderators, Ok(moderators) => moderators,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
// Return the jwt // Return the jwt
Ok(GetCommunityResponse { Ok(GetCommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
moderators: moderators, moderators,
admins: admins, admins,
online: 0,
}) })
} }
} }
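
Throughout this file the old has_slurs() boolean check is replaced by slur_check(), whose Err carries the matched terms, plus slurs_vec_to_str(), which folds them into the message handed to APIError::err. Only the call sites appear in this diff, so the sketch below is a guess at compatible signatures; the word list, the lowercase matching and the message format are assumptions, and the real helpers live at the crate root (see the use crate::{...} line further down) and may differ:

    // Placeholder terms; the real check may use a regex and a different list.
    const SLUR_LIST: [&str; 2] = ["badword1", "badword2"];

    fn slur_check(text: &str) -> Result<(), Vec<&'static str>> {
        let found: Vec<&'static str> = SLUR_LIST
            .iter()
            .copied()
            .filter(|s| text.to_lowercase().contains(*s))
            .collect();
        if found.is_empty() {
            Ok(())
        } else {
            Err(found)
        }
    }

    fn slurs_vec_to_str(slurs: Vec<&str>) -> String {
        // Illustrative message format only.
        format!("slurs found: {}", slurs.join(", "))
    }

    fn main() {
        assert!(slur_check("a friendly comment").is_ok());
        if let Err(slurs) = slur_check("this contains badword1") {
            println!("{}", slurs_vec_to_str(slurs));
        }
    }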


@ -5,12 +5,22 @@ use crate::db::community::*;
use crate::db::community_view::*; use crate::db::community_view::*;
use crate::db::moderator::*; use crate::db::moderator::*;
use crate::db::moderator_views::*; use crate::db::moderator_views::*;
use crate::db::password_reset_request::*;
use crate::db::post::*; use crate::db::post::*;
use crate::db::post_view::*; use crate::db::post_view::*;
use crate::db::private_message::*;
use crate::db::private_message_view::*;
use crate::db::site::*;
use crate::db::site_view::*;
use crate::db::user::*; use crate::db::user::*;
use crate::db::user_mention::*;
use crate::db::user_mention_view::*;
use crate::db::user_view::*; use crate::db::user_view::*;
use crate::db::*; use crate::db::*;
use crate::{has_slurs, naive_from_unix, naive_now, remove_slurs, Settings}; use crate::{
extract_usernames, naive_from_unix, naive_now, remove_slurs, slur_check, slurs_vec_to_str,
};
use diesel::PgConnection;
use failure::Error; use failure::Error;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -20,74 +30,32 @@ pub mod post;
pub mod site; pub mod site;
pub mod user; pub mod user;
#[derive(EnumString, ToString, Debug)]
pub enum UserOperation {
Login,
Register,
CreateCommunity,
CreatePost,
ListCommunities,
ListCategories,
GetPost,
GetCommunity,
CreateComment,
EditComment,
SaveComment,
CreateCommentLike,
GetPosts,
CreatePostLike,
EditPost,
SavePost,
EditCommunity,
FollowCommunity,
GetFollowedCommunities,
GetUserDetails,
GetReplies,
GetModlog,
BanFromCommunity,
AddModToCommunity,
CreateSite,
EditSite,
GetSite,
AddAdmin,
BanUser,
Search,
MarkAllAsRead,
SaveUserSettings,
TransferCommunity,
TransferSite,
DeleteAccount,
}
#[derive(Fail, Debug)] #[derive(Fail, Debug)]
#[fail(display = "{{\"op\":\"{}\", \"error\":\"{}\"}}", op, message)] #[fail(display = "{{\"error\":\"{}\"}}", message)]
pub struct APIError { pub struct APIError {
pub op: String,
pub message: String, pub message: String,
} }
impl APIError { impl APIError {
pub fn err(op: &UserOperation, msg: &str) -> Self { pub fn err(msg: &str) -> Self {
APIError { APIError {
op: op.to_string(),
message: msg.to_string(), message: msg.to_string(),
} }
} }
} }
pub struct Oper<T> { pub struct Oper<T> {
op: UserOperation,
data: T, data: T,
} }
impl<T> Oper<T> { impl<T> Oper<T> {
pub fn new(op: UserOperation, data: T) -> Oper<T> { pub fn new(data: T) -> Oper<T> {
Oper { op: op, data: data } Oper { data }
} }
} }
pub trait Perform<T> { pub trait Perform<T> {
fn perform(&self) -> Result<T, Error> fn perform(&self, conn: &PgConnection) -> Result<T, Error>
where where
T: Sized; T: Sized;
} }
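
Two structural changes run through this module: response structs drop their op field, and Perform::perform() now borrows a PgConnection from the caller instead of calling establish_connection() itself. The miniature below reproduces just that shape so the resulting call pattern, Oper::new(data).perform(&conn), and the APIError::err("...").into() conversion can be seen in isolation. It assumes the failure crate with its derive feature; Ping and the &str "connection" are stand-ins invented for the example:

    use failure::{Error, Fail};

    #[derive(Fail, Debug)]
    #[fail(display = "{{\"error\":\"{}\"}}", message)]
    pub struct APIError {
        pub message: String,
    }

    impl APIError {
        pub fn err(msg: &str) -> Self {
            APIError {
                message: msg.to_string(),
            }
        }
    }

    pub struct Oper<T> {
        data: T,
    }

    impl<T> Oper<T> {
        pub fn new(data: T) -> Oper<T> {
            Oper { data }
        }
    }

    pub trait Perform<T> {
        // The real trait takes `conn: &PgConnection`; a &str stands in here.
        fn perform(&self, conn: &str) -> Result<T, Error>;
    }

    struct Ping {
        fail: bool,
    }

    impl Perform<String> for Oper<Ping> {
        fn perform(&self, conn: &str) -> Result<String, Error> {
            if self.data.fail {
                // `.into()` compiles because APIError derives Fail and
                // failure::Error implements From<T> for every T: Fail.
                return Err(APIError::err("not_logged_in").into());
            }
            Ok(format!("pong via {}", conn))
        }
    }

    fn main() -> Result<(), Error> {
        println!("{}", Oper::new(Ping { fail: false }).perform("conn")?);
        Ok(())
    }

Threading the connection through also means a caller can reuse one connection, or hand out pooled connections, across many operations instead of opening a new one per request.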


@ -1,4 +1,5 @@
use super::*; use super::*;
use diesel::PgConnection;
use std::str::FromStr; use std::str::FromStr;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -7,13 +8,12 @@ pub struct CreatePost {
url: Option<String>, url: Option<String>,
body: Option<String>, body: Option<String>,
nsfw: bool, nsfw: bool,
community_id: i32, pub community_id: i32,
auth: String, auth: String,
} }
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct PostResponse { pub struct PostResponse {
op: String,
pub post: PostView, pub post: PostView,
} }
@ -25,12 +25,12 @@ pub struct GetPost {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetPostResponse { pub struct GetPostResponse {
op: String,
post: PostView, post: PostView,
comments: Vec<CommentView>, comments: Vec<CommentView>,
community: CommunityView, community: CommunityView,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>, admins: Vec<UserView>,
pub online: usize,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -39,13 +39,12 @@ pub struct GetPosts {
sort: String, sort: String,
page: Option<i64>, page: Option<i64>,
limit: Option<i64>, limit: Option<i64>,
community_id: Option<i32>, pub community_id: Option<i32>,
auth: Option<String>, auth: Option<String>,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetPostsResponse { pub struct GetPostsResponse {
op: String,
posts: Vec<PostView>, posts: Vec<PostView>,
} }
@ -56,12 +55,6 @@ pub struct CreatePostLike {
auth: String, auth: String,
} }
#[derive(Serialize, Deserialize)]
pub struct CreatePostLikeResponse {
op: String,
post: PostView,
}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct EditPost { pub struct EditPost {
pub edit_id: i32, pub edit_id: i32,
@ -87,29 +80,34 @@ pub struct SavePost {
} }
impl Perform<PostResponse> for Oper<CreatePost> { impl Perform<PostResponse> for Oper<CreatePost> {
fn perform(&self) -> Result<PostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &CreatePost = &self.data; let data: &CreatePost = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) || (data.body.is_some() && has_slurs(&data.body.to_owned().unwrap())) { if let Err(slurs) = slur_check(&data.name) {
return Err(APIError::err(&self.op, "no_slurs"))?; return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
if let Some(body) = &data.body {
if let Err(slurs) = slur_check(body) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Check for a community ban // Check for a community ban
if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
let post_form = PostForm { let post_form = PostForm {
@ -128,39 +126,35 @@ impl Perform<PostResponse> for Oper<CreatePost> {
let inserted_post = match Post::create(&conn, &post_form) { let inserted_post = match Post::create(&conn, &post_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_create_post"))?, Err(_e) => return Err(APIError::err("couldnt_create_post").into()),
}; };
// They like their own post by default // They like their own post by default
let like_form = PostLikeForm { let like_form = PostLikeForm {
post_id: inserted_post.id, post_id: inserted_post.id,
user_id: user_id, user_id,
score: 1, score: 1,
}; };
// Only add the like if the score isnt 0 // Only add the like if the score isnt 0
let _inserted_like = match PostLike::like(&conn, &like_form) { let _inserted_like = match PostLike::like(&conn, &like_form) {
Ok(like) => like, Ok(like) => like,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_post"))?, Err(_e) => return Err(APIError::err("couldnt_like_post").into()),
}; };
// Refetch the view // Refetch the view
let post_view = match PostView::read(&conn, inserted_post.id, Some(user_id)) { let post_view = match PostView::read(&conn, inserted_post.id, Some(user_id)) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_post"))?, Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
}; };
Ok(PostResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
impl Perform<GetPostResponse> for Oper<GetPost> { impl Perform<GetPostResponse> for Oper<GetPost> {
fn perform(&self) -> Result<GetPostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetPostResponse, Error> {
let data: &GetPost = &self.data; let data: &GetPost = &self.data;
let conn = establish_connection();
let user_id: Option<i32> = match &data.auth { let user_id: Option<i32> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -175,20 +169,14 @@ impl Perform<GetPostResponse> for Oper<GetPost> {
let post_view = match PostView::read(&conn, data.id, user_id) { let post_view = match PostView::read(&conn, data.id, user_id) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_post"))?, Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
}; };
let comments = CommentView::list( let comments = CommentQueryBuilder::create(&conn)
&conn, .for_post_id(data.id)
&SortType::New, .my_user_id(user_id)
Some(data.id), .limit(9999)
None, .list()?;
None,
user_id,
false,
None,
Some(9999),
)?;
let community = CommunityView::read(&conn, post_view.community_id, user_id)?; let community = CommunityView::read(&conn, post_view.community_id, user_id)?;
@ -202,20 +190,19 @@ impl Perform<GetPostResponse> for Oper<GetPost> {
// Return the jwt // Return the jwt
Ok(GetPostResponse { Ok(GetPostResponse {
op: self.op.to_string(),
post: post_view, post: post_view,
comments: comments, comments,
community: community, community,
moderators: moderators, moderators,
admins: admins, admins,
online: 0,
}) })
} }
} }
impl Perform<GetPostsResponse> for Oper<GetPosts> { impl Perform<GetPostsResponse> for Oper<GetPosts> {
fn perform(&self) -> Result<GetPostsResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetPostsResponse, Error> {
let data: &GetPosts = &self.data; let data: &GetPosts = &self.data;
let conn = establish_connection();
let user_claims: Option<Claims> = match &data.auth { let user_claims: Option<Claims> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -235,61 +222,60 @@ impl Perform<GetPostsResponse> for Oper<GetPosts> {
None => false, None => false,
}; };
let type_ = PostListingType::from_str(&data.type_)?; let type_ = ListingType::from_str(&data.type_)?;
let sort = SortType::from_str(&data.sort)?; let sort = SortType::from_str(&data.sort)?;
let posts = match PostView::list( let posts = match PostQueryBuilder::create(&conn)
&conn, .listing_type(type_)
type_, .sort(&sort)
&sort, .show_nsfw(show_nsfw)
data.community_id, .for_community_id(data.community_id)
None, .my_user_id(user_id)
None, .page(data.page)
None, .limit(data.limit)
user_id, .list()
show_nsfw, {
false,
false,
data.page,
data.limit,
) {
Ok(posts) => posts, Ok(posts) => posts,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_get_posts"))?, Err(_e) => return Err(APIError::err("couldnt_get_posts").into()),
}; };
Ok(GetPostsResponse { Ok(GetPostsResponse { posts })
op: self.op.to_string(),
posts: posts,
})
} }
} }
impl Perform<CreatePostLikeResponse> for Oper<CreatePostLike> { impl Perform<PostResponse> for Oper<CreatePostLike> {
fn perform(&self) -> Result<CreatePostLikeResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &CreatePostLike = &self.data; let data: &CreatePostLike = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
// Don't do a downvote if site has downvotes disabled
if data.score == -1 {
let site = SiteView::read(&conn)?;
if !site.enable_downvotes {
return Err(APIError::err("downvotes_disabled").into());
}
}
// Check for a community ban // Check for a community ban
let post = Post::read(&conn, data.post_id)?; let post = Post::read(&conn, data.post_id)?;
if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
let like_form = PostLikeForm { let like_form = PostLikeForm {
post_id: data.post_id, post_id: data.post_id,
user_id: user_id, user_id,
score: data.score, score: data.score,
}; };
@ -297,39 +283,41 @@ impl Perform<CreatePostLikeResponse> for Oper<CreatePostLike> {
PostLike::remove(&conn, &like_form)?; PostLike::remove(&conn, &like_form)?;
// Only add the like if the score isnt 0 // Only add the like if the score isnt 0
let do_add = &like_form.score != &0 && (&like_form.score == &1 || &like_form.score == &-1); let do_add = like_form.score != 0 && (like_form.score == 1 || like_form.score == -1);
if do_add { if do_add {
let _inserted_like = match PostLike::like(&conn, &like_form) { let _inserted_like = match PostLike::like(&conn, &like_form) {
Ok(like) => like, Ok(like) => like,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_post"))?, Err(_e) => return Err(APIError::err("couldnt_like_post").into()),
}; };
} }
let post_view = match PostView::read(&conn, data.post_id, Some(user_id)) { let post_view = match PostView::read(&conn, data.post_id, Some(user_id)) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_post"))?, Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
}; };
// just output the score // just output the score
Ok(CreatePostLikeResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
impl Perform<PostResponse> for Oper<EditPost> { impl Perform<PostResponse> for Oper<EditPost> {
fn perform(&self) -> Result<PostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &EditPost = &self.data; let data: &EditPost = &self.data;
if has_slurs(&data.name) || (data.body.is_some() && has_slurs(&data.body.to_owned().unwrap())) {
return Err(APIError::err(&self.op, "no_slurs"))?; if let Err(slurs) = slur_check(&data.name) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
} }
let conn = establish_connection(); if let Some(body) = &data.body {
if let Err(slurs) = slur_check(body) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
}
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -344,17 +332,17 @@ impl Perform<PostResponse> for Oper<EditPost> {
); );
editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect()); editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect());
if !editors.contains(&user_id) { if !editors.contains(&user_id) {
return Err(APIError::err(&self.op, "no_post_edit_allowed"))?; return Err(APIError::err("no_post_edit_allowed").into());
} }
// Check for a community ban // Check for a community ban
if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
let post_form = PostForm { let post_form = PostForm {
@ -373,7 +361,7 @@ impl Perform<PostResponse> for Oper<EditPost> {
let _updated_post = match Post::update(&conn, data.edit_id, &post_form) { let _updated_post = match Post::update(&conn, data.edit_id, &post_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_post"))?, Err(_e) => return Err(APIError::err("couldnt_update_post").into()),
}; };
// Mod tables // Mod tables
@ -407,47 +395,40 @@ impl Perform<PostResponse> for Oper<EditPost> {
let post_view = PostView::read(&conn, data.edit_id, Some(user_id))?; let post_view = PostView::read(&conn, data.edit_id, Some(user_id))?;
Ok(PostResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
impl Perform<PostResponse> for Oper<SavePost> { impl Perform<PostResponse> for Oper<SavePost> {
fn perform(&self) -> Result<PostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &SavePost = &self.data; let data: &SavePost = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let post_saved_form = PostSavedForm { let post_saved_form = PostSavedForm {
post_id: data.post_id, post_id: data.post_id,
user_id: user_id, user_id,
}; };
if data.save { if data.save {
match PostSaved::save(&conn, &post_saved_form) { match PostSaved::save(&conn, &post_saved_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_post"))?, Err(_e) => return Err(APIError::err("couldnt_save_post").into()),
}; };
} else { } else {
match PostSaved::unsave(&conn, &post_saved_form) { match PostSaved::unsave(&conn, &post_saved_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_post"))?, Err(_e) => return Err(APIError::err("couldnt_save_post").into()),
}; };
} }
let post_view = PostView::read(&conn, data.post_id, Some(user_id))?; let post_view = PostView::read(&conn, data.post_id, Some(user_id))?;
Ok(PostResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
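
The long positional argument lists of PostView::list and CommentView::list give way here to chainable query builders (PostQueryBuilder, CommentQueryBuilder, and friends) ending in .list(). The builders' internals are not part of this diff, so the skeleton below only illustrates the shape implied by the call sites; the struct fields and the fake list() body are placeholders, not the real diesel-backed implementation:

    #[derive(Default)]
    struct PostQuery {
        show_nsfw: bool,
        for_community_id: Option<i32>,
        search_term: Option<String>,
        page: Option<i64>,
        limit: Option<i64>,
    }

    impl PostQuery {
        fn create() -> Self {
            Self::default()
        }
        fn show_nsfw(mut self, v: bool) -> Self {
            self.show_nsfw = v;
            self
        }
        fn for_community_id(mut self, v: Option<i32>) -> Self {
            self.for_community_id = v;
            self
        }
        fn search_term(mut self, v: String) -> Self {
            self.search_term = Some(v);
            self
        }
        fn page(mut self, v: Option<i64>) -> Self {
            self.page = v;
            self
        }
        fn limit(mut self, v: Option<i64>) -> Self {
            self.limit = v;
            self
        }
        fn list(self) -> Vec<String> {
            // A real implementation would build and run a diesel query here;
            // this stub only shows why optional filters read better as
            // chained setters than as a dozen positional Option arguments.
            vec![format!(
                "posts: show_nsfw={} community={:?} q={:?} page={:?} limit={:?}",
                self.show_nsfw, self.for_community_id, self.search_term, self.page, self.limit
            )]
        }
    }

    fn main() {
        let posts = PostQuery::create()
            .show_nsfw(true)
            .for_community_id(Some(2))
            .search_term("rust".to_string())
            .page(Some(1))
            .limit(Some(10))
            .list();
        println!("{:?}", posts);
    }

Each setter takes and returns the builder by value, so unused filters simply keep their defaults and the final .list() consumes the builder.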


@ -1,12 +1,12 @@
use super::*; use super::*;
use diesel::PgConnection;
use std::str::FromStr; use std::str::FromStr;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct ListCategories; pub struct ListCategories {}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct ListCategoriesResponse { pub struct ListCategoriesResponse {
op: String,
categories: Vec<Category>, categories: Vec<Category>,
} }
@ -18,11 +18,11 @@ pub struct Search {
sort: String, sort: String,
page: Option<i64>, page: Option<i64>,
limit: Option<i64>, limit: Option<i64>,
auth: Option<String>,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct SearchResponse { pub struct SearchResponse {
op: String,
type_: String, type_: String,
comments: Vec<CommentView>, comments: Vec<CommentView>,
posts: Vec<PostView>, posts: Vec<PostView>,
@ -40,7 +40,6 @@ pub struct GetModlog {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetModlogResponse { pub struct GetModlogResponse {
op: String,
removed_posts: Vec<ModRemovePostView>, removed_posts: Vec<ModRemovePostView>,
locked_posts: Vec<ModLockPostView>, locked_posts: Vec<ModLockPostView>,
stickied_posts: Vec<ModStickyPostView>, stickied_posts: Vec<ModStickyPostView>,
@ -56,6 +55,9 @@ pub struct GetModlogResponse {
pub struct CreateSite { pub struct CreateSite {
name: String, name: String,
description: Option<String>, description: Option<String>,
enable_downvotes: bool,
open_registration: bool,
enable_nsfw: bool,
auth: String, auth: String,
} }
@ -63,21 +65,22 @@ pub struct CreateSite {
pub struct EditSite { pub struct EditSite {
name: String, name: String,
description: Option<String>, description: Option<String>,
enable_downvotes: bool,
open_registration: bool,
enable_nsfw: bool,
auth: String, auth: String,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetSite; pub struct GetSite {}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct SiteResponse { pub struct SiteResponse {
op: String,
site: SiteView, site: SiteView,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetSiteResponse { pub struct GetSiteResponse {
op: String,
site: Option<SiteView>, site: Option<SiteView>,
admins: Vec<UserView>, admins: Vec<UserView>,
banned: Vec<UserView>, banned: Vec<UserView>,
@ -91,24 +94,19 @@ pub struct TransferSite {
} }
impl Perform<ListCategoriesResponse> for Oper<ListCategories> { impl Perform<ListCategoriesResponse> for Oper<ListCategories> {
fn perform(&self) -> Result<ListCategoriesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<ListCategoriesResponse, Error> {
let _data: &ListCategories = &self.data; let _data: &ListCategories = &self.data;
let conn = establish_connection();
let categories: Vec<Category> = Category::list_all(&conn)?; let categories: Vec<Category> = Category::list_all(&conn)?;
// Return the jwt // Return the jwt
Ok(ListCategoriesResponse { Ok(ListCategoriesResponse { categories })
op: self.op.to_string(),
categories: categories,
})
} }
} }
impl Perform<GetModlogResponse> for Oper<GetModlog> { impl Perform<GetModlogResponse> for Oper<GetModlog> {
fn perform(&self) -> Result<GetModlogResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetModlogResponse, Error> {
let data: &GetModlog = &self.data; let data: &GetModlog = &self.data;
let conn = establish_connection();
let removed_posts = ModRemovePostView::list( let removed_posts = ModRemovePostView::list(
&conn, &conn,
@ -154,98 +152,102 @@ impl Perform<GetModlogResponse> for Oper<GetModlog> {
)?; )?;
     // These arrays are only for the full modlog, when a community isn't given
-    let mut removed_communities = Vec::new();
-    let mut banned = Vec::new();
-    let mut added = Vec::new();
-
-    if data.community_id.is_none() {
-      removed_communities =
-        ModRemoveCommunityView::list(&conn, data.mod_user_id, data.page, data.limit)?;
-      banned = ModBanView::list(&conn, data.mod_user_id, data.page, data.limit)?;
-      added = ModAddView::list(&conn, data.mod_user_id, data.page, data.limit)?;
-    }
+    let (removed_communities, banned, added) = if data.community_id.is_none() {
+      (
+        ModRemoveCommunityView::list(&conn, data.mod_user_id, data.page, data.limit)?,
+        ModBanView::list(&conn, data.mod_user_id, data.page, data.limit)?,
+        ModAddView::list(&conn, data.mod_user_id, data.page, data.limit)?,
+      )
+    } else {
+      (Vec::new(), Vec::new(), Vec::new())
+    };

     // Return the jwt
     Ok(GetModlogResponse {
-      op: self.op.to_string(),
-      removed_posts: removed_posts,
-      locked_posts: locked_posts,
-      stickied_posts: stickied_posts,
-      removed_comments: removed_comments,
-      removed_communities: removed_communities,
-      banned_from_community: banned_from_community,
-      banned: banned,
-      added_to_community: added_to_community,
-      added: added,
+      removed_posts,
+      locked_posts,
+      stickied_posts,
+      removed_comments,
+      removed_communities,
+      banned_from_community,
+      banned,
+      added_to_community,
+      added,
     })
} }
} }
impl Perform<SiteResponse> for Oper<CreateSite> { impl Perform<SiteResponse> for Oper<CreateSite> {
fn perform(&self) -> Result<SiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<SiteResponse, Error> {
let data: &CreateSite = &self.data; let data: &CreateSite = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) if let Err(slurs) = slur_check(&data.name) {
|| (data.description.is_some() && has_slurs(&data.description.to_owned().unwrap())) return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
{ }
return Err(APIError::err(&self.op, "no_slurs"))?;
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Make sure user is an admin // Make sure user is an admin
if !UserView::read(&conn, user_id)?.admin { if !UserView::read(&conn, user_id)?.admin {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let site_form = SiteForm { let site_form = SiteForm {
name: data.name.to_owned(), name: data.name.to_owned(),
description: data.description.to_owned(), description: data.description.to_owned(),
creator_id: user_id, creator_id: user_id,
enable_downvotes: data.enable_downvotes,
open_registration: data.open_registration,
enable_nsfw: data.enable_nsfw,
updated: None, updated: None,
}; };
match Site::create(&conn, &site_form) { match Site::create(&conn, &site_form) {
Ok(site) => site, Ok(site) => site,
Err(_e) => return Err(APIError::err(&self.op, "site_already_exists"))?, Err(_e) => return Err(APIError::err("site_already_exists").into()),
}; };
let site_view = SiteView::read(&conn)?; let site_view = SiteView::read(&conn)?;
Ok(SiteResponse { Ok(SiteResponse { site: site_view })
op: self.op.to_string(),
site: site_view,
})
} }
} }
impl Perform<SiteResponse> for Oper<EditSite> { impl Perform<SiteResponse> for Oper<EditSite> {
fn perform(&self) -> Result<SiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<SiteResponse, Error> {
let data: &EditSite = &self.data; let data: &EditSite = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) if let Err(slurs) = slur_check(&data.name) {
|| (data.description.is_some() && has_slurs(&data.description.to_owned().unwrap())) return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
{ }
return Err(APIError::err(&self.op, "no_slurs"))?;
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Make sure user is an admin // Make sure user is an admin
if UserView::read(&conn, user_id)?.admin == false { if !UserView::read(&conn, user_id)?.admin {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let found_site = Site::read(&conn, 1)?; let found_site = Site::read(&conn, 1)?;
@ -255,26 +257,25 @@ impl Perform<SiteResponse> for Oper<EditSite> {
description: data.description.to_owned(), description: data.description.to_owned(),
creator_id: found_site.creator_id, creator_id: found_site.creator_id,
updated: Some(naive_now()), updated: Some(naive_now()),
enable_downvotes: data.enable_downvotes,
open_registration: data.open_registration,
enable_nsfw: data.enable_nsfw,
}; };
match Site::update(&conn, 1, &site_form) { match Site::update(&conn, 1, &site_form) {
Ok(site) => site, Ok(site) => site,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_site"))?, Err(_e) => return Err(APIError::err("couldnt_update_site").into()),
}; };
let site_view = SiteView::read(&conn)?; let site_view = SiteView::read(&conn)?;
Ok(SiteResponse { Ok(SiteResponse { site: site_view })
op: self.op.to_string(),
site: site_view,
})
} }
} }
impl Perform<GetSiteResponse> for Oper<GetSite> { impl Perform<GetSiteResponse> for Oper<GetSite> {
fn perform(&self) -> Result<GetSiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetSiteResponse, Error> {
let _data: &GetSite = &self.data; let _data: &GetSite = &self.data;
let conn = establish_connection();
// It can return a null site in order to redirect // It can return a null site in order to redirect
let site_view = match Site::read(&conn, 1) { let site_view = match Site::read(&conn, 1) {
@ -293,19 +294,28 @@ impl Perform<GetSiteResponse> for Oper<GetSite> {
let banned = UserView::banned(&conn)?; let banned = UserView::banned(&conn)?;
Ok(GetSiteResponse { Ok(GetSiteResponse {
op: self.op.to_string(),
site: site_view, site: site_view,
admins: admins, admins,
banned: banned, banned,
online: 0, online: 0,
}) })
} }
} }
impl Perform<SearchResponse> for Oper<Search> { impl Perform<SearchResponse> for Oper<Search> {
fn perform(&self) -> Result<SearchResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<SearchResponse, Error> {
let data: &Search = &self.data; let data: &Search = &self.data;
let conn = establish_connection();
let user_id: Option<i32> = match &data.auth {
Some(auth) => match Claims::decode(&auth) {
Ok(claims) => {
let user_id = claims.claims.id;
Some(user_id)
}
Err(_e) => None,
},
None => None,
};
let sort = SortType::from_str(&data.sort)?; let sort = SortType::from_str(&data.sort)?;
let type_ = SearchType::from_str(&data.type_)?; let type_ = SearchType::from_str(&data.type_)?;
@ -319,126 +329,104 @@ impl Perform<SearchResponse> for Oper<Search> {
     match type_ {
       SearchType::Posts => {
-        posts = PostView::list(
-          &conn,
-          PostListingType::All,
-          &sort,
-          data.community_id,
-          None,
-          Some(data.q.to_owned()),
-          None,
-          None,
-          true,
-          false,
-          false,
-          data.page,
-          data.limit,
-        )?;
+        posts = PostQueryBuilder::create(&conn)
+          .sort(&sort)
+          .show_nsfw(true)
+          .for_community_id(data.community_id)
+          .search_term(data.q.to_owned())
+          .my_user_id(user_id)
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
       }
       SearchType::Comments => {
-        comments = CommentView::list(
-          &conn,
-          &sort,
-          None,
-          None,
-          Some(data.q.to_owned()),
-          None,
-          false,
-          data.page,
-          data.limit,
-        )?;
+        comments = CommentQueryBuilder::create(&conn)
+          .sort(&sort)
+          .search_term(data.q.to_owned())
+          .my_user_id(user_id)
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
       }
       SearchType::Communities => {
-        communities = CommunityView::list(
-          &conn,
-          &sort,
-          None,
-          true,
-          Some(data.q.to_owned()),
-          data.page,
-          data.limit,
-        )?;
+        communities = CommunityQueryBuilder::create(&conn)
+          .sort(&sort)
+          .search_term(data.q.to_owned())
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
       }
       SearchType::Users => {
-        users = UserView::list(&conn, &sort, Some(data.q.to_owned()), data.page, data.limit)?;
+        users = UserQueryBuilder::create(&conn)
+          .sort(&sort)
+          .search_term(data.q.to_owned())
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
       }
       SearchType::All => {
-        posts = PostView::list(
-          &conn,
-          PostListingType::All,
-          &sort,
-          data.community_id,
-          None,
-          Some(data.q.to_owned()),
-          None,
-          None,
-          true,
-          false,
-          false,
-          data.page,
-          data.limit,
-        )?;
-        comments = CommentView::list(
-          &conn,
-          &sort,
-          None,
-          None,
-          Some(data.q.to_owned()),
-          None,
-          false,
-          data.page,
-          data.limit,
-        )?;
-        communities = CommunityView::list(
-          &conn,
-          &sort,
-          None,
-          true,
-          Some(data.q.to_owned()),
-          data.page,
-          data.limit,
-        )?;
-        users = UserView::list(&conn, &sort, Some(data.q.to_owned()), data.page, data.limit)?;
+        posts = PostQueryBuilder::create(&conn)
+          .sort(&sort)
+          .show_nsfw(true)
+          .for_community_id(data.community_id)
+          .search_term(data.q.to_owned())
+          .my_user_id(user_id)
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
+
+        comments = CommentQueryBuilder::create(&conn)
+          .sort(&sort)
+          .search_term(data.q.to_owned())
+          .my_user_id(user_id)
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
+
+        communities = CommunityQueryBuilder::create(&conn)
+          .sort(&sort)
+          .search_term(data.q.to_owned())
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
+
+        users = UserQueryBuilder::create(&conn)
+          .sort(&sort)
+          .search_term(data.q.to_owned())
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
       }
       SearchType::Url => {
-        posts = PostView::list(
-          &conn,
-          PostListingType::All,
-          &sort,
-          data.community_id,
-          None,
-          None,
-          Some(data.q.to_owned()),
-          None,
-          true,
-          false,
-          false,
-          data.page,
-          data.limit,
-        )?;
+        posts = PostQueryBuilder::create(&conn)
+          .sort(&sort)
+          .show_nsfw(true)
+          .for_community_id(data.community_id)
+          .url_search(data.q.to_owned())
+          .page(data.page)
+          .limit(data.limit)
+          .list()?;
       }
     };

     // Return the jwt
     Ok(SearchResponse {
-      op: self.op.to_string(),
       type_: data.type_.to_owned(),
-      comments: comments,
-      posts: posts,
-      communities: communities,
-      users: users,
+      comments,
+      posts,
+      communities,
+      users,
     })
} }
} }
impl Perform<GetSiteResponse> for Oper<TransferSite> { impl Perform<GetSiteResponse> for Oper<TransferSite> {
fn perform(&self) -> Result<GetSiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetSiteResponse, Error> {
let data: &TransferSite = &self.data; let data: &TransferSite = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -447,7 +435,7 @@ impl Perform<GetSiteResponse> for Oper<TransferSite> {
// Make sure user is the creator // Make sure user is the creator
if read_site.creator_id != user_id { if read_site.creator_id != user_id {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let site_form = SiteForm { let site_form = SiteForm {
@ -455,11 +443,14 @@ impl Perform<GetSiteResponse> for Oper<TransferSite> {
description: read_site.description, description: read_site.description,
creator_id: data.user_id, creator_id: data.user_id,
updated: Some(naive_now()), updated: Some(naive_now()),
enable_downvotes: read_site.enable_downvotes,
open_registration: read_site.open_registration,
enable_nsfw: read_site.enable_nsfw,
}; };
match Site::update(&conn, 1, &site_form) { match Site::update(&conn, 1, &site_form) {
Ok(site) => site, Ok(site) => site,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_site"))?, Err(_e) => return Err(APIError::err("couldnt_update_site").into()),
}; };
// Mod tables // Mod tables
@ -484,10 +475,9 @@ impl Perform<GetSiteResponse> for Oper<TransferSite> {
let banned = UserView::banned(&conn)?; let banned = UserView::banned(&conn)?;
Ok(GetSiteResponse { Ok(GetSiteResponse {
op: self.op.to_string(),
site: Some(site_view), site: Some(site_view),
admins: admins, admins,
banned: banned, banned,
online: 0, online: 0,
}) })
} }
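
CreateSite and EditSite pick up three instance-wide switches (enable_downvotes, open_registration, enable_nsfw) that are stored on the site row and enforced elsewhere in this commit; CreatePostLike, for instance, now rejects a -1 score with downvotes_disabled when the first flag is off. A client payload for the new shape would look roughly like the following; the values and the placeholder JWT are only an example:

    use serde_json::json;

    fn main() {
        // Field names match the CreateSite struct above; everything else is
        // sample data.
        let create_site = json!({
            "name": "Example Lemmy",
            "description": "A test instance",
            "enable_downvotes": false,
            "open_registration": true,
            "enable_nsfw": false,
            "auth": "<jwt>"
        });
        println!("{}", serde_json::to_string_pretty(&create_site).unwrap());
    }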

File diff suppressed because it is too large


@ -1,86 +0,0 @@
extern crate activitypub;
use self::activitypub::{actor::Person, context};
use crate::db::user::User_;
impl User_ {
pub fn person(&self) -> Person {
use crate::{to_datetime_utc, Settings};
let base_url = &format!("{}/user/{}", Settings::get().api_endpoint(), self.name);
let mut person = Person::default();
person.object_props.set_context_object(context()).ok();
person.object_props.set_id_string(base_url.to_string()).ok();
person
.object_props
.set_name_string(self.name.to_owned())
.ok();
person
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(i) = self.updated {
person
.object_props
.set_updated_utctime(to_datetime_utc(i))
.ok();
}
// person.object_props.summary = self.summary;
person
.ap_actor_props
.set_inbox_string(format!("{}/inbox", &base_url))
.ok();
person
.ap_actor_props
.set_outbox_string(format!("{}/outbox", &base_url))
.ok();
person
.ap_actor_props
.set_following_string(format!("{}/following", &base_url))
.ok();
person
.ap_actor_props
.set_liked_string(format!("{}/liked", &base_url))
.ok();
if let Some(i) = &self.preferred_username {
person
.ap_actor_props
.set_preferred_username_string(i.to_string())
.ok();
}
person
}
}
#[cfg(test)]
mod tests {
use super::User_;
use crate::naive_now;
#[test]
fn test_person() {
let expected_user = User_ {
id: 52,
name: "thom".into(),
fedi_name: "rrf".into(),
preferred_username: None,
password_encrypted: "here".into(),
email: None,
icon: None,
published: naive_now(),
admin: false,
banned: false,
updated: None,
show_nsfw: false,
theme: "darkly".into(),
};
let person = expected_user.person();
assert_eq!(
"rrr/api/v1/user/thom",
person.object_props.id_string().unwrap()
);
let json = serde_json::to_string_pretty(&person).unwrap();
println!("{}", json);
}
}


@@ -0,0 +1,109 @@
use crate::apub::make_apub_endpoint;
use crate::db::community::Community;
use crate::db::community_view::CommunityFollowerView;
use crate::db::establish_unpooled_connection;
use crate::to_datetime_utc;
use activitypub::{actor::Group, collection::UnorderedCollection, context};
use actix_web::body::Body;
use actix_web::web::Path;
use actix_web::HttpResponse;
use serde::Deserialize;
impl Community {
pub fn as_group(&self) -> Group {
let base_url = make_apub_endpoint("c", &self.name);
let mut group = Group::default();
group.object_props.set_context_object(context()).ok();
group.object_props.set_id_string(base_url.to_string()).ok();
group
.object_props
.set_name_string(self.name.to_owned())
.ok();
group
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(updated) = self.updated {
group
.object_props
.set_updated_utctime(to_datetime_utc(updated))
.ok();
}
if let Some(description) = &self.description {
group
.object_props
.set_summary_string(description.to_string())
.ok();
}
group
.ap_actor_props
.set_inbox_string(format!("{}/inbox", &base_url))
.ok();
group
.ap_actor_props
.set_outbox_string(format!("{}/outbox", &base_url))
.ok();
group
.ap_actor_props
.set_followers_string(format!("{}/followers", &base_url))
.ok();
group
}
pub fn followers_as_collection(&self) -> UnorderedCollection {
let base_url = make_apub_endpoint("c", &self.name);
let mut collection = UnorderedCollection::default();
collection.object_props.set_context_object(context()).ok();
collection.object_props.set_id_string(base_url).ok();
let connection = establish_unpooled_connection();
//As we are an object, we validated that the community id was valid
let community_followers = CommunityFollowerView::for_community(&connection, self.id).unwrap();
let ap_followers = community_followers
.iter()
.map(|follower| make_apub_endpoint("u", &follower.user_name))
.collect();
collection
.collection_props
.set_items_string_vec(ap_followers)
.unwrap();
collection
}
}
#[derive(Deserialize)]
pub struct CommunityQuery {
community_name: String,
}
pub async fn get_apub_community(info: Path<CommunityQuery>) -> HttpResponse<Body> {
let connection = establish_unpooled_connection();
if let Ok(community) = Community::read_from_name(&connection, info.community_name.to_owned()) {
HttpResponse::Ok()
.content_type("application/activity+json")
.body(serde_json::to_string(&community.as_group()).unwrap())
} else {
HttpResponse::NotFound().finish()
}
}
pub async fn get_apub_community_followers(info: Path<CommunityQuery>) -> HttpResponse<Body> {
let connection = establish_unpooled_connection();
if let Ok(community) = Community::read_from_name(&connection, info.community_name.to_owned()) {
HttpResponse::Ok()
.content_type("application/activity+json")
.body(serde_json::to_string(&community.followers_as_collection()).unwrap())
} else {
HttpResponse::NotFound().finish()
}
}
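Both handlers serve application/activity+json, but this file does not show where they are mounted. Below is a sketch of how they could be registered in an actix-web app, assuming route paths that mirror the URLs produced by make_apub_endpoint; the federation_config helper and the exact paths are assumptions, not taken from this diff.

    use actix_web::web;

    // Hypothetical wiring; {community_name} must match the field name in CommunityQuery
    // so the Path extractor can deserialize it. Assumes the two handlers above are in scope.
    pub fn federation_config(cfg: &mut web::ServiceConfig) {
        cfg.route(
            "/federation/c/{community_name}",
            web::get().to(get_apub_community),
        )
        .route(
            "/federation/c/{community_name}/followers",
            web::get().to(get_apub_community_followers),
        );
    }

An App::new().configure(federation_config) call at server startup would then expose the two endpoints.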

103 server/src/apub/mod.rs Normal file

@@ -0,0 +1,103 @@
pub mod community;
pub mod post;
pub mod user;
use crate::Settings;
use std::fmt::Display;
#[cfg(test)]
mod tests {
use crate::db::community::Community;
use crate::db::post::Post;
use crate::db::user::User_;
use crate::db::{ListingType, SortType};
use crate::{naive_now, Settings};
#[test]
fn test_person() {
let user = User_ {
id: 52,
name: "thom".into(),
fedi_name: "rrf".into(),
preferred_username: None,
password_encrypted: "here".into(),
email: None,
matrix_user_id: None,
avatar: None,
published: naive_now(),
admin: false,
banned: false,
updated: None,
show_nsfw: false,
theme: "darkly".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
};
let person = user.as_person();
assert_eq!(
format!("https://{}/federation/u/thom", Settings::get().hostname),
person.object_props.id_string().unwrap()
);
}
#[test]
fn test_community() {
let community = Community {
id: 42,
name: "Test".into(),
title: "Test Title".into(),
description: Some("Test community".into()),
category_id: 32,
creator_id: 52,
removed: false,
published: naive_now(),
updated: Some(naive_now()),
deleted: false,
nsfw: false,
};
let group = community.as_group();
assert_eq!(
format!("https://{}/federation/c/Test", Settings::get().hostname),
group.object_props.id_string().unwrap()
);
}
#[test]
fn test_post() {
let post = Post {
id: 62,
name: "A test post".into(),
url: None,
body: None,
creator_id: 52,
community_id: 42,
published: naive_now(),
removed: false,
locked: false,
stickied: false,
nsfw: false,
deleted: false,
updated: None,
};
let page = post.as_page();
assert_eq!(
format!("https://{}/federation/post/62", Settings::get().hostname),
page.object_props.id_string().unwrap()
);
}
}
pub fn make_apub_endpoint<S: Display, T: Display>(point: S, value: T) -> String {
format!(
"https://{}/federation/{}/{}",
Settings::get().hostname,
point,
value
)
}
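Because make_apub_endpoint is generic over Display, the tests above can pass both string names and a numeric post id. A small additional check in the same style, purely illustrative and not part of the diff:

    #[test]
    fn test_make_apub_endpoint() {
        // Mirrors the assertions above: Settings::get().hostname supplies the host part.
        assert_eq!(
            format!("https://{}/federation/post/62", Settings::get().hostname),
            make_apub_endpoint("post", 62)
        );
    }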

38 server/src/apub/post.rs Normal file

@@ -0,0 +1,38 @@
use crate::apub::make_apub_endpoint;
use crate::db::post::Post;
use crate::to_datetime_utc;
use activitypub::{context, object::Page};
impl Post {
pub fn as_page(&self) -> Page {
let base_url = make_apub_endpoint("post", self.id);
let mut page = Page::default();
page.object_props.set_context_object(context()).ok();
page.object_props.set_id_string(base_url).ok();
page.object_props.set_name_string(self.name.to_owned()).ok();
if let Some(body) = &self.body {
page.object_props.set_content_string(body.to_owned()).ok();
}
if let Some(url) = &self.url {
page.object_props.set_url_string(url.to_owned()).ok();
}
//page.object_props.set_attributed_to_string
page
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(updated) = self.updated {
page
.object_props
.set_updated_utctime(to_datetime_utc(updated))
.ok();
}
page
}
}
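Unlike the community and user modules, this file only builds the ActivityPub object and does not yet expose an HTTP handler. Below is a sketch of what one could look like, mirroring get_apub_user in the next file; the PostQuery struct, the handler name, and the Crud::read-style lookup are assumptions, not part of this diff.

    // Hypothetical handler, mirroring get_apub_community / get_apub_user.
    use crate::db::establish_unpooled_connection;
    use crate::db::post::Post;
    use crate::db::Crud; // assumed: the trait providing Post::read(&conn, id)
    use actix_web::body::Body;
    use actix_web::web::Path;
    use actix_web::HttpResponse;
    use serde::Deserialize;

    #[derive(Deserialize)]
    pub struct PostQuery {
        post_id: i32,
    }

    // Would be mounted at something like /federation/post/{post_id}.
    pub async fn get_apub_post(info: Path<PostQuery>) -> HttpResponse<Body> {
        let connection = establish_unpooled_connection();
        if let Ok(post) = Post::read(&connection, info.post_id) {
            HttpResponse::Ok()
                .content_type("application/activity+json")
                .body(serde_json::to_string(&post.as_page()).unwrap())
        } else {
            HttpResponse::NotFound().finish()
        }
    }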

74 server/src/apub/user.rs Normal file

@@ -0,0 +1,74 @@
use crate::apub::make_apub_endpoint;
use crate::db::establish_unpooled_connection;
use crate::db::user::User_;
use crate::to_datetime_utc;
use activitypub::{actor::Person, context};
use actix_web::body::Body;
use actix_web::web::Path;
use actix_web::HttpResponse;
use serde::Deserialize;
impl User_ {
pub fn as_person(&self) -> Person {
let base_url = make_apub_endpoint("u", &self.name);
let mut person = Person::default();
person.object_props.set_context_object(context()).ok();
person.object_props.set_id_string(base_url.to_string()).ok();
person
.object_props
.set_name_string(self.name.to_owned())
.ok();
person
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(updated) = self.updated {
person
.object_props
.set_updated_utctime(to_datetime_utc(updated))
.ok();
}
person
.ap_actor_props
.set_inbox_string(format!("{}/inbox", &base_url))
.ok();
person
.ap_actor_props
.set_outbox_string(format!("{}/outbox", &base_url))
.ok();
person
.ap_actor_props
.set_following_string(format!("{}/following", &base_url))
.ok();
person
.ap_actor_props
.set_liked_string(format!("{}/liked", &base_url))
.ok();
if let Some(i) = &self.preferred_username {
person
.ap_actor_props
.set_preferred_username_string(i.to_string())
.ok();
}
person
}
}
#[derive(Deserialize)]
pub struct UserQuery {
user_name: String,
}
pub async fn get_apub_user(info: Path<UserQuery>) -> HttpResponse<Body> {
let connection = establish_unpooled_connection();
if let Ok(user) = User_::find_by_email_or_username(&connection, &info.user_name) {
HttpResponse::Ok()
.content_type("application/activity+json")
.body(serde_json::to_string(&user.as_person()).unwrap())
} else {
HttpResponse::NotFound().finish()
}
}


@@ -52,7 +52,7 @@ mod tests {
   use super::*;
   #[test]
   fn test_crud() {
-    let conn = establish_connection();
+    let conn = establish_unpooled_connection();
     let categories = Category::list_all(&conn).unwrap();
     let expected_first_category = Category {


@@ -166,7 +166,7 @@ mod tests {
   use super::*;
   #[test]
   fn test_crud() {
-    let conn = establish_connection();
+    let conn = establish_unpooled_connection();
     let new_user = UserForm {
       name: "terry".into(),
@@ -174,11 +174,18 @@ mod tests {
       preferred_username: None,
       password_encrypted: "nope".into(),
       email: None,
+      matrix_user_id: None,
+      avatar: None,
       admin: false,
       banned: false,
       updated: None,
       show_nsfw: false,
       theme: "darkly".into(),
+      default_sort_type: SortType::Hot as i16,
+      default_listing_type: ListingType::Subscribed as i16,
+      lang: "browser".into(),
+      show_avatars: true,
+      send_notifications_to_email: false,
     };
     let inserted_user = User_::create(&conn, &new_user).unwrap();

Some files were not shown because too many files have changed in this diff