Compare commits

514 commits

Author SHA1 Message Date
49e46322e2 Multiple Lemmy instances on same server 2020-03-11 00:20:43 +01:00
0880e597c0 Change ports and container names 2020-03-09 15:16:52 +01:00
17e307973f Move volumes into subfolder (ref #474) 2020-03-09 01:35:11 +01:00
Dessalines
9c1bcd6b26
Merge pull request #587 from StaticallyTypedRice/patch-2
Clarify that content violating this code of conduct will likely be removed.
2020-03-08 18:36:34 -04:00
Richie Zhang
8c17e694ef
Clarify that content violating this code of conduct will likely be removed.
Lemmy has a general policy of removing any content that violates the rules whenever possible, which is typically a good thing, and I felt it important to mention that in the code of conduct.
2020-03-08 12:51:07 -07:00
Dessalines
a0e497b793 Hide user karma on details page. Fixes #585 2020-03-08 14:33:04 -04:00
Dessalines
22d75990b7 Show ago for user details. Fixes #586 2020-03-08 14:29:17 -04:00
Dessalines
c5e9e9b674 Version v0.6.33 2020-03-08 00:46:15 -05:00
Dessalines
9d78760ebf Line break pre / view_source blocks. 2020-03-08 00:45:59 -05:00
Dessalines
08986241b6 Add icons for private message actions. Fixes #582 2020-03-08 00:27:58 -05:00
Dessalines
cfdf33b9d5 Version v0.6.32 2020-03-07 22:02:25 -05:00
Dessalines
fed75ae420 Merge remote-tracking branch 'weblate/master' 2020-03-07 22:02:20 -05:00
Dessalines
70ba959413 Iframely card now opens embed on title if available. 2020-03-07 20:01:31 -05:00
Dessalines
072952e1a6 Running cargo update. 2020-03-07 19:52:00 -05:00
Dessalines
966a6fc70b Iframely and pictshare backend mostly done. 2020-03-07 18:31:13 -05:00
Kitaiti Makoto
6adaa067c8 Translated using Weblate (Japanese)
Currently translated at 85.4% (200 of 234 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/ja/
2020-03-07 21:38:39 +00:00
Dessalines
52f5178649 Version v0.6.31 2020-03-06 15:08:04 -05:00
Dessalines
3b82f4887b Ask for confirmation on leaving pages with incomplete forms. Fixes #529 2020-03-06 14:57:52 -05:00
Dessalines
c3d4538219 Adding a sorting help. Fixes #532 2020-03-06 14:18:40 -05:00
Dessalines
585da4e911 Version v0.6.30 2020-03-06 13:24:12 -05:00
Dessalines
ebbc09672c Merge remote-tracking branch 'weblate/master' 2020-03-06 12:49:21 -05:00
Dessalines
10a18145fa Merge branch 'icons' into dev 2020-03-06 12:48:57 -05:00
Dessalines
6513a1d120 Reordering activitypub resources. 2020-03-06 12:48:51 -05:00
Dessalines
784bcc646b Adding post listing separators. Hiding full score unless downvotes. 2020-03-06 11:51:25 -05:00
Dessalines
64cad8bc3e Some comment-node additions
- Hiding extra vote counts if no downvotes.
- Showing numbers on actions if there are any.
2020-03-06 09:57:15 -05:00
Policarp
9481695682 Translated using Weblate (Russian)
Currently translated at 87.3% (201 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/ru/
2020-03-06 10:58:36 +00:00
ButterflyOfFire
cade656081 Translated using Weblate (Arabic)
Currently translated at 77.3% (178 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/ar/
2020-03-06 10:58:36 +00:00
olivia maia
c7a8b528a1 Translated using Weblate (Portuguese (Brazil))
Currently translated at 100.0% (230 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/pt_BR/
2020-03-06 10:58:36 +00:00
Riccardo Mazzon
a87c101ee7 Translated using Weblate (Italian)
Currently translated at 91.7% (211 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/it/
2020-03-06 10:58:36 +00:00
Felipe Forte
fa3941a6a4 Translated using Weblate (Portuguese (Brazil))
Currently translated at 100.0% (230 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/pt_BR/

Translated using Weblate (Portuguese (Brazil))

Currently translated at 99.5% (229 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/pt_BR/
2020-03-06 10:58:36 +00:00
Dessalines
c0a293c268 Merge branch 'dev' into icons 2020-03-05 15:48:14 -05:00
Dessalines
5418d45a82 Remove email from GetUserDetails when not same user. Fixes #579 2020-03-05 15:46:33 -05:00
Dessalines
d903ecb6d3 Proper comment-node depth coloring. 2020-03-05 15:10:46 -05:00
Dessalines
a5c58eb090 Thinner blockquotes. Corrected user on private messaging. 2020-03-05 10:18:48 -05:00
Dessalines
36ab3b67bf Adding hr separator for top level comment groups on mobile. 2020-03-05 10:02:23 -05:00
Dessalines
715ddc2c99 Change post-listing vote colors. 2020-03-05 00:39:22 -05:00
Dessalines
cdb2799191 Changing user names to bold text-body. Removing color lines on first comment. 2020-03-05 00:02:08 -05:00
Dessalines
8ecca704a2 Adding left border color, removing indent. 2020-03-04 23:36:42 -05:00
Dessalines
e013553ec1 Better tippy loading. Fixes #577 2020-03-04 22:35:55 -05:00
Dessalines
fc9d80e17c Adding separators for comment node title line. 2020-03-04 14:25:23 -05:00
Dessalines
dab85b09d0 Removing suffix on from now. 2020-03-04 14:24:45 -05:00
Dessalines
d6b8c68b11 Removing comment node other score colors for clarity. 2020-03-04 14:18:13 -05:00
Dessalines
e590b95a31 Switching from heart to zap symbol for points. 2020-03-04 14:13:08 -05:00
Dessalines
fe66a336e6 Add score color to bar 2020-03-04 14:06:03 -05:00
Dessalines
7015332d97 Moving comment voting to action bar. Adding plurals. 2020-03-04 13:52:11 -05:00
Dessalines
0dae5e910a Fixing post sorting by stickied on front end. Fixes #575 2020-03-04 11:46:34 -05:00
Dessalines
c74d8bfc64 Fixing select alignment. 2020-03-04 09:42:50 -05:00
Dessalines
216863a51f Changing view source icon. 2020-03-04 09:37:19 -05:00
Dessalines
219d728955 Adding icons to stickied, deleted, and locked. 2020-03-04 09:30:49 -05:00
Dessalines
0ce63d6ffa Adding icon-plus and minus-square. 2020-03-03 19:14:36 -05:00
Dessalines
342d226ca3 Updating tributejs 2020-03-03 17:37:27 -05:00
Dessalines
876bab17eb Only show tribute menu after a minlength of 3 characters.
- Fixes #562
2020-03-03 16:36:11 -05:00
Dessalines
c430bdfcd7 Removing icon-inline from message icon. 2020-03-03 11:46:29 -05:00
Dessalines
6da1e1b931 Change action sizes. Add better tippy updating. 2020-03-03 11:07:07 -05:00
Dessalines
ae9242a5c3 More additions to icons.
- Adding edit and trash icons for sidebars
- Adding pencil edit instead of modified.
2020-03-03 10:14:50 -05:00
Dessalines
da2cad4ebb Merge branch 'dev' into icons 2020-03-03 09:24:24 -05:00
Dessalines
9a5da04eb7 Add Lemmy Council governance document. 2020-03-03 09:23:43 -05:00
Dessalines
d92cd2f1d4 Some formatting. 2020-03-03 09:16:36 -05:00
Dessalines
65779be906 A first pass at adding icons, and tippy tooltips.
- Adding icons for post-listing, comment-node, and navbar.
- Adding html titles.
- Updating moment expand to use users locale.
2020-03-03 02:29:45 -05:00
Dessalines
2a3b866577 Moving link out of more menu. 2020-03-02 11:01:00 -05:00
Dessalines
50b768d39b Upgrading inferno. 2020-03-02 11:00:34 -05:00
Dessalines
3e58375fcc Fix image expand width on mobile. 2020-03-02 10:48:06 -05:00
Dessalines
d9f87f1bf5 Merge branch 'patch-1' of https://github.com/BoFFire/lemmy into BoFFire-patch-1 2020-03-01 18:36:35 -05:00
Dessalines
6ebdf610d4 Version v0.6.29 2020-03-01 18:26:17 -05:00
Dessalines
f4968b56ab Merge branch 'dev' 2020-03-01 18:22:31 -05:00
Dessalines
594ce2888f Fix weblate deploy. 2020-03-01 18:21:52 -05:00
Dessalines
1afd03580d Changing mobile columns. 2020-03-01 18:19:48 -05:00
Dessalines
f4191a52c3 Fixing weblate. 2020-03-01 18:19:05 -05:00
Felix Ableitner
45592bc466
remove utm parameter 2020-03-01 19:29:20 +00:00
Kitaiti Makoto
d04dcd88fb Translated using Weblate (Japanese)
Currently translated at 85.2% (196 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/ja/
2020-03-01 18:20:22 +00:00
ButterflyOfFire
3b37fcaefd
Update README.md
Adding translation widget %
2020-03-01 10:59:08 +01:00
ButterflyOfFire
c8b2feb2d0 Translated using Weblate (Arabic)
Currently translated at 61.7% (142 of 230 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/ar/
2020-03-01 09:20:29 +00:00
Dessalines
0a09d07231 Version v0.6.28 2020-02-29 22:24:00 -05:00
ButterflyOfFire
ad09df640e Translated using Weblate (Arabic)
Currently translated at 55.0% (126 of 229 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/ar/
2020-03-01 03:22:50 +00:00
ButterflyOfFire
b3b1210196 Added translation using Weblate (Arabic) 2020-03-01 03:22:50 +00:00
ButterflyOfFire
d8078cc9eb Translated using Weblate (French)
Currently translated at 100.0% (229 of 229 strings)

Translation: Lemmy/lemmy
Translate-URL: http://weblate.yerbamate.dev/projects/lemmy/lemmy/fr/
2020-03-01 03:22:50 +00:00
ButterflyOfFire
ff03b2dbb6 Translated using Weblate (French)
Currently translated at 100.0% (229 of 229 strings)

Translation: Lemmy/lemmy
Translate-URL: http://127.0.0.1/projects/lemmy/lemmy/fr/
2020-03-01 03:22:50 +00:00
Dessalines
71cd3e3f3b Moving view source. 2020-02-29 22:16:52 -05:00
Dessalines
8390744391 Adding a 'more' menu for advanced actions on comments and posts.
- Fixes #561
2020-02-29 22:06:42 -05:00
Dessalines
a5bfc837ea Some fixes for mobile view. 2020-02-29 21:04:42 -05:00
Dessalines
6deb41e3d9 Increasing mini-overlay size. #564 2020-02-29 17:16:42 -05:00
Dessalines
c6a6ca68d0 Merge branch 'master' into dev 2020-02-29 16:53:47 -05:00
Dessalines
68445a48a5 Adding git add to deploy. 2020-02-29 15:49:09 -05:00
Dessalines
d892a4f28c Version v0.6.27 2020-02-29 15:28:30 -05:00
Dessalines
f997eeca0a Translation additions.
- Adding Japanese. Fixes #566
- Adding some German translations. Fixes #567
- Fixing pt-br
2020-02-29 15:18:56 -05:00
Dessalines
429738c9b4 Merge branch 'proper-i18next' 2020-02-29 14:41:08 -05:00
Dessalines
a866d2d283 Adding generate report for yarn start. 2020-02-29 14:40:27 -05:00
e3111431aa remove translation report from readme 2020-02-29 20:35:25 +01:00
467cd41bd3 update deploy script to pull translations from weblate 2020-02-29 20:25:35 +01:00
Dessalines
f61f4a944d Changing to a better discussion icon. 2020-02-29 13:21:11 -05:00
2794b8b36a generate typescript during compilation 2020-02-29 19:16:23 +01:00
Dessalines
a308b3579d Version v0.6.26 2020-02-29 13:10:28 -05:00
ae16a4b1a5 Make i18n compatible with weblate (ref #387) 2020-02-29 19:08:59 +01:00
7d94cd5c96 make i18n compatible with weblate 2020-02-29 19:08:19 +01:00
Dessalines
5623f11b05 Refactoring thumbnails. Fixes #564
- Adding a default discussion thumbnail
- Adding a cropping max-height, and consistent width.
- Getting rid of hover overlays, in favor of top right content-type icon.
2020-02-29 13:03:41 -05:00
Dessalines
6365439c16 Merge branch 'master' into dev 2020-02-28 14:40:25 -05:00
Dessalines
1595b9406d Add line for private messaging support. 2020-02-28 14:40:12 -05:00
Dessalines
ff8d82d82f Adding thumbnail class. 2020-02-28 13:03:16 -05:00
Dessalines
b277d92226 New nsfw posts. 2020-02-27 16:24:27 -05:00
Dessalines
fab10ef792 Fix image testing regex. 2020-02-27 15:00:41 -05:00
Dessalines
76662ca557 Check for pictshare status ok. 2020-02-27 14:06:29 -05:00
Dessalines
6e6cfa9cf8 Version v0.6.25 2020-02-27 13:16:30 -05:00
Dessalines
2092cf3f4e Use image thumbnails from pictshare. Fixes #555 2020-02-27 12:55:23 -05:00
Dessalines
e784cc8b72 Merge branch 'master' of https://github.com/dessalines/lemmy 2020-02-24 17:42:15 -05:00
Dessalines
cf66addc62 Adding missing image thumbnail. Fixes #553 2020-02-24 17:38:05 -05:00
Dessalines
d667d35a7c
Merge pull request #552 from MyNameIsTroll/patch-2
Translation error in the FR version
2020-02-21 21:57:39 -05:00
MyNameIsTroll
4cad614c49
Translation error in the FR version
I corrected 'Reddit' to 'Lemmy'.
2020-02-21 18:44:38 +01:00
Dessalines
c896e01b9e Adding a link overlay. Fixes #549 2020-02-21 11:26:42 -05:00
Dessalines
6cde97836b Version v0.6.24 2020-02-19 13:35:58 -05:00
Dessalines
b147ca462c Don't show post url if it's local. 2020-02-19 13:35:15 -05:00
Dessalines
484118ca6c Merge branch 'master' of https://github.com/dessalines/lemmy 2020-02-18 12:43:27 -05:00
Dessalines
05a8c61f67
Merge pull request #548 from AndreVallestero/master
Fixed incorrect keys
2020-02-18 12:41:39 -05:00
Dessalines
beec263ae6 Version v0.6.23 2020-02-18 12:37:35 -05:00
Andre Vallestero
6ac491c587 Fixed incorrect keys 2020-02-18 10:18:50 -05:00
Dessalines
81f1aaf298 Fix iframely always refetching bug. 2020-02-18 09:00:17 -05:00
Dessalines
8505329889 Fixing about guide. 2020-02-17 20:54:45 -05:00
Dessalines
8c81dc04da Add a user guide. Fixes #543 2020-02-17 20:48:17 -05:00
Dessalines
8a16497eef Updating translation report. 2020-02-17 18:26:39 -05:00
Dessalines
fe3f733032
Merge pull request #546 from AndreVallestero/master
Fixed french translations and password change text
2020-02-17 18:24:13 -05:00
Andre Vallestero
e8735e4edc Corrections made, added text for password change 2020-02-17 17:30:42 -05:00
Andre Vallestero
6cb10d266a Corrected show_avatar to show_avatars 2020-02-17 17:20:25 -05:00
Dessalines
b79ec0cc1b Version v0.6.22 2020-02-17 16:11:49 -05:00
Dessalines
87969e7024 Fix dynamic post url changing issue. 2020-02-17 16:09:05 -05:00
Dessalines
a0e0c3edab Version v0.6.21 2020-02-17 14:38:16 -05:00
Dessalines
3a598b7af2 Removing images from iframely cards. 2020-02-17 14:37:36 -05:00
Dessalines
d629f26ea7 Add oembed to readme. 2020-02-17 14:06:19 -05:00
Dessalines
7641ff0117 Merge branch 'AndreVallestero-master' 2020-02-17 14:05:25 -05:00
Dessalines
30891fe384 Merge branch 'master' of https://github.com/AndreVallestero/lemmy into AndreVallestero-master 2020-02-17 14:04:41 -05:00
Andre Vallestero
ae473c2b0d Completed french translations, verified syntax 2020-02-17 13:54:22 -05:00
Dessalines
d6eeabd73b Version v0.6.20 2020-02-17 13:04:53 -05:00
Dessalines
51bc048955 Remove the responsive bootstrap utils. 2020-02-17 12:55:43 -05:00
Dessalines
97fccb5615 Only show it if it has a title. 2020-02-17 12:45:08 -05:00
Dessalines
2e38f934fe Merge branch 'master' into iframely 2020-02-17 11:31:57 -05:00
Dessalines
225bc0cca7 Merge branch 'master' of https://github.com/dessalines/lemmy 2020-02-17 11:21:04 -05:00
Dessalines
9e5cf8f272 First pass at adding oembeds / iframely. 2020-02-17 11:18:01 -05:00
Dessalines
58bc1d8347
Merge pull request #537 from AndreVallestero/master
Added french translation for general sponsors
2020-02-16 19:54:54 -05:00
Andre Vallestero
4b4e84b309 Added french translation for sponsor message 2020-02-16 16:04:30 -05:00
Dessalines
be83e99334 Version v0.6.19 2020-02-16 14:26:39 -05:00
Dessalines
cf0c0a5824 Some front end fixes. 2020-02-15 20:29:57 -05:00
Dessalines
10223d5aef Fix minor issue with selector. Fix issue with truncate wrapping. 2020-02-15 16:09:53 -05:00
Dessalines
f98b2bd4ba Adding an external link icon. #530 2020-02-14 22:29:17 -05:00
Dessalines
28a3140523
Merge pull request #534 from richardj/feature/permissions-db-init
changed permissions on db-init.sh to be able to run
2020-02-14 15:32:45 -05:00
Richard
a04b360d91 changed permissions on db-init.sh to be able to run 2020-02-14 21:30:06 +01:00
Dessalines
ee2c6e0850 Version v0.6.18 2020-02-14 10:12:40 -05:00
Dessalines
a95f282936 Making links go to post page. 2020-02-14 10:02:37 -05:00
Dessalines
deae2b6ade Fix minor loading bug. 2020-02-13 12:35:22 -05:00
Dessalines
940ea569c7 Merge branch 'dev' 2020-02-12 12:16:33 -05:00
Dessalines
d3f8e55124 Add community refine by searching on new post creation. Fixes #521 2020-02-12 12:12:19 -05:00
Dessalines
92af8be12c Merge branch 'dev' 2020-02-11 10:22:44 -05:00
Dessalines
6bddeecdb6 Fixing unread indicator on link click. Fixes #527 2020-02-11 10:14:09 -05:00
Dessalines
d6867fa791 Fixing ansible certbot renew. 2020-02-10 14:45:50 -05:00
Dessalines
9237226585 Version v0.6.17 2020-02-09 15:08:51 -05:00
Dessalines
036d6260bb Adding instant voting / vote animations. Fixes #526 2020-02-09 15:04:41 -05:00
Dessalines
f494835e8a Merge branch 'tech_debt' into dev 2020-02-09 11:45:46 -05:00
Dessalines
54b215a587 Live post and comment resorting. Fixes #522
- Moving sorting to utils.
2020-02-09 11:44:24 -05:00
Dessalines
8803bf97dd Merge branch 'master' into dev 2020-02-08 23:32:48 -05:00
Dessalines
a72d0ae42f
Merge pull request #525 from StaticallyTypedRice/minor_changes
Add an option to initialize the database from install.sh
2020-02-08 23:30:10 -05:00
Dessalines
469594fac8 Fixing some technical debt. Fixes #524 2020-02-08 23:20:11 -05:00
Richie Zhang
49a35b1271
Add line breaks after user prompts in install.sh 2020-02-08 13:02:40 -08:00
Richie Zhang
69c22b17a0
Add a semicolon. 2020-02-08 12:56:13 -08:00
Richie Zhang
7f92d82b1b
Fix a user prompt in install.sh 2020-02-08 12:54:41 -08:00
Richie Zhang
52b65bda69
Add an option to initialize the database from install.sh 2020-02-08 12:53:46 -08:00
Richie Zhang
d3e23bc90a
Merge pull request #5 from StaticallyTypedRice/master
Merge upstream
2020-02-08 12:17:02 -08:00
Richie Zhang
b5b9705152
Merge pull request #4 from dessalines/master
Merge upstream
2020-02-08 12:15:30 -08:00
Dessalines
841a86a666 Version v0.6.16 2020-02-08 11:44:16 -05:00
Dessalines
74aa161ff6 Change post sorting hot rank to use newest comment time. Fixes #517 2020-02-08 11:16:58 -05:00
Dessalines
8c5a510cfc Version v0.6.15 2020-02-07 23:50:33 -05:00
Dessalines
049556f146 Add new comments views to main and community pages. Fixes #480 2020-02-07 23:05:15 -05:00
Dessalines
1e157decec Version v0.6.14 2020-02-07 11:28:20 -05:00
Dessalines
65145b719c Adding post body searching. Fixes #507 2020-02-07 11:17:15 -05:00
Dessalines
68ac96147c Fix issue with post creating redirecting other posts. Fixes #520 2020-02-07 10:12:05 -05:00
Dessalines
1c182e381b Version v0.6.13 2020-02-06 23:02:26 -05:00
Dessalines
514c1ab298 Fixing rate limiting. 2020-02-06 22:48:43 -05:00
Dessalines
bbc7159ede Fix expanded image height. Fixes #455 2020-02-06 22:24:23 -05:00
Dessalines
c9060f76b4 Minor fixes to docs. 2020-02-06 16:26:03 -05:00
Dessalines
e900313f6e Merge branch 'minor_changes' of https://github.com/StaticallyTypedRice/lemmy into StaticallyTypedRice-minor_changes 2020-02-06 16:23:35 -05:00
Dessalines
9fb5d55569 Version v0.6.12 2020-02-06 16:16:51 -05:00
Dessalines
018fe531be Merge branch 'post_title_length' into rate_limit_fixes 2020-02-06 16:10:19 -05:00
Dessalines
d5b483d4d1 Fixing rate limit checking to only ping after a success. Fixes #516 2020-02-06 16:07:59 -05:00
Richie Zhang
1d4dc19d6f
Implement password verification in db-init.sh. 2020-02-06 13:07:34 -08:00
Richie Zhang
5103878741
Add instructions for running db-init.sh to administration_configuration.md 2020-02-06 12:26:01 -08:00
Richie Zhang
7612a54894
Add instructions for running db-init.sh to contributing_local_development.md 2020-02-06 12:25:13 -08:00
Dessalines
6fce9911f9 Increasing post title length limit to 200. Fixes #515 2020-02-06 13:10:43 -05:00
Dessalines
fc81cd98d2 Merge branch 'master' into dev 2020-02-06 11:27:02 -05:00
Dessalines
b5143e0919 Merge branch 'master' of https://github.com/dessalines/lemmy 2020-02-06 11:26:56 -05:00
Dessalines
ec9e48cbaa Merge branch 'master' into dev 2020-02-06 11:26:49 -05:00
Dessalines
5614ed7a93 Reworking README.md. Fixes #513. 2020-02-06 11:26:11 -05:00
Dessalines
0fe4e22acd
Merge pull request #511 from StaticallyTypedRice/StaticallyTypedRice-translations
Translate support_on_liberapay
2020-02-06 09:43:43 -05:00
Dessalines
d95a1ae5e6
Merge pull request #510 from StaticallyTypedRice/StaticallyTypedRice-FUNDING.yml
Add liberapay to FUNDING.yml
2020-02-06 09:42:56 -05:00
Richie Zhang
a8ef9f8726
Create db-init.sh 2020-02-05 18:15:28 -08:00
Richie Zhang
8a3f5032c3
Translate support_on_liberapay to Brazilian Portuguese. 2020-02-05 17:44:52 -08:00
Richie Zhang
07fdb17557
Translate support_on_liberapay to Dutch. 2020-02-05 17:42:38 -08:00
Richie Zhang
1f96b73e51
Translate support_on_liberapay to Italian 2020-02-05 17:41:43 -08:00
Richie Zhang
45241cc5df
Translate support_on_liberapay to French 2020-02-05 17:40:43 -08:00
Richie Zhang
43c187cf08
Translate support_on_liberapay to Spanish. 2020-02-05 17:38:04 -08:00
Richie Zhang
fe1db54a93
Translate support_on_liberapay to Chinese 2020-02-05 17:36:22 -08:00
Richie Zhang
53a662e3b2
Translate support_on_liberapay to German 2020-02-05 17:35:09 -08:00
Richie Zhang
47a58ce0a8
Merge pull request #2 from dessalines/master
Merge upstream
2020-02-05 17:30:47 -08:00
Richie Zhang
8b04897632
Add liberapay to FUNDING.yml 2020-02-05 17:23:07 -08:00
Richie Zhang
261602335b
Merge pull request #1 from dessalines/master
Merge upstream
2020-02-05 17:11:18 -08:00
Dessalines
ee60465643 Adding if found to catch errors. 2020-02-05 14:24:35 -05:00
Dessalines
5601ad5283 Adding Liberapay. 2020-02-05 14:14:05 -05:00
Dessalines
ba25af4364 Merge branch 'nyex-pt_trans' into dev 2020-02-05 13:12:44 -05:00
Dessalines
6ec79d2696 Adding language and etc for pt-br. 2020-02-05 13:12:13 -05:00
Dessalines
b0399da27b Fix i18n issue with no communities. 2020-02-05 12:56:01 -05:00
olivia maia
f167906a74 added Portuguese translation 2020-02-05 14:16:01 -03:00
Dessalines
35efca4152 Version v0.6.11 2020-02-04 19:48:11 -05:00
Dessalines
5e4397866f Forgot to add 'cross-posted to' to i18n. 2020-02-04 16:37:52 -05:00
Dessalines
dcad63fe2a Merge branch 'websocket_reconnect_reload' into dev 2020-02-04 12:21:49 -05:00
Dessalines
ee0c802476 Some corrections to total user points. 2020-02-04 11:29:54 -05:00
Dessalines
238be5f71c Merge branch 'StaticallyTypedRice-webclient' of https://github.com/StaticallyTypedRice/lemmy into StaticallyTypedRice-StaticallyTypedRice-webclient 2020-02-04 11:20:58 -05:00
Dessalines
73a720e9c3 Websocket reconnect reload page data. Fixes #504 2020-02-04 11:19:05 -05:00
Dessalines
a4fa4a55d0
Merge pull request #503 from xyvs/master
Spanish translations
2020-02-04 10:35:31 -05:00
Jose Francisco Martinez Salgado
0732029df9 Translations report updated 2020-02-04 09:11:47 -06:00
Jose Francisco Martinez Salgado
81b985f997 New words and typo 2020-02-04 09:08:16 -06:00
Jose Francisco Martinez Salgado
7382defa4c Merge remote-tracking branch 'upstream/master' 2020-02-04 08:57:13 -06:00
Dessalines
7037506566 Combine duplicate front page posts. Fixes #284 2020-02-03 19:52:39 -05:00
Richie Zhang
9cb438b440
Translate total_number_of_points to German. 2020-02-03 15:23:36 -08:00
Richie Zhang
0b7e5a5b55
Display a given user's total score. 2020-02-03 15:20:59 -08:00
Richie Zhang
1bf8661834
Add total_number_of_points. 2020-02-03 15:14:10 -08:00
Richie Zhang
1e8b359571
Merge pull request #10 from dessalines/dev
Merge upstream
2020-02-03 15:10:39 -08:00
Dessalines
586e8861de Updating sponsors. 2020-02-03 18:01:14 -05:00
Dessalines
3bcf82682d Add disclaimer about image uploading. Fixes #501 2020-02-03 18:00:41 -05:00
Dessalines
4a0bebf45a Version v0.6.10 2020-02-03 10:58:17 -05:00
e19a22d909 bump 2020-02-03 15:00:08 +01:00
2cbf191b69 Integrate email relay in Ansible setup 2020-02-03 14:10:24 +01:00
Dessalines
448ac762b8 Merge branch 'master' of https://github.com/dessalines/lemmy 2020-02-02 22:54:05 -05:00
Dessalines
d4736be04f Returning specific slurs from slur filter on failure. Fixes #463 2020-02-02 22:51:54 -05:00
Dessalines
a221525eef
Merge pull request #498 from iav/docs4arm
Add doc compilation to multiplatform workflow
2020-02-02 20:06:42 -05:00
Igor Velkov
a1060e35a7 Add doc compilation to multiplatform workflow
Change rust builder version to current
2020-02-03 01:39:41 +02:00
Dessalines
3f42af9885 Version v0.6.9 2020-02-02 17:15:48 -05:00
Dessalines
2e45e88fcc Fixing ansible deploy. 2020-02-02 17:13:59 -05:00
Dessalines
36c451e7c0 Don't show deleted / removed replies or messages in inbox. Fixes #493 2020-02-02 17:07:16 -05:00
Dessalines
ad8e47f8d2 Doing UserJoin on connect / reconnect. 2020-02-02 16:37:57 -05:00
Dessalines
3ddbe2e370 Merge branch 'dev' into websocket_scopes 2020-02-02 14:37:19 -05:00
Dessalines
bf2543c4e6 Doing comment-node, comment-form and private-message. 2020-02-02 14:22:35 -05:00
Dessalines
04df95b8b2 Some minor fixes. 2020-02-02 14:10:15 -05:00
Dessalines
680eab53c1 Finishing up interpolation rework. 2020-02-02 13:50:44 -05:00
Dessalines
e09e3b6a92 Merge branch 'dev' into richardj-feature/frontend-a11y 2020-02-02 12:56:07 -05:00
Dessalines
ac64786dc0 Merge branch 'feature/frontend-a11y' of https://github.com/richardj/lemmy into richardj-feature/frontend-a11y 2020-02-02 12:55:45 -05:00
Dessalines
a40a7d515d Updating ansible version. 2020-02-02 12:54:35 -05:00
Dessalines
f74d7b0368 Removing PostLikeResponse in favor of PostResponse. Consolidating comment and post_sends. 2020-02-02 12:45:41 -05:00
Dessalines
08c6fcf6a8 Fixing issue with multiple notifications. 2020-02-02 10:09:01 -05:00
Dessalines
8c2c0f0440 Merge branch 'dev' into websocket_scopes 2020-02-02 09:15:02 -05:00
Dessalines
bc1b7afd60 Testing out reconnecting websocket instead of rxJS one. #496 2020-02-01 22:52:16 -05:00
Dessalines
323c5dc26c Adding username and email case insensitivity uniqueness. Fixes #341 2020-02-01 20:11:12 -05:00
Dessalines
1e8fa79b67 Version v0.6.8 2020-02-01 16:56:31 -05:00
Dessalines
3f561710ef Adding an i386 theme. Fixes #476 2020-02-01 16:34:54 -05:00
Dessalines
034adbe3a9 Adding materia theme. Fixes #478 2020-02-01 14:44:26 -05:00
Richard
d5bacc2839 changed h2 back to h5 2020-02-01 19:24:11 +01:00
Dessalines
82c4d04114 Merge branch 'dev' into websocket_scopes 2020-02-01 00:37:34 -05:00
Dessalines
c2542137db Merge branch 'feature/frontend-a11y' of https://github.com/richardj/lemmy into richardj-feature/frontend-a11y 2020-01-31 23:46:04 -05:00
Dessalines
8fc6b16639 Merge branch 'StaticallyTypedRice-StaticallyTypedRice-documentation' into dev 2020-01-31 23:08:18 -05:00
Dessalines
512ff8eec9 Merge branch 'dev' into StaticallyTypedRice-StaticallyTypedRice-minor-changes 2020-01-31 23:06:52 -05:00
Dessalines
f46b728499 Merge branch 'StaticallyTypedRice-minor-changes' of https://github.com/StaticallyTypedRice/lemmy into StaticallyTypedRice-StaticallyTypedRice-minor-changes 2020-01-31 23:06:46 -05:00
Dessalines
cdef3a8ed0 Merge branch 'dev' into richardj-language/dutch-language-update-2 2020-01-31 23:05:32 -05:00
Dessalines
be6bd0b36f Merge branch 'dev' into nutomic-ansible 2020-01-31 23:01:19 -05:00
Dessalines
648ac32f4b Merge branch 'dev' into nutomic-api-benchmark 2020-01-31 22:53:55 -05:00
Richie Zhang
3a74b0b534
Fix the broken link on the Lemmy logo. 2020-01-31 19:50:20 -08:00
Richie Zhang
1d2b096779
Merge pull request #8 from dessalines/master
Merge upstream
2020-01-31 19:47:46 -08:00
Dessalines
d2136ee81d Removing todo line. 2020-01-31 22:44:02 -05:00
Dessalines
4f08760ef4 Mostly done eliminating recurring fetches. 2020-01-31 22:34:08 -05:00
Dessalines
b14f7bae3c Starting to work on user message scope. 2020-01-31 20:02:20 -05:00
Richard
d81345560f updated Dutch translations 2020-01-31 22:18:22 +01:00
Richard
d829b9b5ac usability features
These are all the commits from another PR combined into a single commit.
2020-01-31 21:52:27 +01:00
b3e1930d03 Add script to test http api, fix two api calls 2020-01-31 14:17:37 +01:00
Dessalines
7809b6ab0d Merge branch 'dev' into websocket_scopes 2020-01-30 21:15:58 -05:00
Richie Zhang
41ac10f75e
Remove empty div tag containing the navbar.
In preliminary testing, the appearance and behavior of the navbar were not affected when the `<nav>` element was not wrapped, and an empty `<div>` element doesn't do anything anyway. I'm also pretty sure that the `<nav>` tag in HTML5 is basically a div with a fancy name, so it can do everything a div can.
2020-01-30 16:07:01 -08:00
Richie Zhang
000e1c8660
Merge pull request #6 from dessalines/dev
Merge upstream
2020-01-30 15:59:03 -08:00
758b6891eb
Merge pull request #483 from StaticallyTypedRice/StaticallyTypedRice-translation
Add minor German translations
2020-01-30 08:45:06 +00:00
Dessalines
aabb5e9973 Merge branch 'master' of https://github.com/dessalines/lemmy 2020-01-29 20:37:16 -05:00
Dessalines
a313e7fb1b Removing english placeholders for Farsi. 2020-01-29 20:35:41 -05:00
Dessalines
9aa59473ff Merge branch 'PersianTranslation' of https://github.com/ahangarha/lemmy into ahangarha-PersianTranslation 2020-01-29 20:17:11 -05:00
Richie Zhang
dd4f96673f
Use "Kryptowährung" for "crypto" in German. 2020-01-29 16:30:48 -08:00
Richie Zhang
213ed9b4b3
Use "nicht speichern" for "unsave" in German. 2020-01-29 15:57:32 -08:00
Richie Zhang
e370475f50
Use translation "rückgängig machen speichern" for "unsave" in German. 2020-01-29 13:25:19 -08:00
Richie Zhang
5060542a01
Fix the German translation for "unsave" 2020-01-29 12:57:40 -08:00
Richie Zhang
ca9eaade7d
Fix minor German translations 2020-01-29 12:29:05 -08:00
Mostafa Ahangarha
2aab6f02e7 initial Persian translation 2020-01-29 21:25:34 +04:30
Dessalines
eb5dae8429
Merge pull request #485 from ahangarha/patch-1
Remove word repetition
2020-01-29 11:49:51 -05:00
Mostafa Ahangarha
68496128c5
Remove word repetition 2020-01-29 20:37:59 +04:30
Dessalines
b4f3eb29fc
Merge pull request #484 from StaticallyTypedRice/StaticallyTypedRice-translation-1
Add minor Chinese translations
2020-01-29 08:40:51 -05:00
Richie Zhang
3e36fcff98
Add minor Chinese translations 2020-01-28 22:06:37 -08:00
Richie Zhang
c09ea38af6
Add minor German translations 2020-01-28 21:55:37 -08:00
Dessalines
0d64f20b68 Adding a materialized view for reply_view. Fixes #466 2020-01-28 23:53:19 -05:00
Richie Zhang
196d5d77b9
Merge pull request #2 from dessalines/master
Merge upstream changes.
2020-01-28 17:46:43 -08:00
Dessalines
0751ed0e3c Fixing coffee cup offset. Fixes #479 2020-01-28 20:10:22 -05:00
Dessalines
045f6e80d1 Adding yarn run ts-node to husky pre-commit. 2020-01-28 20:08:33 -05:00
Dessalines
43d8a2c2ae Merge branch 'DeeUnderscore-comment-sort-asc' 2020-01-28 15:00:42 -05:00
D Anzorge
a1a11e0ce7 Add sorting by old (date, ascending) to comment frontend 2020-01-28 20:14:08 +01:00
7c0a9121c9 Fix Ansible installation, add uninstall playbook 2020-01-28 17:39:27 +01:00
Dessalines
628d6729c1 Version v0.6.7 2020-01-28 08:50:58 -05:00
Dessalines
9431f93e5f Add not throwing error on auth check for getPost and getCommunity. Fixes #475 2020-01-28 08:47:37 -05:00
Dessalines
d639f85a30 Version v0.6.6 2020-01-27 22:02:53 -05:00
Dessalines
beb55a471f Allow pasting images into comment boxes and post url box. Fixes #472 2020-01-27 21:59:38 -05:00
Dessalines
e7c90bee01 Strictly typing websocket forms. 2020-01-27 21:04:30 -05:00
Dessalines
22883c94ea
Merge pull request #468 from Rynach/patch-2
Improve the Finnish translation
2020-01-26 20:59:43 -05:00
Dessalines
44b08ecc61 Fixing install.sh db export. 2020-01-26 20:44:00 -05:00
D Anzorge
866df99c4f Use correct database env variable in docs
see server/src/settings.rs
2020-01-27 01:45:02 +01:00
Rynach
8ef3abca0e
Improve the Finnish translation
Changed the wording in a couple of lines to be more context-appropriate.
2020-01-27 00:24:50 +02:00
Dessalines
3180491748 Formatting translation file, changing deploy script to const. 2020-01-26 16:38:08 -05:00
Dessalines
f044459fda Adding Catalan to user pref dropdown, moment.js. 2020-01-26 16:32:35 -05:00
David
e160438a90 Lambdas reverted 2020-01-26 21:49:47 +01:00
David
88b798bb6b Added and completed CA translation 2020-01-26 21:20:25 +01:00
David
f3ece78f83 ES translation completed & some fixes 2020-01-26 20:31:34 +01:00
David
8ff637a57e TS func refactoring 2020-01-26 19:53:57 +01:00
Dessalines
8f9db655ec Adding finnish to translation report. 2020-01-26 12:37:07 -05:00
Dessalines
75554df998 Adding finnish to moment, dropdown. 2020-01-26 12:35:47 -05:00
Dessalines
3df34acdcf Starting to work on websocket scopes. 2020-01-26 12:23:28 -05:00
Rynach
7f57ec7ca7
Create fi.ts
Create the Finnish translation of Lemmy.
2020-01-26 16:27:45 +02:00
Dessalines
7468df649e Version v0.6.5 2020-01-25 15:06:31 -05:00
Dessalines
a7ac1d3bad Removing disabled from comment and post upvoting, showing toast now. Fixes #450 2020-01-25 13:39:22 -05:00
Dessalines
3b12f92752 Adding a noscript message. Fixes #464 2020-01-25 10:19:37 -05:00
Dessalines
2998957617 Fixing front end error messages. Fixes #462 2020-01-25 09:58:53 -05:00
Dessalines
c5fc5cc9d0 Merge branch 'dev' 2020-01-24 21:54:26 -05:00
Dessalines
7bbb071b0b Comment image uploads resize textarea. Fixes #460 2020-01-24 21:53:37 -05:00
Dessalines
d6d060f7ab Fixing repo mirrors section. 2020-01-24 21:19:37 -05:00
Dessalines
61a5bcaf04 Moving notification to bottom left. Fixes #457 2020-01-24 20:55:57 -05:00
Dessalines
c83dc4f311 Merge branch 'StaticallyTypedRice-master' 2020-01-24 20:44:39 -05:00
Dessalines
9140faded0 Squashed commit of the following:
commit 7be3cff714
Author: Dessalines <happydooby@gmail.com>
Date:   Fri Jan 24 20:41:48 2020 -0500

    Some minor additions.

commit 4dfd96ce8c
Author: Richie Zhang <12566991+StaticallyTypedRice@users.noreply.github.com>
Date:   Fri Jan 24 17:35:41 2020 -0800

    Add a list of repository mirrors to the README.md

commit 349751f143
Author: Richie Zhang <12566991+StaticallyTypedRice@users.noreply.github.com>
Date:   Fri Jan 24 16:47:33 2020 -0800

    Fix a typo in README.md

commit bfc45aa9bc
Merge: 197bd67 9024809
Author: Richie Zhang <12566991+StaticallyTypedRice@users.noreply.github.com>
Date:   Fri Jan 24 15:55:15 2020 -0800

    Merge pull request #1 from StaticallyTypedRice/StaticallyTypedRice-documentation

    Add more implemented features to README.md

commit 9024809a5c
Author: Richie Zhang <12566991+StaticallyTypedRice@users.noreply.github.com>
Date:   Fri Jan 24 15:52:27 2020 -0800

    Add more implemented features to README.md
2020-01-24 20:43:57 -05:00
Dessalines
7be3cff714 Some minor additions. 2020-01-24 20:41:48 -05:00
Richie Zhang
4dfd96ce8c
Add a list of repository mirrors to the README.md 2020-01-24 17:35:41 -08:00
Richie Zhang
349751f143
Fix a typo in README.md 2020-01-24 16:47:33 -08:00
Richie Zhang
bfc45aa9bc
Merge pull request #1 from StaticallyTypedRice/StaticallyTypedRice-documentation
Add more implemented features to README.md
2020-01-24 15:55:15 -08:00
Richie Zhang
9024809a5c
Add more implemented features to README.md 2020-01-24 15:52:27 -08:00
Dessalines
197bd67601 Version v0.6.4 2020-01-24 14:06:15 -05:00
Dessalines
ada50fc3de Adding autocomplete to post, community, message, and site forms. Fixes #453 2020-01-24 13:59:50 -05:00
Dessalines
351cd84ab8 Updating front end packages. 2020-01-24 11:28:11 -05:00
Dessalines
d458571f13 Moving message preview button. Fixes #459 2020-01-24 11:23:33 -05:00
Dessalines
58af4355c5 Merge branch 'master' into dev 2020-01-23 19:53:53 -05:00
Dessalines
937489ad51 Version v0.6.3 2020-01-23 19:47:30 -05:00
Dessalines
ba16e36202 Merge branch 'master' into dev 2020-01-23 19:46:28 -05:00
Dessalines
c3eaa2273a Squashed commit of the following:
commit f5b75f342b
Merge: bd1fc2b 69389f6
Author: Dessalines <happydooby@gmail.com>
Date:   Thu Jan 23 19:17:42 2020 -0500

    Done merging http-api and private_message

commit bd1fc2b80b
Author: Dessalines <happydooby@gmail.com>
Date:   Thu Jan 23 16:46:07 2020 -0500

    Remove danger from private-message.tsx

commit 69389f61c9
Author: Dessalines <happydooby@gmail.com>
Date:   Thu Jan 23 11:21:21 2020 -0500

    Fixing http curl POST docs.

commit 7fdcae4f07
Merge: dbe9ad0 752318f
Author: Dessalines <happydooby@gmail.com>
Date:   Thu Jan 23 11:01:06 2020 -0500

    Merge remote-tracking branch 'nutomic/http-api' into dessalines-http-api

commit 752318fdf3
Author: Felix <me@nutomic.com>
Date:   Thu Jan 23 15:22:17 2020 +0100

    api fixes

commit 9ccff18f23
Author: Dessalines <happydooby@gmail.com>
Date:   Wed Jan 22 22:29:11 2020 -0500

    Adding a toaster to replace alerts. Fixes #457

commit 5197407dd2
Merge: bacb9ac 58f673a
Author: Dessalines <happydooby@gmail.com>
Date:   Wed Jan 22 21:20:38 2020 -0500

    Merge branch 'private_messaging' into dev

commit 58f673ab78
Author: Dessalines <happydooby@gmail.com>
Date:   Wed Jan 22 21:09:17 2020 -0500

    Adding message to comment node actions.

commit bacb9ac59e
Merge: 10c6505 7d3adda
Author: Dessalines <happydooby@gmail.com>
Date:   Wed Jan 22 20:37:08 2020 -0500

    Merge branch 'private_messaging' into dev

commit 10c6505968
Author: Dessalines <happydooby@gmail.com>
Date:   Wed Jan 22 20:35:20 2020 -0500

    Adding correct hello_name to mail.

commit 7d3adda0cd
Author: Dessalines <happydooby@gmail.com>
Date:   Wed Jan 22 16:35:29 2020 -0500

    Adding private messaging, and matrix user ids.

    - Fixes #244

commit dbe9ad0998
Author: Dessalines <happydooby@gmail.com>
Date:   Mon Jan 20 18:49:54 2020 -0500

    Fixing last.

commit 20c9c54806
Author: Dessalines <happydooby@gmail.com>
Date:   Sun Jan 19 13:31:37 2020 -0500

    Updating API docs.

commit dc84ccaac9
Merge: 6c61dd2 3edd75e
Author: Dessalines <happydooby@gmail.com>
Date:   Sun Jan 19 10:06:25 2020 -0500

    Merge branch 'master' into dessalines-http-api

commit 6c61dd266b
Merge: c5eecd0 e518954
Author: Dessalines <happydooby@gmail.com>
Date:   Sun Jan 19 09:09:00 2020 -0500

    Merge remote-tracking branch 'nutomic/websocket-generics' into dessalines-http-api

commit e518954bca
Author: Felix <me@nutomic.com>
Date:   Sun Jan 19 14:25:50 2020 +0100

    Use generics to reduce code duplication in websocket

commit c5eecd055e
Author: Dessalines <happydooby@gmail.com>
Date:   Sun Jan 19 00:38:45 2020 -0500

    Strongly typing WebsocketJsonResponse. Forgot comment-form.tsx

commit 0c5eb47135
Author: Dessalines <happydooby@gmail.com>
Date:   Sat Jan 18 23:54:10 2020 -0500

    First pass at fixing UI to work with new websocketresponses.

commit baf77bb6be
Author: Felix <me@nutomic.com>
Date:   Sat Jan 18 17:25:45 2020 +0100

    simplify json serialization code

commit 047ec97e18
Author: Felix <me@nutomic.com>
Date:   Sat Jan 18 14:22:25 2020 +0100

    rewrite api endpoint urls

commit 2fb4900b0c
Author: Felix <me@nutomic.com>
Date:   Thu Jan 16 17:04:37 2020 +0100

    fix typo

commit cba8081579
Author: Felix <me@nutomic.com>
Date:   Thu Jan 16 16:47:38 2020 +0100

    fix formatting

commit d7285d8c25
Author: Felix <me@nutomic.com>
Date:   Thu Jan 16 16:09:01 2020 +0100

    small fix

commit 415040a1e9
Author: Felix <me@nutomic.com>
Date:   Thu Jan 16 15:39:08 2020 +0100

    working!

commit 7a97c981a0
Author: Felix <me@nutomic.com>
Date:   Wed Jan 15 16:48:21 2020 +0100

    try to simplify code with higher order functions

commit c41082f98f
Author: Felix <me@nutomic.com>
Date:   Wed Jan 15 16:37:25 2020 +0100

    Implement HTTP API using generics (fixes #380)
2020-01-23 19:39:59 -05:00
Dessalines
f5b75f342b Done merging http-api and private_message 2020-01-23 19:17:42 -05:00
Dessalines
bd1fc2b80b Remove danger from private-message.tsx 2020-01-23 16:46:07 -05:00
Dessalines
69389f61c9 Fixing http curl POST docs. 2020-01-23 11:21:21 -05:00
Dessalines
7fdcae4f07 Merge remote-tracking branch 'nutomic/http-api' into dessalines-http-api 2020-01-23 11:01:06 -05:00
752318fdf3 api fixes 2020-01-23 15:22:17 +01:00
Dessalines
9ccff18f23 Adding a toaster to replace alerts. Fixes #457 2020-01-22 22:29:11 -05:00
Dessalines
5197407dd2 Merge branch 'private_messaging' into dev 2020-01-22 21:20:38 -05:00
Dessalines
58f673ab78 Adding message to comment node actions. 2020-01-22 21:09:17 -05:00
Dessalines
bacb9ac59e Merge branch 'private_messaging' into dev 2020-01-22 20:37:08 -05:00
Dessalines
10c6505968 Adding correct hello_name to mail. 2020-01-22 20:35:20 -05:00
Dessalines
7d3adda0cd Adding private messaging, and matrix user ids.
- Fixes #244
2020-01-22 16:38:16 -05:00
Dessalines
759453772d
Merge pull request #452 from AndreVallestero/infernojs-url
Fixed infernojs url
2020-01-22 13:08:00 -05:00
Andre Vallestero
2b4bacaa10 Fixed infernojs url 2020-01-22 10:35:09 -05:00
Dessalines
dbe9ad0998 Fixing last. 2020-01-20 18:49:54 -05:00
Dessalines
fc86b83e36 Allow comment/post upvoting from other pages.
- Fixes #355
- Votes now coming back for posts and comments on search page.
2020-01-20 18:39:45 -05:00
Dessalines
dc35c7b126 Updating slur filter. 2020-01-20 17:12:23 -05:00
Dessalines
5fec981674 Adding to deploy. 2020-01-20 14:39:41 -05:00
Dessalines
572b3b876f Adding nsfw image blur. Fixes #438 2020-01-20 14:38:41 -05:00
Dessalines
7145dde79f Merge branch 'master' into dev 2020-01-20 11:10:00 -05:00
Dessalines
365f81b699 Fixing create_post endpoint, changing name url param to title. Fixes #448 2020-01-20 11:08:51 -05:00
Dessalines
5dc0d947e9 Version v0.6.2 2020-01-20 10:51:06 -05:00
Dessalines
6312ff333b Adding comment and post vote loading indicators. Fixes #449 2020-01-20 10:11:50 -05:00
Dessalines
2394993dd4 Fixing new comments and posts voting issue. Fixes #422 2020-01-19 19:48:34 -05:00
Dessalines
66adf67661 Version v0.6.1 2020-01-19 16:53:16 -05:00
Dessalines
7b7fb0f5d2 Hide next paginator.
- Fixes #441
- Hide post sort radio if no comments.
2020-01-19 16:47:54 -05:00
Dessalines
7f0e69e54c Fixing sponsor page. Fixes #444 2020-01-19 16:28:29 -05:00
Dessalines
3b8a2f61fc Automatically update translation report. Fixes #439 2020-01-19 16:23:04 -05:00
Dessalines
20c9c54806 Updating API docs. 2020-01-19 13:31:37 -05:00
Dessalines
dc84ccaac9 Merge branch 'master' into dessalines-http-api 2020-01-19 10:06:25 -05:00
Dessalines
3edd75ed43 Fixing a css sizing style. 2020-01-19 09:20:10 -05:00
Dessalines
6c61dd266b Merge remote-tracking branch 'nutomic/websocket-generics' into dessalines-http-api 2020-01-19 09:09:00 -05:00
e518954bca Use generics to reduce code duplication in websocket 2020-01-19 14:25:50 +01:00
0a409bc9be Use actual structs for nodeinfo 2020-01-19 12:32:02 +01:00
Dessalines
c5eecd055e Strongly typing WebsocketJsonResponse. Forgot comment-form.tsx 2020-01-19 00:38:45 -05:00
Dessalines
0c5eb47135 First pass at fixing UI to work with new websocketresponses. 2020-01-18 23:54:10 -05:00
Dessalines
9e60e76a8c Merge branch 'code-of-conduct' of https://github.com/Nutomic/lemmy into Nutomic-code-of-conduct 2020-01-18 13:42:40 -05:00
e859080632 Add Code of Conduct 2020-01-18 19:20:15 +01:00
Dessalines
126e2085fd Update donation section. 2020-01-18 12:38:50 -05:00
baf77bb6be simplify json serialization code 2020-01-18 17:25:45 +01:00
047ec97e18 rewrite api endpoint urls 2020-01-18 14:26:05 +01:00
2fb4900b0c fix typo 2020-01-18 14:26:05 +01:00
cba8081579 fix formatting 2020-01-18 14:26:05 +01:00
d7285d8c25 small fix 2020-01-18 14:26:04 +01:00
415040a1e9 working! 2020-01-18 14:26:04 +01:00
7a97c981a0 try to simplify code with higher order functions 2020-01-18 14:23:44 +01:00
c41082f98f Implement HTTP API using generics (fixes #380) 2020-01-18 14:23:43 +01:00
Dessalines
05f2bfc83c Fixing error when email already exists. Fixes #427 2020-01-17 20:34:16 -05:00
Dessalines
fb82a489d5 Update ansible inventory.example. 2020-01-17 18:51:07 -05:00
Dessalines
2af3f1d5cc Changing changelog to releases.md 2020-01-17 18:42:57 -05:00
Dessalines
b6aa9a30e8 Add more info to contributing. Fixes #429 2020-01-17 17:52:48 -05:00
Dessalines
676de4ab84 Updating translation report. 2020-01-17 13:30:26 -05:00
Dessalines
966f76f5cc Merge branch 'dev' 2020-01-17 13:29:46 -05:00
Dessalines
f8e9578ff8 Fix aarch64 dockerfile. 2020-01-17 13:28:29 -05:00
Dessalines
645fc9a620
Merge pull request #428 from richardj/language/dutch-language-update
updated translations that were still open for Dutch
2020-01-17 12:07:30 -05:00
Richard
a9c8127a69 updated translations that were still open for Dutch 2020-01-17 15:51:16 +01:00
Dessalines
5cf27d255a Version v0.6.0 2020-01-16 09:58:54 -05:00
Dessalines
b14c8f1a46 Version v0.5.22 2020-01-15 22:35:39 -05:00
Dessalines
d5af66c1b1 Fix image expanding and truncation issue. 2020-01-15 22:33:23 -05:00
Dessalines
0457d4c8f1 Version v0.5.21 2020-01-15 22:18:08 -05:00
Dessalines
69d816c865 Merge branch 'thebinarymutant-ui-fix' 2020-01-15 22:14:01 -05:00
Dessalines
24770126d4 Merge branch 'ui-fix' of https://github.com/thebinarymutant/lemmy into thebinarymutant-ui-fix 2020-01-15 22:08:49 -05:00
Dessalines
318ce4a52a Fix issue with adding multiple instances of chatserver. 2020-01-15 22:01:14 -05:00
Smit Patel
fc26a9a377 [CSS] Sticky Footer 2020-01-16 06:54:47 +05:30
Dessalines
1e884c6969 Fix new post fetching. Fixes #422 2020-01-15 17:28:59 -05:00
Dessalines
04c7f99f67 Adding limits to inbox fetching. Fixes #420 2020-01-15 16:20:54 -05:00
Dessalines
efdc98dfa0 Temp remove showing post editing on front page, due to glitches. 2020-01-15 16:13:46 -05:00
Dessalines
b0246a784b Updating the sponsor list. 2020-01-15 10:28:27 -05:00
Dessalines
7e8c0b146b Fixing text overflow / wrapping for titles and markdown. Fixes #414 2020-01-14 18:46:25 -05:00
Dessalines
d762230f61 Adding border lines and spacing on markdown tables.
- Fixes #413
2020-01-14 17:51:27 -05:00
Dessalines
f8525b2474 Merge branch 'dev' 2020-01-14 17:14:06 -05:00
Dessalines
48e221d06c Adding an env deploy. Fixes #410 2020-01-14 17:14:02 -05:00
Dessalines
6cd9156d3b Updating translation report. 2020-01-14 17:02:42 -05:00
Dessalines
655c5db59a Removing placeholder image for avatar upload.
- Fixes #419
2020-01-14 16:58:14 -05:00
Dessalines
10533ff005 Version v0.5.20 2020-01-14 12:19:08 -05:00
Dessalines
0671390475 Merge branch 'dev' 2020-01-14 12:18:30 -05:00
Dessalines
afdad2abc3 Merge branch 'dev' into materialized_views 2020-01-14 10:49:30 -05:00
Dessalines
a2c469977c Adding disable vote buttons when not logged in. #416 2020-01-14 10:38:45 -05:00
Dessalines
1cf97a8661 Adding force to the failing diesel install. 2020-01-14 09:55:05 -05:00
Dessalines
aaa64811f4 Trying build before clippy. 2020-01-14 09:42:11 -05:00
Dessalines
556016614d Adding these as global envs. 2020-01-14 09:23:41 -05:00
Dessalines
b0899cf55e Adding back in diesel install. 2020-01-14 09:15:30 -05:00
Dessalines
cc11930bdd Merge branch 'dev' into materialized_views 2020-01-13 23:05:22 -05:00
Dessalines
66f0683160 Adding options below comment searches. Fixes #412 2020-01-13 23:04:47 -05:00
Dessalines
13a5c50c70 Merge branch 'dev' into materialized_views 2020-01-13 22:58:46 -05:00
Dessalines
3f4cce99ed Add fast comment and post voting. (Doesn't wait for server return)
- Fixes #416
2020-01-13 22:57:32 -05:00
Dessalines
6260fea707 Merge branch 'dev' into materialized_views 2020-01-13 21:33:15 -05:00
Dessalines
083fcb9c6c Adding a better rust formatter. 2020-01-13 21:32:08 -05:00
Dessalines
a06476fa96 Fix testing by using RUST_TEST_THREADS=1. 2020-01-13 21:02:04 -05:00
Dessalines
aa502b687d A first pass at using materialized views. 2020-01-13 20:02:02 -05:00
Dessalines
5f4a35c80a Merge branch 'dev'
- r2d2 pooling. Fixes #409
2020-01-12 17:18:39 -05:00
Dessalines
a6d88fdfb0 Fixing clippy expect issue. 2020-01-12 17:04:58 -05:00
Dessalines
7839eb6d40 Fixing nodeinfo endpoint. 2020-01-12 16:57:48 -05:00
Dessalines
d0de6552ab removing some comments. 2020-01-12 16:47:02 -05:00
Dessalines
1707b19f80 Merge branch 'dev' into r2d2_actix_pooling 2020-01-12 13:48:14 -05:00
Dessalines
ebaa96a9d6 Adding proxy_cache. #408 2020-01-12 13:28:05 -05:00
Dessalines
33b602f353 Version v0.5.19 2020-01-12 13:05:09 -05:00
Dessalines
7a82e9ffd2 Adding an apache bench (ab) report. 2020-01-12 13:04:13 -05:00
Dessalines
ae02747ee0 Adding image caching via immutable to ansible. #371 #408 2020-01-12 12:58:06 -05:00
Dessalines
34ddd62fd1 Cleaning up nodeinfo. 2020-01-12 12:52:19 -05:00
Dessalines
9755654734 Trying to add r2d2 connection pooling to websockets. 2020-01-12 10:31:51 -05:00
Dessalines
38ba7dfb1a Version v0.5.18 2020-01-11 19:43:30 -05:00
Dessalines
519a509412 Merge branch 'db_indexes' into dev 2020-01-11 19:19:46 -05:00
Dessalines
dab6695ae2 Merge branch 'actix-2.0' into dev 2020-01-11 19:19:38 -05:00
Dessalines
a08d743747 Upping rust-musl to 1.40.0 2020-01-11 19:07:44 -05:00
Dessalines
ad2fc2e8d9 Back to just indexes. 2020-01-11 17:30:44 -05:00
02bcbc42d6 Make various functions async 2020-01-11 13:50:07 +01:00
Dessalines
8fe034c320 Trying some DB indexes. 2020-01-10 23:09:38 -05:00
66c95993dc Upgrade actix to 2.0 (fixes #392) 2020-01-10 23:41:08 +01:00
Dessalines
6d89f6f955 Adding ssh docker commands to dev deploy. 2020-01-07 12:53:32 -05:00
Dessalines
8079b6faef Version v0.5.17 2020-01-06 11:29:25 -05:00
Dessalines
fe264a2f30 Removing outdateds from package.json. 2020-01-06 11:23:29 -05:00
Dessalines
2cb57b833d Upgrade package.json. 2020-01-06 11:22:51 -05:00
Dessalines
588010ea88 Merge branch 'master' into dev 2020-01-04 17:22:59 -05:00
Dessalines
2d95db8a7d Adding cargo checking to husky pre-commit. Fixes #402 2020-01-04 17:21:33 -05:00
Dessalines
bd99f4994a Updating cargo deps. 2020-01-03 23:41:44 -05:00
Dessalines
813b053b5f Adding cargo outdated to clean script. 2020-01-03 23:39:31 -05:00
Dessalines
0f09171d68 Adding a clean script for cargo. (No husky hook yet) 2020-01-03 22:58:43 -05:00
Dessalines
7b492cc477 Version v0.5.16 2020-01-03 14:19:36 -05:00
Dessalines
912871d0ac Adding pictshare image thumbnailer.
- Fixes #377
2020-01-03 14:13:22 -05:00
Dessalines
07d7664a38 Fixing create_post, create_community, and login pages.
- Includes fetching the site for `enable_nsfw` info. Fixes #400
2020-01-03 13:52:21 -05:00
Dessalines
02cf67de4a Don't send email notification for self replies.
- Fixes #401
2020-01-03 13:12:19 -05:00
Dessalines
4157bf9a02 Trying another cache change. 2. 2020-01-03 12:58:50 -05:00
Dessalines
1180b89268 Trying another cache change. 2020-01-03 12:56:53 -05:00
Dessalines
d22bbafebb Trying out new cargo cache. #397 2020-01-03 12:09:06 -05:00
Dessalines
e3d4f9418e Version v0.5.15 2020-01-02 17:45:03 -05:00
Dessalines
beb63aedc8 Updating translation report. 2020-01-02 17:44:25 -05:00
Dessalines
e339f90737 Adding show_avatar user setting, and option to send notifications to inbox.
- Fixes #254
- Fixes #394
2020-01-02 16:55:54 -05:00
Dessalines
8c1316aa96 Version v0.5.14 2020-01-02 10:47:18 -05:00
Dessalines
ec146a0dea Fixing deploy and version for clippy. 2020-01-02 10:34:40 -05:00
Dessalines
c939c15530 Merge branch 'master' into lint 2020-01-02 10:03:38 -05:00
c01b40c517 Run lint in Travis CI 2020-01-02 12:52:08 +01:00
ddd4baf103 Apply changes suggested by cargo clippy (fixes #395) 2020-01-02 12:30:00 +01:00
Dessalines
a998bfc1f5 Version v0.5.13 2020-01-01 22:12:24 -05:00
Dessalines
3c6eb37a1b Add are you sure dialogs to mod actions.
- Fixes #386
2020-01-01 22:09:07 -05:00
Dessalines
2512babff1 Version v0.5.12 2020-01-01 17:57:39 -05:00
Dessalines
f71d19729a Adding some fixes to new docs system. 2020-01-01 17:47:00 -05:00
Dessalines
04da8146ba Merge branch 'mdbook' of https://yerbamate.dev/Nutomic/lemmy into Nutomic-mdbook 2020-01-01 16:42:38 -05:00
Dessalines
b63aabfdc2 Adding change password and email address from user settings.
- Fixes #384
- Fixes #385
2020-01-01 15:46:14 -05:00
4b6bba0e7b Include docs in docker image 2020-01-01 18:14:09 +01:00
Dessalines
a95704d5fc Only do arm build on major deploy. Fixes #393 2020-01-01 11:39:23 -05:00
Dessalines
dbd1d8faa5 Version v0.5.11 2020-01-01 11:32:58 -05:00
Dessalines
868ba5b64c Version v0.5.10 2020-01-01 11:28:18 -05:00
49de4ccbd9 Move translations to readme, put install instructions in both places 2020-01-01 17:26:59 +01:00
af83ec951f Use mdbook for documentation (fixes #375) 2020-01-01 17:24:36 +01:00
Dessalines
b365dd2349 Finally got debounce working!
- Fixes #367
- Fixes #376
2020-01-01 11:09:56 -05:00
Dessalines
9e20ddbfa4 Correcting mastodon follow link. 2019-12-31 11:07:57 -05:00
Dessalines
22904e1c66 Adding open_registration to nodeinfo. 2019-12-31 10:44:30 -05:00
f7156bdac3 Use actix config to handle routes in separate folders (#378) 2019-12-31 14:17:24 +01:00
Dessalines
b6c297766b Version v0.5.9 2019-12-29 20:30:23 -05:00
Dessalines
a6bc0edc91 Adding case insensitivity to slur filter.
- Fixes #388
2019-12-29 20:29:07 -05:00
Dessalines
ddb512b1ae Version v0.5.0.8 2019-12-29 17:03:36 -05:00
Dessalines
88fed73ea3 Preview image type post.
- Fixes #383
2019-12-29 16:56:55 -05:00
Dessalines
51c8735682 Version v0.5.0.7 2019-12-29 16:06:36 -05:00
Dessalines
94c4504b33 Fix fuse.js 2019-12-29 16:06:04 -05:00
Dessalines
c06d01f753 Adding user avatars / icons. Requires pictshare.
- Fixes #188
2019-12-29 15:39:48 -05:00
Dessalines
daf22a12d9 Version v0.5.0.6 2019-12-28 21:10:16 -05:00
Dessalines
dc1fc1e04c Post editing fix. 2019-12-28 21:10:07 -05:00
Dessalines
18d4b3d2aa Version v0.5.0.5 2019-12-28 21:01:05 -05:00
Dessalines
3a85515bd5 Fixing non-existent user profile viewing.
- Fixes #381
2019-12-28 20:58:01 -05:00
Dessalines
b4b8e9d7f5 Fixing empty email field in register form breaking signups.
- Fixes #382
2019-12-28 20:25:36 -05:00
Dessalines
807dd8d82c Fixing ansible deploy. 2019-12-28 20:22:53 -05:00
Dessalines
c31fe3857c Version v0.5.0.4 2019-12-28 19:08:06 -05:00
Dessalines
14418c5a0d Ansible fix try #1. 2019-12-28 19:06:47 -05:00
Dessalines
0799ae1a1f Removing debounce. 2019-12-28 19:06:19 -05:00
Dessalines
8a9f1dbb59 Fixing travis build. 5. 2019-12-28 16:50:59 -05:00
Dessalines
a42e2af203 Fixing travis build. 4. 2019-12-28 16:49:38 -05:00
Dessalines
8bf5d0cca6 Fixing travis build. 3. 2019-12-28 16:44:57 -05:00
Dessalines
6b68d54e35 Fixing travis build. 2. 2019-12-28 16:42:20 -05:00
Dessalines
7d291ee95a Fixing travis build. 2019-12-28 16:24:57 -05:00
Dessalines
6248392992 Config fixes.
- Adding front_end_dir to settings.
- Adding unit test for PasswordResetRequest encryption.
- Readme points to lemmy.hjson
- Fixing docker prod, dev, and ansible builds.
- Removing redundant env files, as all config is now in a single file.
- Some formatting fixes.
2019-12-28 16:11:03 -05:00
f18ebed740 Fix overriding config vars with underscore from environment 2019-12-28 12:11:06 +01:00
10da3f2554 Fix review comments 2019-12-27 17:30:46 +01:00
8fb34843aa Replace rust-crypto with sha2 crate (fixes #372) 2019-12-27 17:30:46 +01:00
a882fbea97 Added option to enable/disable federation 2019-12-27 17:30:45 +01:00
ae3fccf701 Implement webfinger (fixes #149) 2019-12-27 17:29:50 +01:00
f7333705dc update documentation, docker and ansible files 2019-12-27 17:28:46 +01:00
140eff181c Added documentation comments in default config 2019-12-27 17:28:45 +01:00
2c26cc26b8 Implement SQL connection pool 2019-12-27 17:28:45 +01:00
bad4868a10 Implement config (fixes #351) 2019-12-27 17:28:44 +01:00
Lyra
844a97a6a5 Add correct ActivityPub types conversion for Community and Post. 2019-12-27 17:25:20 +01:00
Jose Francisco Martinez Salgado
a347163ad7 Spanish translations 2019-10-17 16:10:08 -05:00
215 changed files with 21504 additions and 9611 deletions

.dockerignore (1 change)

@@ -1,5 +1,4 @@
 ui/node_modules
 ui/dist
 server/target
-docs
 .git

.github/FUNDING.yml (1 change)

@@ -1,3 +1,4 @@
 # These are supported funding model platforms
 patreon: dessalines
+liberapay: Lemmy

.gitignore (6 changes)

@@ -1,4 +1,10 @@
 ansible/inventory
+ansible/inventory_dev
 ansible/passwords/
+ansible/vars/
+docker/lemmy_mine.hjson
+docker/dev/env_deploy.sh
 build/
 .idea/
+ui/src/translations
+docker/dev/volumes

.travis.yml (26 changes)

@@ -5,21 +5,31 @@ matrix:
 allow_failures:
 - rust: nightly
 fast_finish: true
-cache:
-directories:
-- /home/travis/.cargo
+cache: cargo
 before_cache:
-- rm -rf /home/travis/.cargo/registry
+- rm -rfv target/debug/incremental/lemmy_server-*
+- rm -rfv target/debug/.fingerprint/lemmy_server-*
+- rm -rfv target/debug/build/lemmy_server-*
+- rm -rfv target/debug/deps/lemmy_server-*
+- rm -rfv target/debug/lemmy_server.d
+- cargo clean
 before_script:
-- psql -c "create user rrr with password 'rrr' superuser;" -U postgres
-- psql -c 'create database rrr with owner rrr;' -U postgres
+- psql -c "create user lemmy with password 'password' superuser;" -U postgres
+- psql -c 'create database lemmy with owner lemmy;' -U postgres
+- rustup component add clippy --toolchain stable-x86_64-unknown-linux-gnu
 before_install:
 - cd server
 script:
-- diesel migration run
+# Default checks, but fail if anything is detected
 - cargo build
+- cargo clippy -- -D clippy::style -D clippy::correctness -D clippy::complexity -D clippy::perf
+- cargo install diesel_cli --no-default-features --features postgres --force
+- diesel migration run
 - cargo test
 env:
-- DATABASE_URL=postgres://rrr:rrr@localhost/rrr
+global:
+- DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
+- RUST_TEST_THREADS=1
 addons:
 postgresql: "9.4"
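
For reference, a minimal sketch of the script and env sections of .travis.yml as they read after this change, assembled only from the lines in the hunk above (the YAML indentation is assumed, since the compare view flattens it):

script:
  # Default checks, but fail if anything is detected
  - cargo build
  - cargo clippy -- -D clippy::style -D clippy::correctness -D clippy::complexity -D clippy::perf
  - cargo install diesel_cli --no-default-features --features postgres --force
  - diesel migration run
  - cargo test
env:
  global:
    - DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
    - RUST_TEST_THREADS=1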

35
CODE_OF_CONDUCT.md vendored Normal file
View file

@ -0,0 +1,35 @@
# Code of Conduct
- We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
- Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all.
- Please be kind and courteous. There's no need to be mean or rude.
- Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
- Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
- We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the Citizen Code of Conduct; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
- Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the Lemmy moderation team immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
- Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
[**Message the Moderation Team on Mastodon**](https://mastodon.social/@LemmyDev)
[**Email The Moderation Team**](mailto:contact@lemmy.ml)
## Moderation
These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the Lemmy moderation team.
1. Remarks that violate the Lemmy standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
3. Moderators will first respond to such remarks with a warning, at the same time the offending content will likely be removed whenever possible.
4. If the warning is unheeded, the user will be “kicked,” i.e., kicked out of the communication channel to cool off.
5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, in private. Complaints about bans in-channel are not allowed.
8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
In the Lemmy community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make others comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
The enforcement policies listed above apply to all official Lemmy venues; including git repositories under [github.com/dessalines/lemmy](https://github.com/dessalines/lemmy) and [yerbamate.dev/dessalines/lemmy](https://yerbamate.dev/dessalines/lemmy), the [Matrix channel](https://matrix.to/#/!BZVTUuEiNmRcbFeLeI:matrix.org?via=matrix.org&via=privacytools.io&via=permaweb.io); and all instances under lemmy.ml. For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
Adapted from the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct), which is based on the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).

4
CONTRIBUTING.md vendored Normal file

@ -0,0 +1,4 @@
# Contributing
See [here](https://dev.lemmy.ml/docs/contributing.html) for contributing instructions.

301
README.md vendored

@ -1,97 +1,41 @@
<p align="center">
<a href="" rel="noopener">
<img width=200px height=200px src="ui/assets/favicon.svg"></a>
</p>
<h3 align="center">Lemmy</h3>
<div align="center"> <div align="center">
[![Github](https://img.shields.io/badge/-Github-blue)](https://github.com/dessalines/lemmy)
[![Gitlab](https://img.shields.io/badge/-Gitlab-yellowgreen)](https://gitlab.com/dessalines/lemmy)
![Mastodon Follow](https://img.shields.io/mastodon/follow/810572?domain=https%3A%2F%2Fmastodon.social&style=social)
![GitHub stars](https://img.shields.io/github/stars/dessalines/lemmy?style=social)
[![Matrix](https://img.shields.io/matrix/rust-reddit-fediverse:matrix.org.svg?label=matrix-chat)](https://riot.im/app/#/room/#rust-reddit-fediverse:matrix.org)
![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/dessalines/lemmy.svg) ![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/dessalines/lemmy.svg)
[![Build Status](https://travis-ci.org/dessalines/lemmy.svg?branch=master)](https://travis-ci.org/dessalines/lemmy) [![Build Status](https://travis-ci.org/dessalines/lemmy.svg?branch=master)](https://travis-ci.org/dessalines/lemmy)
[![GitHub issues](https://img.shields.io/github/issues-raw/dessalines/lemmy.svg)](https://github.com/dessalines/lemmy/issues) [![GitHub issues](https://img.shields.io/github/issues-raw/dessalines/lemmy.svg)](https://github.com/dessalines/lemmy/issues)
[![Docker Pulls](https://img.shields.io/docker/pulls/dessalines/lemmy.svg)](https://cloud.docker.com/repository/docker/dessalines/lemmy/) [![Docker Pulls](https://img.shields.io/docker/pulls/dessalines/lemmy.svg)](https://cloud.docker.com/repository/docker/dessalines/lemmy/)
![GitHub commit activity](https://img.shields.io/github/commit-activity/m/dessalines/lemmy.svg) [![Translation status](http://weblate.yerbamate.dev/widgets/lemmy/-/lemmy/svg-badge.svg)](http://weblate.yerbamate.dev/engage/lemmy/)
![GitHub repo size](https://img.shields.io/github/repo-size/dessalines/lemmy.svg)
[![License](https://img.shields.io/github/license/dessalines/lemmy.svg)](LICENSE) [![License](https://img.shields.io/github/license/dessalines/lemmy.svg)](LICENSE)
[![Patreon](https://img.shields.io/badge/-Support%20on%20Patreon-blueviolet.svg)](https://www.patreon.com/dessalines) ![GitHub stars](https://img.shields.io/github/stars/dessalines/lemmy?style=social)
</div> </div>
--- <p align="center">
<a href="https://dev.lemmy.ml/" rel="noopener">
<img width=200px height=200px src="ui/assets/favicon.svg"></a>
<p align="center">A link aggregator / reddit clone for the fediverse. <h3 align="center"><a href="https://dev.lemmy.ml">Lemmy</a></h3>
<br> <p align="center">
A link aggregator / reddit clone for the fediverse.
<br />
<br />
<a href="https://dev.lemmy.ml">View Site</a>
·
<a href="https://dev.lemmy.ml/docs/index.html">Documentation</a>
·
<a href="https://github.com/dessalines/lemmy/issues">Report Bug</a>
·
<a href="https://github.com/dessalines/lemmy/issues">Request Feature</a>
·
<a href="https://github.com/dessalines/lemmy/blob/master/RELEASES.md">Releases</a>
</p>
</p> </p>
[Lemmy Dev instance](https://dev.lemmy.ml) *for testing purposes only* ## About The Project
This is a **very early beta version**, and a lot of features are currently broken or in active development, such as federation.
Front Page|Post Front Page|Post
---|--- ---|---
![main screen](https://i.imgur.com/kZSRcRu.png)|![chat screen](https://i.imgur.com/4XghNh6.png) ![main screen](https://i.imgur.com/kZSRcRu.png)|![chat screen](https://i.imgur.com/4XghNh6.png)
## 📝 Table of Contents
<!-- toc -->
- [Features](#features)
- [About](#about)
* [Why's it called Lemmy?](#whys-it-called-lemmy)
- [Install](#install)
* [Docker](#docker)
+ [Updating](#updating)
* [Ansible](#ansible)
* [Kubernetes](#kubernetes)
- [Develop](#develop)
* [Docker Development](#docker-development)
* [Local Development](#local-development)
+ [Requirements](#requirements)
+ [Set up Postgres DB](#set-up-postgres-db)
+ [Running](#running)
- [Documentation](#documentation)
- [Support](#support)
- [Translations](#translations)
- [Credits](#credits)
<!-- tocstop -->
## Features
- Open source, [AGPL License](/LICENSE).
- Self hostable, easy to deploy.
- Comes with [Docker](#docker), [Ansible](#ansible), [Kubernetes](#kubernetes).
- Clean, mobile-friendly interface.
- Live-updating Comment threads.
- Full vote scores `(+/-)` like old reddit.
- Themes, including light, dark, and solarized.
- Emojis with autocomplete support. Start typing `:`
- User tagging using `@`, Community tagging using `#`.
- Notifications, on comment replies and when you're tagged.
- i18n / internationalization support.
- RSS / Atom feeds for `All`, `Subscribed`, `Inbox`, `User`, and `Community`.
- Cross-posting support.
- A *similar post search* when creating new posts. Great for question / answer communities.
- Moderation abilities.
- Public Moderation Logs.
- Both site admins, and community moderators, who can appoint other moderators.
- Can lock, remove, and restore posts and comments.
- Can ban and unban users from communities and the site.
- Can transfer site and communities to others.
- Can fully erase your data, replacing all posts and comments.
- NSFW post / community support.
- High performance.
- Server is written in rust.
- Front end is `~80kB` gzipped.
- Supports arm64 / Raspberry Pi.
## About
[Lemmy](https://github.com/dessalines/lemmy) is similar to sites like [Reddit](https://reddit.com), [Lobste.rs](https://lobste.rs), [Raddle](https://raddle.me), or [Hacker News](https://news.ycombinator.com/): you subscribe to forums you're interested in, post links and discussions, then vote, and comment on them. Behind the scenes, it is very different; anyone can easily run a server, and all these servers are federated (think email), and connected to the same universe, called the [Fediverse](https://en.wikipedia.org/wiki/Fediverse). [Lemmy](https://github.com/dessalines/lemmy) is similar to sites like [Reddit](https://reddit.com), [Lobste.rs](https://lobste.rs), [Raddle](https://raddle.me), or [Hacker News](https://news.ycombinator.com/): you subscribe to forums you're interested in, post links and discussions, then vote, and comment on them. Behind the scenes, it is very different; anyone can easily run a server, and all these servers are federated (think email), and connected to the same universe, called the [Fediverse](https://en.wikipedia.org/wiki/Fediverse).
For a link aggregator, this means a user registered on one server can subscribe to forums on any other server, and can have discussions with users registered elsewhere. For a link aggregator, this means a user registered on one server can subscribe to forums on any other server, and can have discussions with users registered elsewhere.
@ -100,6 +44,8 @@ The overall goal is to create an easily self-hostable, decentralized alternative
Each lemmy server can set its own moderation policy; appointing site-wide admins, and community moderators to keep out the trolls, and foster a healthy, non-toxic environment where all can feel comfortable contributing. Each lemmy server can set its own moderation policy; appointing site-wide admins, and community moderators to keep out the trolls, and foster a healthy, non-toxic environment where all can feel comfortable contributing.
*Note: Federation is still in active development*
### Why's it called Lemmy? ### Why's it called Lemmy?
- Lead singer from [Motörhead](https://invidio.us/watch?v=pWB5JZRGl0U). - Lead singer from [Motörhead](https://invidio.us/watch?v=pWB5JZRGl0U).
@ -107,163 +53,88 @@ Each lemmy server can set its own moderation policy; appointing site-wide admins
- The [Koopa from Super Mario](https://www.mariowiki.com/Lemmy_Koopa). - The [Koopa from Super Mario](https://www.mariowiki.com/Lemmy_Koopa).
- The [furry rodents](http://sunchild.fpwc.org/lemming-the-little-giant-of-the-north/). - The [furry rodents](http://sunchild.fpwc.org/lemming-the-little-giant-of-the-north/).
Made with [Rust](https://www.rust-lang.org), [Actix](https://actix.rs/), [Inferno](https://www.infernojs.org), [Typescript](https://www.typescriptlang.org/) and [Diesel](http://diesel.rs/). ### Built With
## Install - [Rust](https://www.rust-lang.org)
- [Actix](https://actix.rs/)
- [Diesel](http://diesel.rs/)
- [Inferno](https://infernojs.org)
- [Typescript](https://www.typescriptlang.org/)
### Docker ## Features
Make sure you have both docker and docker-compose(>=`1.24.0`) installed: - Open source, [AGPL License](/LICENSE).
- Self hostable, easy to deploy.
- Comes with [Docker](#docker), [Ansible](#ansible), [Kubernetes](#kubernetes).
- Clean, mobile-friendly interface.
- Only a minimum of a username and password is required to sign up!
- User avatar support.
- Live-updating Comment threads.
- Full vote scores `(+/-)` like old reddit.
- Themes, including light, dark, and solarized.
- Emojis with autocomplete support. Start typing `:`
- User tagging using `@`, Community tagging using `#`.
- Integrated image uploading in both posts and comments.
- A post can consist of a title and any combination of self text, a URL, or nothing else.
- Notifications, on comment replies and when you're tagged.
- Notifications can be sent via email.
- Private messaging support.
- i18n / internationalization support.
- RSS / Atom feeds for `All`, `Subscribed`, `Inbox`, `User`, and `Community`.
- Cross-posting support.
- A *similar post search* when creating new posts. Great for question / answer communities.
- Moderation abilities.
- Public Moderation Logs.
- Can sticky posts to the top of communities.
- Both site admins, and community moderators, who can appoint other moderators.
- Can lock, remove, and restore posts and comments.
- Can ban and unban users from communities and the site.
- Can transfer site and communities to others.
- Can fully erase your data, replacing all posts and comments.
- NSFW post / community support.
- OEmbed support via Iframely.
- High performance.
- Server is written in rust.
- Front end is `~80kB` gzipped.
- Supports arm64 / Raspberry Pi.
```bash ## Installation
mkdir lemmy/
cd lemmy/
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/.env
# Edit the .env if you want custom passwords
docker-compose up -d
```
and go to http://localhost:8536. - [Docker](https://dev.lemmy.ml/docs/administration_install_docker.html)
- [Ansible](https://dev.lemmy.ml/docs/administration_install_ansible.html)
- [Kubernetes](https://dev.lemmy.ml/docs/administration_install_kubernetes.html)
[A sample nginx config](/ansible/templates/nginx.conf), could be setup with: ## Support / Donate
```bash
wget https://raw.githubusercontent.com/dessalines/lemmy/master/ansible/templates/nginx.conf
# Replace the {{ vars }}
sudo mv nginx.conf /etc/nginx/sites-enabled/lemmy.conf
```
#### Updating
To update to the newest version, run:
```bash
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
docker-compose up -d
```
### Ansible
First, you need to [install Ansible on your local computer](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) (e.g. using `sudo apt install ansible`) or the equivalent for your platform.
Then run the following commands on your local computer:
```bash
git clone https://github.com/dessalines/lemmy.git
cd lemmy/ansible/
cp inventory.example inventory
nano inventory # enter your server, domain, contact email
ansible-playbook lemmy.yml --become
```
### Kubernetes
You'll need to have an existing Kubernetes cluster and [storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/).
Setting this up will vary depending on your provider.
To try it locally, you can use [MicroK8s](https://microk8s.io/) or [Minikube](https://kubernetes.io/docs/tasks/tools/install-minikube/).
Once you have a working cluster, edit the environment variables and volume sizes in `docker/k8s/*.yml`.
You may also want to change the service types to use `LoadBalancer`s or `NodePort`s, depending on where you're running your cluster (add `type: LoadBalancer` alongside `ports` in the service spec).
By default they will use `ClusterIP`s, which will allow access only within the cluster. See the [docs](https://kubernetes.io/docs/concepts/services-networking/service/) for more on networking in Kubernetes.
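For example, an already-created service can be switched over in place; the snippet below is only a sketch, and assumes the service is named `lemmy` (check `docker/k8s/lemmy.yml` for the actual name in your deployment).
```bash
# Sketch: switch the (assumed) "lemmy" service from the default ClusterIP to a LoadBalancer.
kubectl patch svc lemmy -p '{"spec": {"type": "LoadBalancer"}}'
# The EXTERNAL-IP column fills in once your provider has provisioned the balancer.
kubectl get svc lemmy
```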
**Important** Running a database in Kubernetes will work, but is generally not recommended.
If you're deploying on any of the common cloud providers, you should consider using their managed database service instead (RDS, Cloud SQL, Azure Database, etc.).
Now you can deploy:
```bash
# Add `-n foo` if you want to deploy into a specific namespace `foo`;
# otherwise your resources will be created in the `default` namespace.
kubectl apply -f docker/k8s/db.yml
kubectl apply -f docker/k8s/pictshare.yml
kubectl apply -f docker/k8s/lemmy.yml
```
If you used a `LoadBalancer`, you should see it in your cloud provider's console.
## Develop
### Docker Development
Run:
```bash
git clone https://github.com/dessalines/lemmy
cd lemmy/docker/dev
./docker_update.sh # This builds and runs it, updating for your changes
```
and go to http://localhost:8536.
### Local Development
#### Requirements
- [Rust](https://www.rust-lang.org/)
- [Yarn](https://yarnpkg.com/en/)
- [Postgres](https://www.postgresql.org/)
#### Set up Postgres DB
```bash
psql -c "create user lemmy with password 'password' superuser;" -U postgres
psql -c 'create database lemmy with owner lemmy;' -U postgres
export DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
```
#### Running
```bash
git clone https://github.com/dessalines/lemmy
cd lemmy
./install.sh
# For live coding, where both the front and back end, automagically reload on any save, do:
# cd ui && yarn start
# cd server && cargo watch -x run
```
## Documentation
- [Websocket API for App developers](docs/api.md)
- [ActivityPub API.md](docs/apub_api_outline.md)
- [Goals](docs/goals.md)
- [Ranking Algorithm](docs/ranking.md)
## Support
Lemmy is free, open-source software, meaning no advertising, monetizing, or venture capital, ever. Your donations directly support full-time development of the project. Lemmy is free, open-source software, meaning no advertising, monetizing, or venture capital, ever. Your donations directly support full-time development of the project.
- [Support on Liberapay.](https://liberapay.com/Lemmy)
- [Support on Patreon](https://www.patreon.com/dessalines). - [Support on Patreon](https://www.patreon.com/dessalines).
- [Sponsor List](https://dev.lemmy.ml/sponsors). - [List of Sponsors](https://dev.lemmy.ml/sponsors).
### Crypto
- bitcoin: `1Hefs7miXS5ff5Ck5xvmjKjXf5242KzRtK` - bitcoin: `1Hefs7miXS5ff5Ck5xvmjKjXf5242KzRtK`
- ethereum: `0x400c96c96acbC6E7B3B43B1dc1BB446540a88A01` - ethereum: `0x400c96c96acbC6E7B3B43B1dc1BB446540a88A01`
- monero: `41taVyY6e1xApqKyMVDRVxJ76sPkfZhALLTjRvVKpaAh2pBd4wv9RgYj1tSPrx8wc6iE1uWUfjtQdTmTy2FGMeChGVKPQuV` - monero: `41taVyY6e1xApqKyMVDRVxJ76sPkfZhALLTjRvVKpaAh2pBd4wv9RgYj1tSPrx8wc6iE1uWUfjtQdTmTy2FGMeChGVKPQuV`
## Translations ## Contributing
If you'd like to add translations, take a look at the [English translation file](ui/src/translations/en.ts). - [Contributing instructions](https://dev.lemmy.ml/docs/contributing.html)
- [Docker Development](https://dev.lemmy.ml/docs/contributing_docker_development.html)
- [Local Development](https://dev.lemmy.ml/docs/contributing_local_development.html)
- Languages supported: English (`en`), Chinese (`zh`), Dutch (`nl`), Esperanto (`eo`), French (`fr`), Spanish (`es`), Swedish (`sv`), German (`de`), Russian (`ru`), Italian (`it`). ### Translations
lang | done | missing If you want to help with translating, take a look at [Weblate](https://weblate.yerbamate.dev/projects/lemmy/).
--- | --- | ---
de | 100% |
eo | 86% | number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,archive_link,stickied,delete_account,delete_account_confirm,banned,creator,number_online,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default,theme,are_you_sure,yes,no
es | 95% | archive_link,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default
fr | 95% | archive_link,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default
it | 96% | archive_link,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default
nl | 88% | preview,upload_image,formatting_help,view_source,sticky,unsticky,archive_link,stickied,delete_account,delete_account_confirm,banned,creator,number_online,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default,theme
ru | 82% | cross_posts,cross_post,number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,archive_link,stickied,delete_account,delete_account_confirm,banned,creator,number_online,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default,recent_comments,theme,monero,by,to,transfer_community,transfer_site,are_you_sure,yes,no
sv | 95% | archive_link,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default
zh | 80% | cross_posts,cross_post,users,number_of_communities,preview,upload_image,formatting_help,view_source,sticky,unsticky,archive_link,settings,stickied,delete_account,delete_account_confirm,banned,creator,number_online,replies,mentions,forgot_password,reset_password_mail_sent,password_change,new_password,no_email_setup,language,browser_default,recent_comments,nsfw,show_nsfw,theme,monero,by,to,transfer_community,transfer_site,are_you_sure,yes,no
## Contact
If you'd like to update this report, run: - [Mastodon](https://mastodon.social/@LemmyDev) - [![Mastodon Follow](https://img.shields.io/mastodon/follow/810572?domain=https%3A%2F%2Fmastodon.social&style=social)](https://mastodon.social/@LemmyDev)
- [Matrix](https://riot.im/app/#/room/#rust-reddit-fediverse:matrix.org) - [![Matrix](https://img.shields.io/matrix/rust-reddit-fediverse:matrix.org.svg?label=matrix-chat)](https://riot.im/app/#/room/#rust-reddit-fediverse:matrix.org)
```bash - [GitHub](https://github.com/dessalines/lemmy)
cd ui - [Gitea](https://yerbamate.dev/dessalines/lemmy)
ts-node translation_report.ts > tmp # And replace the text above. - [GitLab](https://gitlab.com/dessalines/lemmy)
```
## Credits ## Credits

22
RELEASES.md vendored Normal file

@ -0,0 +1,22 @@
# Lemmy v0.6.0 Release (2020-01-16)
`v0.6.0` is here, and we've closed [41 issues!](https://github.com/dessalines/lemmy/milestone/15?closed=1)
This is the biggest release by far:
- Avatars!
- Optional Email notifications for username mentions, post and comment replies.
- Ability to change your password and email address.
- Can set a custom language.
- Lemmy-wide settings to disable downvotes, and close registration.
- A better documentation system, hosted in lemmy itself.
- [Huge DB performance gains](https://github.com/dessalines/lemmy/issues/411) (everything down to < `30ms`) by using materialized views.
- Fixed major issue with similar post URL and title searching.
- Upgraded to Actix `2.0`
- Faster comment / post voting.
- Better small screen support.
- Lots of bug fixes, refactoring of back end code.
Another major announcement is that Lemmy now has another lead developer besides me, [@felix@radical.town](https://radical.town/@felix). They've created a better documentation system, implemented RSS feeds, simplified the docker and project configs, upgraded actix, are working on federation, and a whole lot else.
https://dev.lemmy.ml

1
ansible/VERSION vendored Normal file

@ -0,0 +1 @@
v0.6.33

2
ansible/ansible.cfg vendored

@ -2,4 +2,4 @@
inventory=inventory inventory=inventory
[ssh_connection] [ssh_connection]
pipelining = True #pipelining = True

25
ansible/lemmy.yml vendored

@ -29,23 +29,20 @@
- { path: '/lemmy/' } - { path: '/lemmy/' }
- { path: '/lemmy/volumes/' } - { path: '/lemmy/volumes/' }
- name: add all template files - block:
template: src={{item.src}} dest={{item.dest}} - name: add template files
template: src={{item.src}} dest={{item.dest}} mode={{item.mode}}
with_items: with_items:
- { src: 'templates/env', dest: '/lemmy/.env' } - { src: 'templates/docker-compose.yml', dest: '/lemmy/docker-compose.yml', mode: '0600' }
- { src: '../docker/prod/docker-compose.yml', dest: '/lemmy/docker-compose.yml' } - { src: 'templates/nginx.conf', dest: '/etc/nginx/sites-enabled/lemmy.conf', mode: '0644' }
- { src: 'templates/nginx.conf', dest: '/etc/nginx/sites-enabled/lemmy.conf' } - { src: '../docker/iframely.config.local.js', dest: '/lemmy/iframely.config.local.js', mode: '0600' }
- name: add config file (only during initial setup)
template: src='templates/config.hjson' dest='/lemmy/lemmy.hjson' mode='0600' force='no' owner='1000' group='1000'
vars: vars:
postgres_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/postgres chars=ascii_letters,digits') }}" postgres_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/postgres chars=ascii_letters,digits') }}"
jwt_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/jwt chars=ascii_letters,digits') }}" jwt_password: "{{ lookup('password', 'passwords/{{ inventory_hostname }}/jwt chars=ascii_letters,digits') }}"
lemmy_docker_image: "dessalines/lemmy:{{ lookup('file', 'VERSION') }}"
- name: set env file permissions
file:
path: "/lemmy/.env"
state: touch
mode: 0600
access_time: preserve
modification_time: preserve
- name: enable and start docker service - name: enable and start docker service
systemd: systemd:
@ -67,4 +64,4 @@
special_time=daily special_time=daily
name=certbot-renew-lemmy name=certbot-renew-lemmy
user=root user=root
job="certbot certonly --nginx -d '{{ domain }}' --deploy-hook 'docker-compose -f /peertube/docker-compose.yml exec nginx nginx -s reload'" job="certbot certonly --nginx -d '{{ domain }}' --deploy-hook 'nginx -s reload'"

131
ansible/lemmy_dev.yml vendored Normal file

@ -0,0 +1,131 @@
---
- hosts: all
vars:
lemmy_docker_image: "lemmy:dev"
# Install python if required
# https://www.josharcher.uk/code/ansible-python-connection-failure-ubuntu-server-1604/
gather_facts: False
pre_tasks:
- name: install python for Ansible
raw: test -e /usr/bin/python || (apt -y update && apt install -y python-minimal python-setuptools)
args:
executable: /bin/bash
register: output
changed_when: output.stdout != ""
- setup: # gather facts
tasks:
# TODO: this task is running on all hosts at the same time so there is a race condition
- name: xxx
shell: |
mkdir -p "vars/{{ inventory_hostname }}/"
if [ ! -f "vars/{{ inventory_hostname }}/port_counter" ]; then
if [ -f "vars/max_port_counter" ]; then
MAX_PORT=$(cat vars/max_port_counter)
else
MAX_PORT=8000
fi
OUR_PORT=$(expr $MAX_PORT + 10)
echo $OUR_PORT > "vars/{{ inventory_hostname }}/port_counter"
echo $OUR_PORT > "vars/max_port_counter"
fi
cat "vars/{{ inventory_hostname }}/port_counter"
args:
executable: /bin/bash
delegate_to: localhost
register: lemmy_port
- set_fact: "lemmy_port={{ lemmy_port.stdout_lines[0] }}"
- set_fact: "pictshare_port={{ lemmy_port|int + 1 }}"
- set_fact: "iframely_port={{ lemmy_port|int + 2 }}"
- debug:
msg: "lemmy_port={{ lemmy_port }} pictshare_port={{pictshare_port}} iframely_port={{iframely_port}}"
- name: install dependencies
apt:
pkg: ['nginx', 'docker-compose', 'docker.io', 'certbot', 'python-certbot-nginx']
- name: request initial letsencrypt certificate
command: certbot certonly --nginx --agree-tos -d '{{ domain }}' -m '{{ letsencrypt_contact_email }}'
args:
creates: '/etc/letsencrypt/live/{{domain}}/privkey.pem'
# TODO: need to use different path per domain
- name: create lemmy folder
file: path={{item.path}} state=directory
with_items:
- { path: '/lemmy/{{ domain }}/' }
- { path: '/lemmy/{{ domain }}/volumes/' }
- { path: '/var/cache/lemmy/{{ domain }}/' }
- block:
- name: add template files
template: src={{item.src}} dest={{item.dest}} mode={{item.mode}}
with_items:
- { src: 'templates/docker-compose.yml', dest: '/lemmy/{{domain}}/docker-compose.yml', mode: '0600' }
- { src: 'templates/nginx.conf', dest: '/etc/nginx/sites-enabled/lemmy-{{ domain }}.conf', mode: '0644' }
- { src: '../docker/iframely.config.local.js', dest: '/lemmy/{{ domain }}/iframely.config.local.js', mode: '0600' }
- name: add config file (only during initial setup)
template: src='templates/config.hjson' dest='/lemmy/{{domain}}/lemmy.hjson' mode='0600' force='no' owner='1000' group='1000'
vars:
# TODO: these paths are changed, need to move the files
# TODO: not sure what to call the local var folder, it's not mentioned in the ansible docs
postgres_password: "{{ lookup('password', 'vars/{{ inventory_hostname }}/postgres_password chars=ascii_letters,digits') }}"
jwt_password: "{{ lookup('password', 'vars/{{ inventory_hostname }}/jwt_password chars=ascii_letters,digits') }}"
- name: build the dev docker image
local_action: shell cd .. && sudo docker build . -f docker/dev/Dockerfile -t lemmy:dev
register: image_build
- name: find hash of the new docker image
set_fact:
image_hash: "{{ image_build.stdout | regex_search('(?<=Successfully built )[0-9a-f]{12}') }}"
# this does not use become so that the output file is written as non-root user and is easy to delete later
- name: save dev docker image to file
local_action: shell sudo docker save lemmy:dev > lemmy-dev.tar
- name: copy dev docker image to server
copy: src=lemmy-dev.tar dest=/lemmy/lemmy-dev.tar
- name: import docker image
docker_image:
name: lemmy
tag: dev
load_path: /lemmy/lemmy-dev.tar
source: load
force_source: yes
register: image_import
- name: delete remote image file
file: path=/lemmy/lemmy-dev.tar state=absent
- name: delete local image file
local_action: file path=lemmy-dev.tar state=absent
- name: enable and start docker service
systemd:
name: docker
enabled: yes
state: started
# can't pull here because that fails due to lemmy:dev (without dessalines/) not being on docker hub, but that shouldn't
# be a problem for testing
- name: start docker-compose
docker_compose:
project_src: "/lemmy/{{ domain }}/"
state: present
recreate: always
ignore_errors: yes
- name: reload nginx with new config
shell: nginx -s reload
- name: certbot renewal cronjob
cron:
special_time=daily
name=certbot-renew-lemmy-{{ domain }}
user=root
job="certbot certonly --nginx -d '{{ domain }}' --deploy-hook 'nginx -s reload'"

14
ansible/templates/config.hjson vendored Normal file

@ -0,0 +1,14 @@
{
database: {
password: "{{ postgres_password }}"
host: "postgres"
}
hostname: "{{ domain }}"
jwt_secret: "{{ jwt_password }}"
front_end_dir: "/app/dist"
email: {
smtp_server: "postfix:25"
smtp_from_address: "noreply@{{ domain }}"
use_tls: false
}
}

46
ansible/templates/docker-compose.yml vendored Normal file

@ -0,0 +1,46 @@
version: '3.3'
services:
lemmy:
image: {{ lemmy_docker_image }}
ports:
- "127.0.0.1:{{ lemmy_port }}:8536"
restart: always
volumes:
- ./lemmy.hjson:/config/config.hjson:ro
depends_on:
- postgres
- pictshare
- iframely
postgres:
image: postgres:12-alpine
environment:
- POSTGRES_USER=lemmy
- POSTGRES_PASSWORD={{ postgres_password }}
- POSTGRES_DB=lemmy
volumes:
- ./volumes/postgres:/var/lib/postgresql/data
restart: always
pictshare:
image: shtripok/pictshare:latest
ports:
- "127.0.0.1:{{ pictshare_port }}:80"
volumes:
- ./volumes/pictshare:/usr/share/nginx/html/data
restart: always
iframely:
image: dogbin/iframely:latest
ports:
- "127.0.0.1:{{ iframely_port }}:80"
volumes:
- ./iframely.config.local.js:/iframely/config.local.js:ro
restart: always
postfix:
image: mwader/postfix-relay
environment:
- POSTFIX_myhostname={{ domain }}
restart: "always"

14
ansible/templates/env vendored

@ -1,14 +0,0 @@
DOMAIN={{ domain }}
DATABASE_PASSWORD={{ postgres_password }}
DATABASE_URL=postgres://lemmy:{{ postgres_password }}@lemmy_db:5432/lemmy
JWT_SECRET={{ jwt_password }}
RATE_LIMIT_MESSAGE=30
RATE_LIMIT_MESSAGE_PER_SECOND=60
RATE_LIMIT_POST=3
RATE_LIMIT_POST_PER_SECOND=600
RATE_LIMIT_REGISTER=3
RATE_LIMIT_REGISTER_PER_SECOND=3600
SMTP_SERVER={{ smtp_server }}
SMTP_LOGIN={{ smtp_login }}
SMTP_PASSWORD={{ smtp_password }}
SMTP_FROM_ADDRESS={{ smtp_from_address }}

View file

@ -1,3 +1,5 @@
proxy_cache_path /var/cache/lemmy/{{ domain }} levels=1:2 keys_zone=lemmy_frontend_cache_{{ domain }}:10m max_size=100m use_temp_path=off;
server { server {
listen 80; listen 80;
server_name {{ domain }}; server_name {{ domain }};
@ -50,7 +52,7 @@ server {
client_max_body_size 50M; client_max_body_size 50M;
location / { location / {
proxy_pass http://0.0.0.0:8536; proxy_pass http://0.0.0.0:{{ lemmy_port }};
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@ -59,19 +61,32 @@ server {
proxy_http_version 1.1; proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade; proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade"; proxy_set_header Connection "upgrade";
# Proxy Cache
proxy_cache lemmy_frontend_cache_{{ domain }};
proxy_cache_use_stale error timeout http_500 http_502 http_503 http_504;
proxy_cache_revalidate on;
proxy_cache_lock on;
proxy_cache_min_uses 5;
} }
location /pictshare/ { location /pictshare/ {
proxy_pass http://0.0.0.0:8537/; proxy_pass http://0.0.0.0:{{ pictshare_port }}/;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
if ($request_uri ~ \.(?:ico|gif|jpe?g|png|webp|bmp|mp4)$) { if ($request_uri ~ \.(?:ico|gif|jpe?g|png|webp|bmp|mp4)$) {
add_header Cache-Control "public"; add_header Cache-Control "public, max-age=31536000, immutable";
expires max;
} }
} }
location /iframely/ {
proxy_pass http://0.0.0.0:{{ iframely_port }}/;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
} }
# Anonymize IP addresses # Anonymize IP addresses
@ -83,6 +98,6 @@ map $remote_addr $remote_addr_anon {
::1 $remote_addr; ::1 $remote_addr;
default 0.0.0.0; default 0.0.0.0;
} }
log_format main '$remote_addr_anon - $remote_user [$time_local] "$request" ' log_format main_{{ domain }} '$remote_addr_anon - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" "$http_user_agent"'; '$status $body_bytes_sent "$http_referer" "$http_user_agent"';
access_log /dev/stdout main; access_log /var/log/nginx/access.log main_{{ domain }};

48
ansible/uninstall.yml vendored Normal file

@ -0,0 +1,48 @@
---
- hosts: all
vars_prompt:
- name: confirm_uninstall
prompt: "Do you really want to uninstall Lemmy? This will delete all data and can not be reverted [yes/no]"
private: no
- name: delete_certs
prompt: "Delete certificates? Select 'no' if you want to reinstall Lemmy [yes/no]"
private: no
tasks:
- name: end play if no confirmation was given
debug:
msg: "Uninstall cancelled, doing nothing"
when: not confirm_uninstall|bool
- meta: end_play
when: not confirm_uninstall|bool
- name: stop docker-compose
docker_compose:
project_src: /lemmy/
state: absent
- name: delete data
file: path={{item.path}} state=absent
with_items:
- { path: '/lemmy/' }
- { path: '/etc/nginx/sites-enabled/lemmy.conf' }
- name: Remove a volume
docker_volume: name={{item.name}} state=absent
with_items:
- { name: 'lemmy_lemmy_db' }
- { name: 'lemmy_lemmy_pictshare' }
- name: delete entire ecloud folder
file: path='/mnt/repo-base/' state=absent
when: delete_certs|bool
- name: remove certbot cronjob
cron:
name=certbot-renew-lemmy
state=absent

17
docker/dev/.env vendored

@ -1,17 +0,0 @@
DOMAIN=my_domain
DATABASE_PASSWORD=password
DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
JWT_SECRET=changeme
RATE_LIMIT_MESSAGE=30
RATE_LIMIT_MESSAGE_PER_SECOND=60
RATE_LIMIT_POST=6
RATE_LIMIT_POST_PER_SECOND=600
RATE_LIMIT_REGISTER=3
RATE_LIMIT_REGISTER_PER_SECOND=3600
# Optional email fields
SMTP_SERVER=
SMTP_LOGIN=
SMTP_PASSWORD=
SMTP_FROM_ADDRESS=Domain.com Lemmy Admin <notifications@domain.com>

13
docker/dev/Dockerfile vendored

@ -10,7 +10,7 @@ RUN yarn install --pure-lockfile
COPY ui /app/ui COPY ui /app/ui
RUN yarn build RUN yarn build
FROM ekidd/rust-musl-builder:1.38.0-openssl11 as rust FROM ekidd/rust-musl-builder:1.40.0-openssl11 as rust
# Cache deps # Cache deps
WORKDIR /app WORKDIR /app
@ -32,14 +32,25 @@ RUN cargo build --frozen --release
# Get diesel-cli on there just in case # Get diesel-cli on there just in case
# RUN cargo install diesel_cli --no-default-features --features postgres # RUN cargo install diesel_cli --no-default-features --features postgres
FROM ekidd/rust-musl-builder:1.40.0-openssl11 as docs
WORKDIR /app
COPY docs ./docs
RUN sudo chown -R rust:rust .
RUN mdbook build docs/
FROM alpine:3.10 FROM alpine:3.10
# Install libpq for postgres # Install libpq for postgres
RUN apk add libpq RUN apk add libpq
# Copy resources # Copy resources
COPY server/config/defaults.hjson /config/defaults.hjson
COPY --from=rust /app/server/target/x86_64-unknown-linux-musl/release/lemmy_server /app/lemmy COPY --from=rust /app/server/target/x86_64-unknown-linux-musl/release/lemmy_server /app/lemmy
COPY --from=docs /app/docs/book/ /app/dist/documentation/
COPY --from=node /app/ui/dist /app/dist COPY --from=node /app/ui/dist /app/dist
RUN addgroup -g 1000 lemmy RUN addgroup -g 1000 lemmy
RUN adduser -D -s /bin/sh -u 1000 -G lemmy lemmy RUN adduser -D -s /bin/sh -u 1000 -G lemmy lemmy
RUN chown lemmy:lemmy /app/lemmy RUN chown lemmy:lemmy /app/lemmy

View file

@ -15,7 +15,7 @@ RUN yarn build
FROM multiarch/qemu-user-static as qemu FROM multiarch/qemu-user-static as qemu
FROM arm64v8/rust:1.37-buster as rust FROM arm64v8/rust:1.40-buster as rust
COPY --from=qemu /usr/bin/qemu-aarch64-static /usr/bin COPY --from=qemu /usr/bin/qemu-aarch64-static /usr/bin
#COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin #COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin
@ -69,6 +69,7 @@ RUN addgroup --gid 1000 lemmy
RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy
# Copy resources # Copy resources
COPY server/config/defaults.hjson /config/defaults.hjson
COPY --from=rust /app/server/ready /app/lemmy COPY --from=rust /app/server/ready /app/lemmy
COPY --from=node /app/ui/dist /app/dist COPY --from=node /app/ui/dist /app/dist

View file

@ -69,6 +69,7 @@ RUN addgroup --gid 1000 lemmy
RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy
# Copy resources # Copy resources
COPY server/config/defaults.hjson /config/defaults.hjson
COPY --from=rust /app/server/ready /app/lemmy COPY --from=rust /app/server/ready /app/lemmy
COPY --from=node /app/ui/dist /app/dist COPY --from=node /app/ui/dist /app/dist

View file

@ -20,10 +20,11 @@ COPY ui /app/ui
RUN yarn build RUN yarn build
FROM rust:1.37 as rust FROM rust:1.40 as rust
# Cache deps # Cache deps
WORKDIR /app WORKDIR /app
RUN USER=root cargo new server RUN USER=root cargo new server
WORKDIR /app/server WORKDIR /app/server
COPY server/Cargo.toml server/Cargo.lock ./ COPY server/Cargo.toml server/Cargo.lock ./
@ -31,24 +32,35 @@ RUN mkdir -p ./src/bin \
&& echo 'fn main() { println!("Dummy") }' > ./src/bin/main.rs && echo 'fn main() { println!("Dummy") }' > ./src/bin/main.rs
#RUN cargo build --release RUN cargo build --release
RUN cargo build && \ #RUN cargo build && \
rm -f ./target/release/deps/lemmy_server* ; rm -f ./target/debug/deps/lemmy_server* # rm -f ./target/release/deps/lemmy_server* ; rm -f ./target/debug/deps/lemmy_server*
COPY server/src ./src/ COPY server/src ./src/
COPY server/migrations ./migrations/ COPY server/migrations ./migrations/
# build for release # build for release
# workaround for https://github.com/rust-lang/rust/issues/62896 # workaround for https://github.com/rust-lang/rust/issues/62896
#RUN RUSTFLAGS='-Ccodegen-units=1' cargo build --release #RUN RUSTFLAGS='-Ccodegen-units=1' cargo build --release
#RUN cargo build --release --frozen RUN cargo build --release --frozen
RUN cargo build --frozen #RUN cargo build --frozen
# Get diesel-cli on there just in case # Get diesel-cli on there just in case
# RUN cargo install diesel_cli --no-default-features --features postgres # RUN cargo install diesel_cli --no-default-features --features postgres
# make result place always the same for lemmy container # make result place always the same for lemmy container
#RUN cp /app/server/target/release/lemmy_server /app/server/ready RUN cp /app/server/target/release/lemmy_server /app/server/ready
RUN cp /app/server/target/debug/lemmy_server /app/server/ready #RUN cp /app/server/target/debug/lemmy_server /app/server/ready
FROM rust:1.40 as docs
WORKDIR /app
# Build docs
COPY docs ./docs
RUN cargo install mdbook
RUN mdbook build docs/
#FROM alpine:3.10 #FROM alpine:3.10
@ -65,8 +77,11 @@ RUN addgroup --gid 1000 lemmy
RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy RUN adduser --disabled-password --shell /bin/sh --uid 1000 --ingroup lemmy lemmy
# Copy resources # Copy resources
COPY --from=rust /app/server/ready /app/lemmy COPY server/config/defaults.hjson /config/defaults.hjson
COPY --from=node /app/ui/dist /app/dist COPY --from=node /app/ui/dist /app/dist
COPY --from=docs /app/docs/book/ /app/dist/documentation/
COPY --from=rust /app/server/ready /app/lemmy
RUN chown lemmy:lemmy /app/lemmy RUN chown lemmy:lemmy /app/lemmy
USER lemmy USER lemmy
EXPOSE 8536 EXPOSE 8536

42
docker/dev/deploy.sh vendored

@ -1,29 +1,36 @@
#!/bin/sh #!/bin/sh
git checkout master git checkout master
# Import translations
git fetch weblate
git merge weblate/master
# Creating the new tag # Creating the new tag
new_tag="$1" new_tag="$1"
git tag $new_tag third_semver=$(echo $new_tag | cut -d "." -f 3)
# Setting the version on the front end # Setting the version on the front end
cd ../../ cd ../../
echo "export let version: string = '$(git describe --tags)';" > "ui/src/version.ts" echo "export const version: string = '$new_tag';" > "ui/src/version.ts"
git add "ui/src/version.ts" git add "ui/src/version.ts"
# Setting the version on the backend # Setting the version on the backend
echo "pub const VERSION: &'static str = \"$(git describe --tags)\";" > "server/src/version.rs" echo "pub const VERSION: &str = \"$new_tag\";" > "server/src/version.rs"
git add "server/src/version.rs" git add "server/src/version.rs"
# Setting the version for Ansible
echo $new_tag > "ansible/VERSION"
git add "ansible/VERSION"
cd docker/dev cd docker/dev || exit
# Changing the docker-compose prod # Changing the docker-compose prod
sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../prod/docker-compose.yml sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../prod/docker-compose.yml
sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../../ansible/templates/docker-compose.yml
git add ../prod/docker-compose.yml git add ../prod/docker-compose.yml
git add ../../ansible/templates/docker-compose.yml
# The commit # The commit
git commit -m"Version $new_tag" git commit -m"Version $new_tag"
git tag $new_tag
# Registering qemu binaries
docker run --rm --privileged multiarch/qemu-user-static:register --reset
# Rebuilding docker # Rebuilding docker
docker-compose build docker-compose build
@ -38,14 +45,25 @@ docker push dessalines/lemmy:x64-$new_tag
# docker push dessalines/lemmy:armv7hf-$new_tag # docker push dessalines/lemmy:armv7hf-$new_tag
# aarch64 # aarch64
docker build -t lemmy:aarch64 -f Dockerfile.aarch64 ../../ # Only do this on major releases (IE the third semver is 0)
docker tag lemmy:aarch64 dessalines/lemmy:arm64-$new_tag if [ $third_semver -eq 0 ]; then
docker push dessalines/lemmy:arm64-$new_tag # Registering qemu binaries
docker run --rm --privileged multiarch/qemu-user-static:register --reset
docker build -t lemmy:aarch64 -f Dockerfile.aarch64 ../../
docker tag lemmy:aarch64 dessalines/lemmy:arm64-$new_tag
docker push dessalines/lemmy:arm64-$new_tag
fi
# Creating the manifest for the multi-arch build # Creating the manifest for the multi-arch build
docker manifest create dessalines/lemmy:$new_tag \ if [ $third_semver -eq 0 ]; then
docker manifest create dessalines/lemmy:$new_tag \
dessalines/lemmy:x64-$new_tag \ dessalines/lemmy:x64-$new_tag \
dessalines/lemmy:arm64-$new_tag dessalines/lemmy:arm64-$new_tag
else
docker manifest create dessalines/lemmy:$new_tag \
dessalines/lemmy:x64-$new_tag
fi
docker manifest push dessalines/lemmy:$new_tag docker manifest push dessalines/lemmy:$new_tag
@ -54,5 +72,5 @@ git push origin $new_tag
git push git push
# Pushing to any ansible deploys # Pushing to any ansible deploys
cd ../../ansible cd ../../ansible || exit
ansible-playbook lemmy.yml --become ansible-playbook lemmy.yml --become

View file

@ -7,3 +7,6 @@ git checkout dev
docker-compose build docker-compose build
docker tag dev_lemmy:latest dessalines/lemmy:dev docker tag dev_lemmy:latest dessalines/lemmy:dev
docker push dessalines/lemmy:dev docker push dessalines/lemmy:dev
# SSH and pull it
ssh $LEMMY_USER@$LEMMY_HOST "cd ~/git/lemmy/docker/dev && docker pull dessalines/lemmy:dev && docker-compose up -d"

View file

@ -1,46 +1,42 @@
version: '3.3' version: '3.3'
services: services:
lemmy_db: postgres:
image: postgres:12-alpine image: postgres:12-alpine
environment: environment:
- POSTGRES_USER=lemmy - POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=${DATABASE_PASSWORD} - POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy - POSTGRES_DB=lemmy
volumes: volumes:
- lemmy_db:/var/lib/postgresql/data - ./volumes/postgres:/var/lib/postgresql/data
restart: always restart: always
lemmy: lemmy:
build: build:
context: ../../ context: ../../
dockerfile: docker/dev/Dockerfile dockerfile: docker/dev/Dockerfile
ports: ports:
- "127.0.0.1:8536:8536" - "127.0.0.1:8536:8536"
environment:
- LEMMY_FRONT_END_DIR=/app/dist
- DATABASE_URL=${DATABASE_URL}
- JWT_SECRET=${JWT_SECRET}
- HOSTNAME=${DOMAIN}
- RATE_LIMIT_MESSAGE=${RATE_LIMIT_MESSAGE}
- RATE_LIMIT_MESSAGE_PER_SECOND=${RATE_LIMIT_MESSAGE_PER_SECOND}
- RATE_LIMIT_POST=${RATE_LIMIT_POST}
- RATE_LIMIT_POST_PER_SECOND=${RATE_LIMIT_POST_PER_SECOND}
- RATE_LIMIT_REGISTER=${RATE_LIMIT_REGISTER}
- RATE_LIMIT_REGISTER_PER_SECOND=${RATE_LIMIT_REGISTER_PER_SECOND}
- SMTP_SERVER=${SMTP_SERVER}
- SMTP_LOGIN=${SMTP_LOGIN}
- SMTP_PASSWORD=${SMTP_PASSWORD}
- SMTP_FROM_ADDRESS=${SMTP_FROM_ADDRESS}
restart: always restart: always
volumes:
- ../lemmy.hjson:/config/config.hjson:ro
depends_on: depends_on:
- lemmy_db - postgres
lemmy_pictshare: - pictshare
- iframely
pictshare:
image: shtripok/pictshare:latest image: shtripok/pictshare:latest
ports: ports:
- "127.0.0.1:8537:80" - "127.0.0.1:8537:80"
volumes: volumes:
- lemmy_pictshare:/usr/share/nginx/html/data - ./volumes/pictshare:/usr/share/nginx/html/data
restart: always
iframely:
image: dogbin/iframely:latest
ports:
- "127.0.0.1:8061:80"
volumes:
- ../iframely.config.local.js:/iframely/config.local.js:ro
restart: always restart: always
volumes:
lemmy_db:
lemmy_pictshare:

283
docker/iframely.config.local.js vendored Normal file

@ -0,0 +1,283 @@
(function() {
var config = {
// Specify a path for custom plugins. Custom plugins will override core plugins.
// CUSTOM_PLUGINS_PATH: __dirname + '/yourcustom-plugin-folder',
DEBUG: false,
RICH_LOG_ENABLED: false,
// For embeds that require render, baseAppUrl will be used as the host.
baseAppUrl: "http://yourdomain.com",
relativeStaticUrl: "/r",
// Or just skip built-in renders altogether
SKIP_IFRAMELY_RENDERS: true,
// For legacy reasons the response format of Iframely open-source is
// different by default as it does not group the links array by rel.
// In order to get the same grouped response as in Cloud API,
// add `&group=true` to your request to change response per request
// or set `GROUP_LINKS` in your config to `true` for a global change.
GROUP_LINKS: true,
// Number of maximum redirects to follow before aborting the page
// request with `redirect loop` error.
MAX_REDIRECTS: 4,
SKIP_OEMBED_RE_LIST: [
// /^https?:\/\/yourdomain\.com\//,
],
/*
// Used to pass parameters to the generate functions when creating HTML elements
// disableSizeWrapper: Don't wrap element (iframe, video, etc) in a positioned div
GENERATE_LINK_PARAMS: {
disableSizeWrapper: true
},
*/
port: 80, //can be overridden by PORT env var
host: '0.0.0.0', // Dockers beware. See https://github.com/itteco/iframely/issues/132#issuecomment-242991246
//can be overridden by HOST env var
// Optional SSL cert, if you serve under HTTPS.
/*
ssl: {
key: require('fs').readFileSync(__dirname + '/key.pem'),
cert: require('fs').readFileSync(__dirname + '/cert.pem'),
port: 443
},
*/
/*
Supported cache engines:
- no-cache - no caching will be used.
- node-cache - good for debug, node memory will be used (https://github.com/tcs-de/nodecache).
- redis - https://github.com/mranney/node_redis.
- memcached - https://github.com/3rd-Eden/node-memcached
*/
CACHE_ENGINE: 'node-cache',
CACHE_TTL: 0, // In seconds.
// 0 = 'never expire' for memcached & node-cache to let cache engine decide itself when to evict the record
// 0 = 'no cache' for redis. Use high enough (e.g. 365*24*60*60*1000) ttl for similar 'never expire' approach instead
/*
// Redis cache options.
REDIS_OPTIONS: {
host: '127.0.0.1',
port: 6379
},
*/
/*
// Memcached options. See https://github.com/3rd-Eden/node-memcached#server-locations
MEMCACHED_OPTIONS: {
locations: "127.0.0.1:11211"
}
*/
/*
// Access-Control-Allow-Origin list.
allowedOrigins: [
"*",
"http://another_domain.com"
],
*/
/*
// Uncomment to enable plugin testing framework.
tests: {
mongodb: 'mongodb://localhost:27017/iframely-tests',
single_test_timeout: 10 * 1000,
plugin_test_period: 2 * 60 * 60 * 1000,
relaunch_script_period: 5 * 60 * 1000
},
*/
// If there's no response from remote server, the timeout will occur after
RESPONSE_TIMEOUT: 5 * 1000, //ms
/* From v1.4.0, Iframely supports HTTP/2 by default. Disable it, if you'd rather not.
Alternatively, you can also disable per origin. See `proxy` option below.
*/
// DISABLE_HTTP2: true,
// Customize API calls to oembed endpoints.
ADD_OEMBED_PARAMS: [{
// Endpoint url regexp array.
re: [/^http:\/\/api\.instagram\.com\/oembed/],
// Custom get params object.
params: {
hidecaption: true
}
}, {
re: [/^https:\/\/www\.facebook\.com\/plugins\/page\/oembed\.json/i],
params: {
show_posts: 0,
show_facepile: 0,
maxwidth: 600
}
}, {
// match i=user or i=moment or i=timeline to configure these types individually
// see params spec at https://dev.twitter.com/web/embedded-timelines/oembed
re: [/^https?:\/\/publish\.twitter\.com\/oembed\?i=user/i],
params: {
limit: 1,
maxwidth: 600
}
/*
}, {
// Facebook https://developers.facebook.com/docs/plugins/oembed-endpoints
re: [/^https:\/\/www\.facebook\.com\/plugins\/\w+\/oembed\.json/i],
params: {
// Skip script tag and fb-root div.
omitscript: true
}
*/
}],
/*
// Configure use of HTTP proxies as needed.
// You don't have to specify all options per regex - just what you need to override
PROXY: [{
re: [/^https?:\/\/www\.domain\.com/],
proxy_server: 'http://1.2.3.4:8080',
user_agent: 'CHANGE YOUR AGENT',
headers: {
// HTTP headers
// Overrides previous params if overlapped.
},
request_options: {
// Refer to: https://github.com/request/request
// Overrides previous params if overlapped.
},
disable_http2: true
}],
*/
// Customize API calls to 3rd parties. At the very least - configure required keys.
providerOptions: {
locale: "en_US", // ISO 639-1 two-letter language code, e.g. en_CA or fr_CH.
// Will be added as highest priority in accept-language header with each request.
// Plus is used in FB, YouTube and perhaps other plugins
"twitter": {
"max-width": 550,
"min-width": 250,
hide_media: false,
hide_thread: false,
omit_script: false,
center: false,
// dnt: true,
cache_ttl: 100 * 365 * 24 * 3600 // 100 Years.
},
readability: {
enabled: false
// allowPTagDescription: true // to enable description fallback to first paragraph
},
images: {
loadSize: false, // if true, will try and load first bytes of all images to get/confirm the sizes
checkFavicon: false // if true, will verify all favicons
},
tumblr: {
consumer_key: "INSERT YOUR VALUE"
// media_only: true // disables status embeds for images and videos - will return plain media
},
google: {
// https://developers.google.com/maps/documentation/embed/guide#api_key
maps_key: "INSERT YOUR VALUE"
},
/*
// Optional Camo Proxy to wrap all images: https://github.com/atmos/camo
camoProxy: {
camo_proxy_key: "INSERT YOUR VALUE",
camo_proxy_host: "INSERT YOUR VALUE"
// ssl_only: true // will only proxy non-ssl images
},
*/
// List of query parameters to add to YouTube and Vimeo frames
// Start it with leading "?". Or omit altogether for default values
// API key is optional, youtube will work without it too.
// It is probably the same API key you use for Google Maps.
youtube: {
// api_key: "INSERT YOUR VALUE",
get_params: "?rel=0&showinfo=1" // https://developers.google.com/youtube/player_parameters
},
vimeo: {
get_params: "?byline=0&badge=0" // https://developer.vimeo.com/player/embedding
},
/*
soundcloud: {
old_player: true // enables classic player
},
giphy: {
media_only: true // disables branded player for gifs and returns just the image
}
*/
/*
bandcamp: {
get_params: '/size=large/bgcol=333333/linkcol=ffffff/artwork=small/transparent=true/',
media: {
album: {
height: 472,
'max-width': 700
},
track: {
height: 120,
'max-width': 700
}
}
}
*/
},
// WHITELIST_WILDCARD, if present, will be added to whitelist as record for top level domain: "*"
// with it, you can define what parsers do when they run across an unknown publisher.
// If absent or empty, all generic media parsers will be disabled except for known domains
// More about format: https://iframely.com/docs/qa-format
/*
WHITELIST_WILDCARD: {
"twitter": {
"player": "allow",
"photo": "deny"
},
"oembed": {
"video": "allow",
"photo": "allow",
"rich": "deny",
"link": "deny"
},
"og": {
"video": ["allow", "ssl", "responsive"]
},
"iframely": {
"survey": "allow",
"reader": "allow",
"player": "allow",
"image": "allow"
},
"html-meta": {
"video": ["allow", "responsive"],
"promo": "allow"
}
}
*/
// Black-list any of the inappropriate domains. Iframely will return 417
// At minimum, keep your localhosts blacklisted to avoid SSRF
BLACKLIST_DOMAINS_RE: [
/^https?:\/\/127\.0\.0\.1/i,
/^https?:\/\/localhost/i,
// And this is AWS metadata service
// https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
/^https?:\/\/169\.254\.169\.254/
]
};
module.exports = config;
})();
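Once the container is running, this config can be smoke-tested from the host. The snippet below is a sketch only: it assumes the `127.0.0.1:8061:80` port mapping from `docker/dev/docker-compose.yml` and Iframely's usual `/iframely?url=` endpoint, and `group=true` is the per-request switch described in the comments above (redundant here, since `GROUP_LINKS` is already `true`).
```bash
# Sketch: ask the self-hosted Iframely container for embed data about a URL.
# Endpoint path and port mapping are assumptions; adjust to your compose setup.
curl 'http://127.0.0.1:8061/iframely?url=https://www.youtube.com/watch?v=pWB5JZRGl0U&group=true'
```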

View file

@ -14,13 +14,13 @@ spec:
spec: spec:
containers: containers:
- env: - env:
- name: DATABASE_URL - name: LEMMY_DATABASE_URL
# example: 'postgres://lemmy:password@db:5432/lemmy' # example: 'postgres://lemmy:password@db:5432/lemmy'
value: CHANGE_ME value: CHANGE_ME
- name: HOSTNAME - name: LEMMY_HOSTNAME
# example: 'lemmy.example.com' # example: 'lemmy.example.com'
value: CHANGE_ME value: CHANGE_ME
- name: JWT_SECRET - name: LEMMY_JWT_SECRET
# example: 'very-super-good-secret' # example: 'very-super-good-secret'
value: CHANGE_ME value: CHANGE_ME
- name: LEMMY_FRONT_END_DIR - name: LEMMY_FRONT_END_DIR

docker/lemmy.hjson vendored Normal file

@ -0,0 +1,56 @@
{
database: {
# username to connect to postgres
user: "lemmy"
# password to connect to postgres
password: "password"
# host where postgres is running
host: "postgres"
# port where postgres can be accessed
port: 5432
# name of the postgres database for lemmy
database: "lemmy"
# maximum number of active sql connections
pool_size: 5
}
# the domain name of your instance (eg "dev.lemmy.ml")
hostname: "my_domain"
# address where lemmy should listen for incoming requests
bind: "0.0.0.0"
# port where lemmy should listen for incoming requests
port: 8536
# json web token for authorization between server and client
jwt_secret: "changeme"
# The dir for the front end
front_end_dir: "/app/dist"
# whether to enable activitypub federation. this feature is in alpha, do not enable it in production, as it might
# cause problems like remote instances fetching and permanently storing bad data.
federation_enabled: false
# rate limits for various user actions, by user ip
rate_limit: {
# maximum number of messages created in interval
message: 180
# interval length for message limit
message_per_second: 60
# maximum number of posts created in interval
post: 6
# interval length for post limit
post_per_second: 600
# maximum number of registrations in interval
register: 3
# interval length for registration limit
register_per_second: 3600
}
# # email sending configuration
# email: {
# # hostname of the smtp server
# smtp_server: ""
# # login name for smtp server
# smtp_login: ""
# # password to login to the smtp server
# smtp_password: ""
# # address to send emails from, eg "info@your-instance.com"
# smtp_from_address: ""
# }
}

docker/prod/.env vendored

@ -1,17 +0,0 @@
DOMAIN=my_domain
DATABASE_PASSWORD=password
DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
JWT_SECRET=changeme
RATE_LIMIT_MESSAGE=30
RATE_LIMIT_MESSAGE_PER_SECOND=60
RATE_LIMIT_POST=6
RATE_LIMIT_POST_PER_SECOND=600
RATE_LIMIT_REGISTER=3
RATE_LIMIT_REGISTER_PER_SECOND=3600
# Optional email fields
SMTP_SERVER=
SMTP_LOGIN=
SMTP_PASSWORD=
SMTP_FROM_ADDRESS=Domain.com Lemmy Admin <notifications@domain.com>


@ -1,44 +1,40 @@
version: '3.3' version: '3.3'
services: services:
lemmy_db: postgres:
image: postgres:12-alpine image: postgres:12-alpine
environment: environment:
- POSTGRES_USER=lemmy - POSTGRES_USER=lemmy
- POSTGRES_PASSWORD=${DATABASE_PASSWORD} - POSTGRES_PASSWORD=password
- POSTGRES_DB=lemmy - POSTGRES_DB=lemmy
volumes: volumes:
- lemmy_db:/var/lib/postgresql/data - ./volumes/postgres:/var/lib/postgresql/data
restart: always restart: always
lemmy: lemmy:
image: dessalines/lemmy:v0.5.0.3 image: dessalines/lemmy:v0.6.33
ports: ports:
- "127.0.0.1:8536:8536" - "127.0.0.1:8536:8536"
environment:
- LEMMY_FRONT_END_DIR=/app/dist
- DATABASE_URL=${DATABASE_URL}
- JWT_SECRET=${JWT_SECRET}
- HOSTNAME=${DOMAIN}
- RATE_LIMIT_MESSAGE=${RATE_LIMIT_MESSAGE}
- RATE_LIMIT_MESSAGE_PER_SECOND=${RATE_LIMIT_MESSAGE_PER_SECOND}
- RATE_LIMIT_POST=${RATE_LIMIT_POST}
- RATE_LIMIT_POST_PER_SECOND=${RATE_LIMIT_POST_PER_SECOND}
- RATE_LIMIT_REGISTER=${RATE_LIMIT_REGISTER}
- RATE_LIMIT_REGISTER_PER_SECOND=${RATE_LIMIT_REGISTER_PER_SECOND}
- SMTP_SERVER=${SMTP_SERVER}
- SMTP_LOGIN=${SMTP_LOGIN}
- SMTP_PASSWORD=${SMTP_PASSWORD}
- SMTP_FROM_ADDRESS=${SMTP_FROM_ADDRESS}
restart: always restart: always
volumes:
- ./lemmy.hjson:/config/config.hjson:ro
depends_on: depends_on:
- lemmy_db - postgres
lemmy_pictshare: - pictshare
- iframely
pictshare:
image: shtripok/pictshare:latest image: shtripok/pictshare:latest
ports: ports:
- "127.0.0.1:8537:80" - "127.0.0.1:8537:80"
volumes: volumes:
- lemmy_pictshare:/usr/share/nginx/html/data - ./volumes/pictshare:/usr/share/nginx/html/data
restart: always
iframely:
image: dogbin/iframely:latest
ports:
- "127.0.0.1:8061:80"
volumes:
- ./iframely.config.local.js:/iframely/config.local.js:ro
restart: always restart: always
volumes:
lemmy_db:
lemmy_pictshare:
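With the updated compose file in place, bringing the stack up and checking that all four services (postgres, lemmy, pictshare, iframely) are healthy is just standard docker-compose usage, for example:

```bash
# Start (or update) the whole stack in the background
docker-compose up -d

# List the services and their state
docker-compose ps

# Tail the lemmy logs while testing http://localhost:8536
docker-compose logs -f lemmy
```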

docs/.gitignore vendored Normal file

@ -0,0 +1 @@
book

docs/book.toml vendored Normal file

@ -0,0 +1,6 @@
[book]
authors = ["Felix Ableitner"]
language = "en"
multilingual = false
src = "src"
title = "Lemmy Documentation"

docs/src/SUMMARY.md vendored Normal file

@ -0,0 +1,18 @@
# Summary
- [About](about.md)
- [Features](about_features.md)
- [Goals](about_goals.md)
- [Post and Comment Ranking](about_ranking.md)
- [Guide](about_guide.md)
- [Administration](administration.md)
- [Install with Docker](administration_install_docker.md)
- [Install with Ansible](administration_install_ansible.md)
- [Install with Kubernetes](administration_install_kubernetes.md)
- [Configuration](administration_configuration.md)
- [Contributing](contributing.md)
- [Docker Development](contributing_docker_development.md)
- [Local Development](contributing_local_development.md)
- [Websocket/HTTP API](contributing_websocket_http_api.md)
- [ActivityPub API Outline](contributing_apub_api_outline.md)
- [Lemmy Council](lemmy_council.md)

docs/src/about.md vendored Normal file

@ -0,0 +1,30 @@
## About The Project
Front Page|Post
---|---
![main screen](https://i.imgur.com/kZSRcRu.png)|![chat screen](https://i.imgur.com/4XghNh6.png)
[Lemmy](https://github.com/dessalines/lemmy) is similar to sites like [Reddit](https://reddit.com), [Lobste.rs](https://lobste.rs), [Raddle](https://raddle.me), or [Hacker News](https://news.ycombinator.com/): you subscribe to forums you're interested in, post links and discussions, then vote, and comment on them. Behind the scenes, it is very different; anyone can easily run a server, and all these servers are federated (think email), and connected to the same universe, called the [Fediverse](https://en.wikipedia.org/wiki/Fediverse).
For a link aggregator, this means a user registered on one server can subscribe to forums on any other server, and can have discussions with users registered elsewhere.
The overall goal is to create an easily self-hostable, decentralized alternative to reddit and other link aggregators, outside of their corporate control and meddling.
Each Lemmy server can set its own moderation policy, appointing site-wide admins and community moderators to keep out trolls and foster a healthy, non-toxic environment where all can feel comfortable contributing.
*Note: Federation is still in active development*
### Why's it called Lemmy?
- Lead singer from [Motörhead](https://invidio.us/watch?v=pWB5JZRGl0U).
- The old school [video game](<https://en.wikipedia.org/wiki/Lemmings_(video_game)>).
- The [Koopa from Super Mario](https://www.mariowiki.com/Lemmy_Koopa).
- The [furry rodents](http://sunchild.fpwc.org/lemming-the-little-giant-of-the-north/).
### Built With
- [Rust](https://www.rust-lang.org)
- [Actix](https://actix.rs/)
- [Diesel](http://diesel.rs/)
- [Inferno](https://infernojs.org)
- [Typescript](https://www.typescriptlang.org/)

docs/src/about_features.md vendored Normal file

@ -0,0 +1,34 @@
# Features
- Open source, [AGPL License](/LICENSE).
- Self hostable, easy to deploy.
- Comes with [Docker](#docker), [Ansible](#ansible), [Kubernetes](#kubernetes).
- Clean, mobile-friendly interface.
- Only a minimum of a username and password is required to sign up!
- User avatar support.
- Live-updating Comment threads.
- Full vote scores `(+/-)` like old reddit.
- Themes, including light, dark, and solarized.
- Emojis with autocomplete support. Start typing `:`
- User tagging using `@`, Community tagging using `#`.
- Integrated image uploading in both posts and comments.
- A post can consist of a title and any combination of self text, a URL, or nothing else.
- Notifications on comment replies and when you're tagged.
- Notifications can be sent via email.
- i18n / internationalization support.
- RSS / Atom feeds for `All`, `Subscribed`, `Inbox`, `User`, and `Community`.
- Cross-posting support.
- A *similar post search* when creating new posts. Great for question / answer communities.
- Moderation abilities.
- Public Moderation Logs.
- Can sticky posts to the top of communities.
- Both site admins, and community moderators, who can appoint other moderators.
- Can lock, remove, and restore posts and comments.
- Can ban and unban users from communities and the site.
- Can transfer site and communities to others.
- Can fully erase your data, replacing all posts and comments.
- NSFW post / community support.
- High performance.
- Server is written in rust.
- Front end is `~80kB` gzipped.
- Supports arm64 / Raspberry Pi.


@ -1,4 +1,5 @@
# Goals # Goals
- Come up with a name / codename. - Come up with a name / codename.
- Must have communities. - Must have communities.
- Must have threaded comments. - Must have threaded comments.
@ -7,6 +8,7 @@
- Use websockets for post / gets to your own instance. - Use websockets for post / gets to your own instance.
# Questions # Questions
- How does voting work? Should we go back to the old way of showing up and downvote counts? Or just a score? - How does voting work? Should we go back to the old way of showing up and downvote counts? Or just a score?
- Decide on tech to be used - Decide on tech to be used
- Backend: Actix, Diesel. - Backend: Actix, Diesel.
@ -17,10 +19,7 @@
- On mobile, allow you to switch between them. Default? - On mobile, allow you to switch between them. Default?
# Resources / Potential Libraries # Resources / Potential Libraries
- Use the [activitypub crate.](https://docs.rs/activitypub/0.1.4/activitypub/)
- https://docs.rs/activitypub/0.1.4/activitypub/
- [Activitypub vocab.](https://www.w3.org/TR/activitystreams-vocabulary/)
- [Activitypub main](https://www.w3.org/TR/activitypub/)
- [Diesel to Postgres data types](https://kotiri.com/2018/01/31/postgresql-diesel-rust-types.html) - [Diesel to Postgres data types](https://kotiri.com/2018/01/31/postgresql-diesel-rust-types.html)
- [helpful diesel examples](http://siciarz.net/24-days-rust-diesel/) - [helpful diesel examples](http://siciarz.net/24-days-rust-diesel/)
- [Recursive query for adjacency list for nested comments](https://stackoverflow.com/questions/192220/what-is-the-most-efficient-elegant-way-to-parse-a-flat-table-into-a-tree/192462#192462) - [Recursive query for adjacency list for nested comments](https://stackoverflow.com/questions/192220/what-is-the-most-efficient-elegant-way-to-parse-a-flat-table-into-a-tree/192462#192462)
@ -36,9 +35,15 @@
- [Temp Icon](https://www.flaticon.com/free-icon/mouse_194242) - [Temp Icon](https://www.flaticon.com/free-icon/mouse_194242)
- [Rust docker build](https://shaneutt.com/blog/rust-fast-small-docker-image-builds/) - [Rust docker build](https://shaneutt.com/blog/rust-fast-small-docker-image-builds/)
- [Zurb mentions](https://github.com/zurb/tribute) - [Zurb mentions](https://github.com/zurb/tribute)
- Activitypub guides - [TippyJS](https://github.com/atomiks/tippyjs)
- https://blog.joinmastodon.org/2018/06/how-to-implement-a-basic-activitypub-server/
- https://raw.githubusercontent.com/w3c/activitypub/gh-pages/activitypub-tutorial.txt
- https://github.com/tOkeshu/activitypub-example
- https://blog.joinmastodon.org/2018/07/how-to-make-friends-and-verify-requests/
## Activitypub guides
- https://blog.joinmastodon.org/2018/06/how-to-implement-a-basic-activitypub-server/
- https://raw.githubusercontent.com/w3c/activitypub/gh-pages/activitypub-tutorial.txt
- https://github.com/tOkeshu/activitypub-example
- https://blog.joinmastodon.org/2018/07/how-to-make-friends-and-verify-requests/
- Use the [activitypub crate.](https://docs.rs/activitypub/0.1.4/activitypub/)
- https://docs.rs/activitypub/0.1.4/activitypub/
- [Activitypub vocab.](https://www.w3.org/TR/activitystreams-vocabulary/)
- [Activitypub main](https://www.w3.org/TR/activitypub/)

docs/src/about_guide.md vendored Normal file

@ -0,0 +1,40 @@
# Lemmy Guide
Start typing...
- `@a_user_name` to get a list of usernames.
- `#a_community` to get a list of communities.
- `:emoji` to get a list of emojis.
## Sorting
*Applies to both posts and comments*
Type | Description
--- | ---
Hot | Shows *trending* posts, based on the score, and the most recent comment time.
New | Newest posts.
Top | Shows the highest scoring posts in the given time frame.
For more detail, check the [Post and Comment Ranking details](about_ranking.md).
## Markdown Guide
Type | Or | … to Get
--- | --- | ---
\*Italic\* | \_Italic\_ | _Italic_
\*\*Bold\*\* | \_\_Bold\_\_ | **Bold**
\# Heading 1 | Heading 1 <br> ========= | <h4>Heading 1</h4>
\## Heading 2 | Heading 2 <br>--------- | <h5>Heading 2</h5>
\[Link\](http://a.com) | \[Link\]\[1\]<br><br>\[1\]: http://b.org | [Link](https://commonmark.org/)
!\[Image\](http://url/a.png) | !\[Image\]\[1\]<br><br>\[1\]: http://url/b.jpg | ![Markdown](https://commonmark.org/help/images/favicon.png)
\> Blockquote | | <blockquote>Blockquote</blockquote>
\* List <br>\* List <br>\* List | \- List <br>\- List <br>\- List <br> | * List <br>* List <br>* List <br>
1\. One <br>2\. Two <br>3\. Three | 1) One<br>2) Two<br>3) Three | 1. One<br>2. Two<br>3. Three
Horizontal Rule <br>\--- | Horizontal Rule<br>\*\*\* | Horizontal Rule <br><hr>
\`Inline code\` with backticks | |`Inline code` with backticks
\`\`\`<br>\# code block <br>print '3 backticks or'<br>print 'indent 4 spaces' <br>\`\`\` | ····\# code block<br>····print '3 backticks or'<br>····print 'indent 4 spaces' | \# code block <br>print '3 backticks or'<br>print 'indent 4 spaces'
::: spoiler hidden or nsfw stuff<br>*a bunch of spoilers here*<br>::: | | <details><summary> hidden or nsfw stuff </summary><p><em>a bunch of spoilers here</em></p></details>
[CommonMark Tutorial](https://commonmark.org/help/tutorial/)


@ -18,7 +18,7 @@ Score = Upvotes - Downvotes
Time = time since submission (in hours) Time = time since submission (in hours)
Gravity = Decay gravity, 1.8 is default Gravity = Decay gravity, 1.8 is default
``` ```
- For posts, in order to bring up active posts, it uses the latest comment time (limited to a max creation age of a month ago)
- Use Max(1, score) to make sure all comments are affected by time decay. - Use Max(1, score) to make sure all comments are affected by time decay.
- Add 3 to the score, so that everything that has less than 3 downvotes will seem new. Otherwise all new comments would stay at zero, near the bottom. - Add 3 to the score, so that everything that has less than 3 downvotes will seem new. Otherwise all new comments would stay at zero, near the bottom.
- The sign and abs of the score are necessary for dealing with the log of negative scores. - The sign and abs of the score are necessary for dealing with the log of negative scores.

docs/src/administration.md vendored Normal file

@ -0,0 +1,3 @@
# Admin info
Information for Lemmy instance admins, and those who want to start an instance.


@ -0,0 +1,15 @@
# Configuration
The configuration is based on the file [defaults.hjson](server/config/defaults.hjson). This file also contains documentation for all the available options. To override the defaults, you can copy the options you want to change into your local `config.hjson` file.
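As a small sketch, a local override might contain only the values you change. The target path is an assumption here; put the file wherever your deployment reads `config.hjson` from (the docker setup in this repo, for instance, mounts `./lemmy.hjson` at `/config/config.hjson`).

```bash
# Write a local config.hjson containing only the overridden options.
cat > config.hjson <<'EOF'
{
  hostname: "lemmy.example.com"
  jwt_secret: "generate-a-long-random-string"
  database: {
    password: "a-strong-password"
  }
}
EOF
```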
Additionally, you can override any config option with an environment variable. These have the same name as the config options and are prefixed with `LEMMY_`. For example, you can override `database.pool_size` with
`LEMMY__DATABASE__POOL_SIZE=10`.
An additional option `LEMMY_DATABASE_URL` is available, which can be used with a PostgreSQL connection string like `postgres://lemmy:password@lemmy_db:5432/lemmy`, passing all connection details at once.
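For example, a sketch using values that appear elsewhere in this repo's docs:

```bash
# Override single options; nested keys appear to use double underscores, as in the example above
export LEMMY_HOSTNAME=lemmy.example.com
export LEMMY_JWT_SECRET=changeme
export LEMMY__DATABASE__POOL_SIZE=10

# Or pass all database connection details in one variable
export LEMMY_DATABASE_URL=postgres://lemmy:password@lemmy_db:5432/lemmy
```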
If the Docker container is not used, manually create the database specified above by running the following commands:
```bash
cd server
./db-init.sh
```


@ -0,0 +1,13 @@
# Ansible Installation
First, you need to [install Ansible on your local computer](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) (e.g. using `sudo apt install ansible`, or the equivalent for your platform).
Then run the following commands on your local computer:
```bash
git clone https://github.com/dessalines/lemmy.git
cd lemmy/ansible/
cp inventory.example inventory
nano inventory # enter your server, domain, contact email
ansible-playbook lemmy.yml --become
```


@ -0,0 +1,31 @@
# Docker Installation
Make sure you have both docker and docker-compose (>= `1.24.0`) installed:
```bash
mkdir lemmy/
cd lemmy/
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/lemmy.hjson
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/iframely.config.local.js
# Edit lemmy.hjson, and docker-compose.yml to do more configuration (like adding a custom password)
docker-compose up -d
```
and go to http://localhost:8536.
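If you prefer checking from the command line first, a quick smoke test (a sketch) is:

```bash
# The web UI is served on the published port
curl -I http://localhost:8536
# The API lives under /api/v1, e.g. the GetSite endpoint
curl -s http://localhost:8536/api/v1/site
```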
A [sample nginx config](/ansible/templates/nginx.conf) can be set up with the commands below (note: avatar / image uploading won't work without it):
```bash
wget https://raw.githubusercontent.com/dessalines/lemmy/master/ansible/templates/nginx.conf
# Replace the {{ vars }}
sudo mv nginx.conf /etc/nginx/sites-enabled/lemmy.conf
```
## Updating
To update to the newest version, run:
```bash
wget https://raw.githubusercontent.com/dessalines/lemmy/master/docker/prod/docker-compose.yml
docker-compose up -d
```


@ -0,0 +1,24 @@
# Kubernetes Installation
You'll need to have an existing Kubernetes cluster and [storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/).
Setting this up will vary depending on your provider.
To try it locally, you can use [MicroK8s](https://microk8s.io/) or [Minikube](https://kubernetes.io/docs/tasks/tools/install-minikube/).
Once you have a working cluster, edit the environment variables and volume sizes in `docker/k8s/*.yml`.
You may also want to change the service types to `LoadBalancer`s, depending on where you're running your cluster (add `type: LoadBalancer` alongside `ports` in the service spec), or to `NodePort`s.
By default they will use `ClusterIP`s, which will allow access only within the cluster. See the [docs](https://kubernetes.io/docs/concepts/services-networking/service/) for more on networking in Kubernetes.
**Important:** Running a database in Kubernetes will work, but is generally not recommended.
If you're deploying on any of the common cloud providers, you should consider using their managed database service instead (RDS, Cloud SQL, Azure Database, etc.).
Now you can deploy:
```bash
# Add `-n foo` if you want to deploy into a specific namespace `foo`;
# otherwise your resources will be created in the `default` namespace.
kubectl apply -f docker/k8s/db.yml
kubectl apply -f docker/k8s/pictshare.yml
kubectl apply -f docker/k8s/lemmy.yml
```
If you used a `LoadBalancer`, you should see it in your cloud provider's console.
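You can also query it with kubectl, for example:

```bash
# Shows external IPs / hostnames assigned to LoadBalancer services
kubectl get svc
# Or watch until your cloud provider finishes provisioning the address
kubectl get svc -w
```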

docs/src/contributing.md vendored Normal file

@ -0,0 +1,32 @@
# Contributing
Information about contributing to Lemmy, whether it is translating, testing, designing or programming.
## Translating
Go [here](https://github.com/dessalines/lemmy#translations) for translation instructions.
## Architecture
### Front end
- The front end is written in `typescript`, using a react-like framework called [inferno](https://infernojs.org/). All UI elements are reusable `.tsx` components.
- The main page and routing are in `ui/src/index.tsx`.
- The components are located in `ui/src/components`.
### Back end
- The back end is written in `rust`, using `diesel`, and `actix`.
- The server source code is split into main sections in `server/src`. These include:
- `db` - The low level database actions.
- Database additions are done using diesel migrations. Run `diesel migration generate xxxxx` to add new things (see the workflow sketch after this list).
- `api` - The high level user interactions (things like `CreateComment`)
- `routes` - The server endpoints.
- `apub` - The activitypub conversions.
- `websocket` - Creates the websocket server.
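A rough sketch of that migration workflow (the migration name is a placeholder, and the database URL is just the example used elsewhere in these docs):

```bash
cd server
# Create an empty migration (adds up.sql / down.sql under migrations/)
diesel migration generate add_something
# Apply all pending migrations
diesel migration run --database-url postgres://lemmy:password@localhost:5432/lemmy
# Revert and re-apply the newest migration to make sure down.sql also works
diesel migration redo --database-url postgres://lemmy:password@localhost:5432/lemmy
```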
## Linting / Formatting
- Every front and back end commit is automatically formatted then linted using `husky`, and `lint-staged`.
- Rust with `cargo fmt` and `cargo clippy`.
- Typescript with `prettier` and `eslint`.


@ -0,0 +1,13 @@
# Docker Development
## Running
```bash
git clone https://github.com/dessalines/lemmy
cd lemmy/docker/dev
./docker_update.sh # This builds and runs it, updating for your changes
```
and go to http://localhost:8536.
Note that compile times when changing `Cargo.toml` are relatively long with Docker, because builds can't be incrementally cached. If this is a problem for you, you should use [Local Development](contributing_local_development.md).


@ -0,0 +1,31 @@
#### Requirements
- [Rust](https://www.rust-lang.org/)
- [Yarn](https://yarnpkg.com/en/)
- [Postgres](https://www.postgresql.org/)
#### Set up Postgres DB
```bash
cd server
./db-init.sh
```
Or run the commands manually:
```bash
psql -c "create user lemmy with password 'password' superuser;" -U postgres
psql -c 'create database lemmy with owner lemmy;' -U postgres
export LEMMY_DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
```
#### Running
```bash
git clone https://github.com/dessalines/lemmy
cd lemmy
./install.sh
# For live coding, where both the front and back end automagically reload on any save, do:
# cd ui && yarn start
# cd server && cargo watch -x run
```
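The live-reload comment above relies on `cargo watch`; if you don't have it (or the diesel CLI for migrations) installed yet, this is one way to get them. This is an extra step, not part of the original instructions:

```bash
# Needed for `cargo watch -x run`
cargo install cargo-watch
# Optional: the diesel CLI, useful for running the server's database migrations
cargo install diesel_cli --no-default-features --features postgres
```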


@ -1,130 +1,176 @@
# Lemmy API # Lemmy API
*Note: this may lag behind the actual API endpoints [here](../server/src/api).* *Note: this may lag behind the actual API endpoints [here](../server/src/api).*
<!-- toc --> <!-- toc -->
- [Data types](#data-types) - [Data types](#data-types)
- [Basic usage](#basic-usage) - [Basic usage](#basic-usage)
* [WebSocket Endpoint](#websocket-endpoint) * [WebSocket](#websocket)
* [Testing with Websocat](#testing-with-websocat) + [Testing with Websocat](#testing-with-websocat)
* [Testing with the WebSocket JavaScript API](#testing-with-the-websocket-javascript-api) + [Testing with the WebSocket JavaScript API](#testing-with-the-websocket-javascript-api)
* [HTTP](#http)
+ [Testing with Curl](#testing-with-curl)
- [Get Example](#get-example)
- [Post Example](#post-example)
- [Rate limits](#rate-limits) - [Rate limits](#rate-limits)
- [Errors](#errors) - [Errors](#errors)
- [API documentation](#api-documentation) - [API documentation](#api-documentation)
* [Sort Types](#sort-types) * [Sort Types](#sort-types)
* [Websocket vs HTTP](#websocket-vs-http)
* [User / Authentication / Admin actions](#user--authentication--admin-actions) * [User / Authentication / Admin actions](#user--authentication--admin-actions)
+ [Login](#login) + [Login](#login)
- [Request](#request) - [Request](#request)
- [Response](#response) - [Response](#response)
- [HTTP](#http-1)
+ [Register](#register) + [Register](#register)
- [Request](#request-1) - [Request](#request-1)
- [Response](#response-1) - [Response](#response-1)
- [HTTP](#http-2)
+ [Get User Details](#get-user-details) + [Get User Details](#get-user-details)
- [Request](#request-2) - [Request](#request-2)
- [Response](#response-2) - [Response](#response-2)
- [HTTP](#http-3)
+ [Save User Settings](#save-user-settings) + [Save User Settings](#save-user-settings)
- [Request](#request-3) - [Request](#request-3)
- [Response](#response-3) - [Response](#response-3)
- [HTTP](#http-4)
+ [Get Replies / Inbox](#get-replies--inbox) + [Get Replies / Inbox](#get-replies--inbox)
- [Request](#request-4) - [Request](#request-4)
- [Response](#response-4) - [Response](#response-4)
- [HTTP](#http-5)
+ [Get User Mentions](#get-user-mentions) + [Get User Mentions](#get-user-mentions)
- [Request](#request-5) - [Request](#request-5)
- [Response](#response-5) - [Response](#response-5)
+ [Mark All As Read](#mark-all-as-read) - [HTTP](#http-6)
+ [Edit User Mention](#edit-user-mention)
- [Request](#request-6) - [Request](#request-6)
- [Response](#response-6) - [Response](#response-6)
+ [Delete Account](#delete-account) - [HTTP](#http-7)
+ [Mark All As Read](#mark-all-as-read)
- [Request](#request-7) - [Request](#request-7)
- [Response](#response-7) - [Response](#response-7)
+ [Add admin](#add-admin) - [HTTP](#http-8)
+ [Delete Account](#delete-account)
- [Request](#request-8) - [Request](#request-8)
- [Response](#response-8) - [Response](#response-8)
+ [Ban user](#ban-user) - [HTTP](#http-9)
+ [Add admin](#add-admin)
- [Request](#request-9) - [Request](#request-9)
- [Response](#response-9) - [Response](#response-9)
* [Site](#site) - [HTTP](#http-10)
+ [List Categories](#list-categories) + [Ban user](#ban-user)
- [Request](#request-10) - [Request](#request-10)
- [Response](#response-10) - [Response](#response-10)
+ [Search](#search) - [HTTP](#http-11)
* [Site](#site)
+ [List Categories](#list-categories)
- [Request](#request-11) - [Request](#request-11)
- [Response](#response-11) - [Response](#response-11)
+ [Get Modlog](#get-modlog) - [HTTP](#http-12)
+ [Search](#search)
- [Request](#request-12) - [Request](#request-12)
- [Response](#response-12) - [Response](#response-12)
+ [Create Site](#create-site) - [HTTP](#http-13)
+ [Get Modlog](#get-modlog)
- [Request](#request-13) - [Request](#request-13)
- [Response](#response-13) - [Response](#response-13)
+ [Edit Site](#edit-site) - [HTTP](#http-14)
+ [Create Site](#create-site)
- [Request](#request-14) - [Request](#request-14)
- [Response](#response-14) - [Response](#response-14)
+ [Get Site](#get-site) - [HTTP](#http-15)
+ [Edit Site](#edit-site)
- [Request](#request-15) - [Request](#request-15)
- [Response](#response-15) - [Response](#response-15)
+ [Transfer Site](#transfer-site) - [HTTP](#http-16)
+ [Get Site](#get-site)
- [Request](#request-16) - [Request](#request-16)
- [Response](#response-16) - [Response](#response-16)
* [Community](#community) - [HTTP](#http-17)
+ [Get Community](#get-community) + [Transfer Site](#transfer-site)
- [Request](#request-17) - [Request](#request-17)
- [Response](#response-17) - [Response](#response-17)
+ [Create Community](#create-community) - [HTTP](#http-18)
* [Community](#community)
+ [Get Community](#get-community)
- [Request](#request-18) - [Request](#request-18)
- [Response](#response-18) - [Response](#response-18)
+ [List Communities](#list-communities) - [HTTP](#http-19)
+ [Create Community](#create-community)
- [Request](#request-19) - [Request](#request-19)
- [Response](#response-19) - [Response](#response-19)
+ [Ban from Community](#ban-from-community) - [HTTP](#http-20)
+ [List Communities](#list-communities)
- [Request](#request-20) - [Request](#request-20)
- [Response](#response-20) - [Response](#response-20)
+ [Add Mod to Community](#add-mod-to-community) - [HTTP](#http-21)
+ [Ban from Community](#ban-from-community)
- [Request](#request-21) - [Request](#request-21)
- [Response](#response-21) - [Response](#response-21)
+ [Edit Community](#edit-community) - [HTTP](#http-22)
+ [Add Mod to Community](#add-mod-to-community)
- [Request](#request-22) - [Request](#request-22)
- [Response](#response-22) - [Response](#response-22)
+ [Follow Community](#follow-community) - [HTTP](#http-23)
+ [Edit Community](#edit-community)
- [Request](#request-23) - [Request](#request-23)
- [Response](#response-23) - [Response](#response-23)
+ [Get Followed Communities](#get-followed-communities) - [HTTP](#http-24)
+ [Follow Community](#follow-community)
- [Request](#request-24) - [Request](#request-24)
- [Response](#response-24) - [Response](#response-24)
+ [Transfer Community](#transfer-community) - [HTTP](#http-25)
+ [Get Followed Communities](#get-followed-communities)
- [Request](#request-25) - [Request](#request-25)
- [Response](#response-25) - [Response](#response-25)
* [Post](#post) - [HTTP](#http-26)
+ [Create Post](#create-post) + [Transfer Community](#transfer-community)
- [Request](#request-26) - [Request](#request-26)
- [Response](#response-26) - [Response](#response-26)
+ [Get Post](#get-post) - [HTTP](#http-27)
* [Post](#post)
+ [Create Post](#create-post)
- [Request](#request-27) - [Request](#request-27)
- [Response](#response-27) - [Response](#response-27)
+ [Get Posts](#get-posts) - [HTTP](#http-28)
+ [Get Post](#get-post)
- [Request](#request-28) - [Request](#request-28)
- [Response](#response-28) - [Response](#response-28)
+ [Create Post Like](#create-post-like) - [HTTP](#http-29)
+ [Get Posts](#get-posts)
- [Request](#request-29) - [Request](#request-29)
- [Response](#response-29) - [Response](#response-29)
+ [Edit Post](#edit-post) - [HTTP](#http-30)
+ [Create Post Like](#create-post-like)
- [Request](#request-30) - [Request](#request-30)
- [Response](#response-30) - [Response](#response-30)
+ [Save Post](#save-post) - [HTTP](#http-31)
+ [Edit Post](#edit-post)
- [Request](#request-31) - [Request](#request-31)
- [Response](#response-31) - [Response](#response-31)
* [Comment](#comment) - [HTTP](#http-32)
+ [Create Comment](#create-comment) + [Save Post](#save-post)
- [Request](#request-32) - [Request](#request-32)
- [Response](#response-32) - [Response](#response-32)
+ [Edit Comment](#edit-comment) - [HTTP](#http-33)
* [Comment](#comment)
+ [Create Comment](#create-comment)
- [Request](#request-33) - [Request](#request-33)
- [Response](#response-33) - [Response](#response-33)
+ [Save Comment](#save-comment) - [HTTP](#http-34)
+ [Edit Comment](#edit-comment)
- [Request](#request-34) - [Request](#request-34)
- [Response](#response-34) - [Response](#response-34)
+ [Create Comment Like](#create-comment-like) - [HTTP](#http-35)
+ [Save Comment](#save-comment)
- [Request](#request-35) - [Request](#request-35)
- [Response](#response-35) - [Response](#response-35)
- [HTTP](#http-36)
+ [Create Comment Like](#create-comment-like)
- [Request](#request-36)
- [Response](#response-36)
- [HTTP](#http-37)
* [RSS / Atom feeds](#rss--atom-feeds) * [RSS / Atom feeds](#rss--atom-feeds)
+ [All](#all) + [All](#all)
+ [Community](#community-1) + [Community](#community-1)
@ -144,13 +190,13 @@
Request and response strings are in [JSON format](https://www.json.org). Request and response strings are in [JSON format](https://www.json.org).
### WebSocket Endpoint ### WebSocket
Connect to <code>ws://***host***/api/v1/ws</code> to get started. Connect to <code>ws://***host***/api/v1/ws</code> to get started.
If the ***`host`*** supports secure connections, you can use <code>wss://***host***/api/v1/ws</code>. If the ***`host`*** supports secure connections, you can use <code>wss://***host***/api/v1/ws</code>.
### Testing with Websocat #### Testing with Websocat
[Websocat link](https://github.com/vi/websocat) [Websocat link](https://github.com/vi/websocat)
@ -159,7 +205,7 @@ If the ***`host`*** supports secure connections, you can use <code>wss://***host
A simple test command: A simple test command:
`{"op": "ListCategories"}` `{"op": "ListCategories"}`
### Testing with the WebSocket JavaScript API #### Testing with the WebSocket JavaScript API
[WebSocket JavaScript API](https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API) [WebSocket JavaScript API](https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API)
```javascript ```javascript
@ -171,6 +217,32 @@ ws.onopen = function () {
})); }));
}; };
``` ```
### HTTP
Endpoints are at <code>http://***host***/api/v1/***endpoint***</code>. They'll be listed below for each action.
#### Testing with Curl
##### Get Example
```
curl /community/list?sort=Hot
```
##### Post Example
```
curl -i -H \
"Content-Type: application/json" \
-X POST \
-d '{
"comment_id": X,
"post_id": X,
"score": X,
"auth": "..."
}' \
/comment/like
```
## Rate limits ## Rate limits
@ -201,6 +273,11 @@ These go wherever there is a `sort` field. The available sort types are:
- `TopYear` - the most upvoted posts/communities of the current year. - `TopYear` - the most upvoted posts/communities of the current year.
- `TopAll` - the most upvoted posts/communities on the current instance. - `TopAll` - the most upvoted posts/communities on the current instance.
### Websocket vs HTTP
- Below are the websocket JSON requests / responses. For HTTP, ignore all fields except those inside `data`.
- For example, an HTTP login is a `POST` with body `{username_or_email: X, password: X}` (see the curl example below)
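A concrete sketch of that, using the Login endpoint documented below and the default local port:

```bash
curl -i -H "Content-Type: application/json" \
  -X POST \
  -d '{"username_or_email": "my_user", "password": "my_password"}' \
  http://localhost:8536/api/v1/user/login
```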
### User / Authentication / Admin actions ### User / Authentication / Admin actions
#### Login #### Login
@ -220,13 +297,19 @@ The `jwt` string should be stored and used anywhere `auth` is called for.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "Login",
jwt: String data: {
jwt: String,
}
} }
``` ```
##### HTTP
`POST /user/login`
#### Register #### Register
Only the first user will be able to be the admin. Only the first user will be able to be the admin.
##### Request ##### Request
@ -245,11 +328,17 @@ Only the first user will be able to be the admin.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "Register",
jwt: String data: {
jwt: String,
}
} }
``` ```
##### HTTP
`POST /user/register`
#### Get User Details #### Get User Details
##### Request ##### Request
```rust ```rust
@ -270,14 +359,20 @@ Only the first user will be able to be the admin.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetUserDetails",
data: {
user: UserView, user: UserView,
follows: Vec<CommunityFollowerView>, follows: Vec<CommunityFollowerView>,
moderates: Vec<CommunityModeratorView>, moderates: Vec<CommunityModeratorView>,
comments: Vec<CommentView>, comments: Vec<CommentView>,
posts: Vec<PostView>, posts: Vec<PostView>,
}
} }
``` ```
##### HTTP
`GET /user`
#### Save User Settings #### Save User Settings
##### Request ##### Request
```rust ```rust
@ -295,10 +390,16 @@ Only the first user will be able to be the admin.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "SaveUserSettings",
data: {
jwt: String jwt: String
}
} }
``` ```
##### HTTP
`PUT /save_user_settings`
#### Get Replies / Inbox #### Get Replies / Inbox
##### Request ##### Request
```rust ```rust
@ -316,10 +417,16 @@ Only the first user will be able to be the admin.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetReplies",
data: {
replies: Vec<ReplyView>, replies: Vec<ReplyView>,
}
} }
``` ```
##### HTTP
`GET /user/replies`
#### Get User Mentions #### Get User Mentions
##### Request ##### Request
@ -338,11 +445,42 @@ Only the first user will be able to be the admin.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetUserMentions",
data: {
mentions: Vec<UserMentionView>, mentions: Vec<UserMentionView>,
}
} }
``` ```
##### HTTP
`GET /user/mentions`
#### Edit User Mention
##### Request
```rust
{
op: "EditUserMention",
data: {
user_mention_id: i32,
read: Option<bool>,
auth: String,
}
}
```
##### Response
```rust
{
op: "EditUserMention",
data: {
mention: UserMentionView,
}
}
```
##### HTTP
`PUT /user/mention`
#### Mark All As Read #### Mark All As Read
Marks all user replies and mentions as read. Marks all user replies and mentions as read.
@ -359,11 +497,17 @@ Marks all user replies and mentions as read.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "MarkAllAsRead",
data: {
replies: Vec<ReplyView>, replies: Vec<ReplyView>,
}
} }
``` ```
##### HTTP
`POST /user/mark_all_as_read`
#### Delete Account #### Delete Account
*Permanently deletes your posts and comments* *Permanently deletes your posts and comments*
@ -381,11 +525,17 @@ Marks all user replies and mentions as read.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "DeleteAccount",
data: {
jwt: String, jwt: String,
}
} }
``` ```
##### HTTP
`POST /user/delete_account`
#### Add admin #### Add admin
##### Request ##### Request
```rust ```rust
@ -401,10 +551,15 @@ Marks all user replies and mentions as read.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "AddAdmin",
data: {
admins: Vec<UserView>, admins: Vec<UserView>,
}
} }
``` ```
##### HTTP
`POST /admin/add`
#### Ban user #### Ban user
##### Request ##### Request
@ -423,11 +578,16 @@ Marks all user replies and mentions as read.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "BanUser",
data: {
user: UserView, user: UserView,
banned: bool, banned: bool,
}
} }
``` ```
##### HTTP
`POST /user/ban`
### Site ### Site
#### List Categories #### List Categories
@ -440,13 +600,19 @@ Marks all user replies and mentions as read.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "ListCategories",
data: {
categories: Vec<Category> categories: Vec<Category>
}
} }
``` ```
##### HTTP
`GET /categories`
#### Search #### Search
Search types are `Both, Comments, Posts`.
Search types are `All, Comments, Posts, Communities, Users, Url`
##### Request ##### Request
```rust ```rust
@ -459,17 +625,26 @@ Search types are `Both, Comments, Posts`.
sort: String, sort: String,
page: Option<i64>, page: Option<i64>,
limit: Option<i64>, limit: Option<i64>,
auth?: Option<String>,
} }
} }
``` ```
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "Search",
data: {
type_: String,
comments: Vec<CommentView>, comments: Vec<CommentView>,
posts: Vec<PostView>, posts: Vec<PostView>,
communities: Vec<CommunityView>,
users: Vec<UserView>,
}
} }
``` ```
##### HTTP
`POST /search`
#### Get Modlog #### Get Modlog
##### Request ##### Request
@ -487,7 +662,8 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetModlog",
data: {
removed_posts: Vec<ModRemovePostView>, removed_posts: Vec<ModRemovePostView>,
locked_posts: Vec<ModLockPostView>, locked_posts: Vec<ModLockPostView>,
removed_comments: Vec<ModRemoveCommentView>, removed_comments: Vec<ModRemoveCommentView>,
@ -496,9 +672,14 @@ Search types are `Both, Comments, Posts`.
banned: Vec<ModBanView>, banned: Vec<ModBanView>,
added_to_community: Vec<ModAddCommunityView>, added_to_community: Vec<ModAddCommunityView>,
added: Vec<ModAddView>, added: Vec<ModAddView>,
}
} }
``` ```
##### HTTP
`GET /modlog`
#### Create Site #### Create Site
##### Request ##### Request
```rust ```rust
@ -514,11 +695,17 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "CreateSite",
data: {
site: SiteView, site: SiteView,
}
} }
``` ```
##### HTTP
`POST /site`
#### Edit Site #### Edit Site
##### Request ##### Request
```rust ```rust
@ -534,10 +721,15 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "EditSite",
data: {
site: SiteView, site: SiteView,
}
} }
``` ```
##### HTTP
`PUT /site`
#### Get Site #### Get Site
##### Request ##### Request
@ -549,12 +741,17 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetSite",
data: {
site: Option<SiteView>, site: Option<SiteView>,
admins: Vec<UserView>, admins: Vec<UserView>,
banned: Vec<UserView>, banned: Vec<UserView>,
}
} }
``` ```
##### HTTP
`GET /site`
#### Transfer Site #### Transfer Site
##### Request ##### Request
@ -570,12 +767,17 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "TransferSite",
data: {
site: Option<SiteView>, site: Option<SiteView>,
admins: Vec<UserView>, admins: Vec<UserView>,
banned: Vec<UserView>, banned: Vec<UserView>,
}
} }
``` ```
##### HTTP
`POST /site/transfer`
### Community ### Community
#### Get Community #### Get Community
@ -593,12 +795,17 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetCommunity",
data: {
community: CommunityView, community: CommunityView,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>, admins: Vec<UserView>,
}
} }
``` ```
##### HTTP
`GET /community`
#### Create Community #### Create Community
##### Request ##### Request
@ -617,10 +824,15 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "CreateCommunity",
data: {
community: CommunityView community: CommunityView
}
} }
``` ```
##### HTTP
`POST /community`
#### List Communities #### List Communities
##### Request ##### Request
@ -638,10 +850,15 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "ListCommunities",
data: {
communities: Vec<CommunityView> communities: Vec<CommunityView>
}
} }
``` ```
##### HTTP
`GET /community/list`
#### Ban from Community #### Ban from Community
##### Request ##### Request
@ -661,11 +878,16 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "BanFromCommunity",
data: {
user: UserView, user: UserView,
banned: bool, banned: bool,
}
} }
``` ```
##### HTTP
`POST /community/ban_user`
#### Add Mod to Community #### Add Mod to Community
##### Request ##### Request
@ -683,10 +905,15 @@ Search types are `Both, Comments, Posts`.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "AddModToCommunity",
data: {
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
}
} }
``` ```
##### HTTP
`POST /community/mod`
#### Edit Community #### Edit Community
Mods and admins can remove and lock a community, creators can delete it. Mods and admins can remove and lock a community, creators can delete it.
@ -712,10 +939,15 @@ Mods and admins can remove and lock a community, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "EditCommunity",
data: {
community: CommunityView community: CommunityView
}
} }
``` ```
##### HTTP
`PUT /community`
#### Follow Community #### Follow Community
##### Request ##### Request
@ -732,10 +964,15 @@ Mods and admins can remove and lock a community, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "FollowCommunity",
data: {
community: CommunityView community: CommunityView
}
} }
``` ```
##### HTTP
`POST /community/follow`
#### Get Followed Communities #### Get Followed Communities
##### Request ##### Request
@ -750,10 +987,15 @@ Mods and admins can remove and lock a community, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetFollowedCommunities",
data: {
communities: Vec<CommunityFollowerView> communities: Vec<CommunityFollowerView>
}
} }
``` ```
##### HTTP
`GET /user/followed_communities`
#### Transfer Community #### Transfer Community
##### Request ##### Request
@ -770,12 +1012,17 @@ Mods and admins can remove and lock a community, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "TransferCommunity",
data: {
community: CommunityView, community: CommunityView,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>, admins: Vec<UserView>,
}
} }
``` ```
##### HTTP
`POST /community/transfer`
### Post ### Post
#### Create Post #### Create Post
@ -795,10 +1042,15 @@ Mods and admins can remove and lock a community, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "CreatePost",
data: {
post: PostView post: PostView
}
} }
``` ```
##### HTTP
`POST /post`
#### Get Post #### Get Post
##### Request ##### Request
@ -814,16 +1066,22 @@ Mods and admins can remove and lock a community, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetPost",
data: {
post: PostView, post: PostView,
comments: Vec<CommentView>, comments: Vec<CommentView>,
community: CommunityView, community: CommunityView,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>, admins: Vec<UserView>,
}
} }
``` ```
##### HTTP
`GET /post`
#### Get Posts #### Get Posts
Post listing types are `All, Subscribed, Community` Post listing types are `All, Subscribed, Community`
##### Request ##### Request
@ -843,12 +1101,18 @@ Post listing types are `All, Subscribed, Community`
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "GetPosts",
data: {
posts: Vec<PostView>, posts: Vec<PostView>,
}
} }
``` ```
##### HTTP
`GET /post/list`
#### Create Post Like #### Create Post Like
`score` can be 0, -1, or 1 `score` can be 0, -1, or 1
##### Request ##### Request
@ -865,12 +1129,18 @@ Post listing types are `All, Subscribed, Community`
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "CreatePostLike",
data: {
post: PostView post: PostView
}
} }
``` ```
##### HTTP
`POST /post/like`
#### Edit Post #### Edit Post
Mods and admins can remove and lock a post, creators can delete it. Mods and admins can remove and lock a post, creators can delete it.
##### Request ##### Request
@ -895,11 +1165,17 @@ Mods and admins can remove and lock a post, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "EditPost",
data: {
post: PostView post: PostView
}
} }
``` ```
##### HTTP
`PUT /post`
#### Save Post #### Save Post
##### Request ##### Request
```rust ```rust
@ -915,10 +1191,15 @@ Mods and admins can remove and lock a post, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "SavePost",
data: {
post: PostView post: PostView
}
} }
``` ```
##### HTTP
`POST /post/save`
### Comment ### Comment
#### Create Comment #### Create Comment
@ -938,12 +1219,19 @@ Mods and admins can remove and lock a post, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "CreateComment",
data: {
comment: CommentView comment: CommentView
}
} }
``` ```
##### HTTP
`POST /comment`
#### Edit Comment #### Edit Comment
Mods and admins can remove a comment, creators can delete it. Mods and admins can remove a comment, creators can delete it.
##### Request ##### Request
@ -967,10 +1255,15 @@ Mods and admins can remove a comment, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "EditComment",
data: {
comment: CommentView comment: CommentView
}
} }
``` ```
##### HTTP
`PUT /comment`
#### Save Comment #### Save Comment
##### Request ##### Request
@ -987,12 +1280,18 @@ Mods and admins can remove a comment, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "SaveComment",
data: {
comment: CommentView comment: CommentView
}
} }
``` ```
##### HTTP
`POST /comment/save`
#### Create Comment Like #### Create Comment Like
`score` can be 0, -1, or 1 `score` can be 0, -1, or 1
##### Request ##### Request
@ -1010,10 +1309,15 @@ Mods and admins can remove a comment, creators can delete it.
##### Response ##### Response
```rust ```rust
{ {
op: String, op: "CreateCommentLike",
data: {
comment: CommentView comment: CommentView
}
} }
``` ```
##### HTTP
`POST /comment/like`
### RSS / Atom feeds ### RSS / Atom feeds

docs/src/lemmy_council.md vendored Normal file

@ -0,0 +1,53 @@
# Lemmy Council
- A group of lemmy developers and users that use a well-defined democratic process to steer the project in a positive direction, keep it aligned to community goals, and resolve conflicts.
## Voting / Decision-Making
### Process
- Anything is open for discussion
- Voting done through matrix chat reacts (thumbs up/thumbs down)
- Require a simple majority for votes. (Maybe 2/3rds for more debated decisions).
- Once a decision is reached democratically, the decision is binding and all group members have to follow it
- All members of the Lemmy council have equal voting power.
- Voting must stay open for at least 2 days.
### What gets voted on
- Membership (joining, removing)
- Coding direction
- Priorities / Emphasis
- Controversial features (For example, an unpopular feature should be removed)
- Communication mediums
- Conflict resolution
- dev.lemmy.ml (domain and server)
- lemmy.ml and subdomains (excluding communism.lemmy.ml)
- git repo including mirrors (on github, gitea, etc)
- Any official accounts of the Lemmy project, for example the Mastodon account or the Liberapay account
- Changes to these rules
## Joining
- We use the following process: anyone who is active around Lemmy can recommend any other active person to join the council. This has to be approved by a majority of the council.
- Active users are defined as those who contribute to Lemmy in some way for at least an hour per week on average, doing things like reporting bugs, discussing rules and features, translating, promoting, developing, or doing other things that aim to improve Lemmy as a whole.
- People should have joined at least a month ago.
- The member list is public.
- Note: we would like to have a process where community members can elect candidates for the council, but this is not realistic because a single user could easily create multiple accounts and cheat the vote.
- Limit growth to one new member per month at most.
## Removing members
- Inactive members should be removed from the council after a few months of inactivity, and after receiving a notification about this.
- Members that don't follow binding council decisions should be removed.
- Any member can be removed in a vote.
## Goals
- We encourage the membership of groups such as LGBT people, religious or ethnic minorities, abuse victims, etc., and strive to create a safe space for them to express their opinions. We also support measures to increase participation by the previously mentioned groups.
- The following are banned, and will always be harshly punished: fascism, abuse, racism, sexism, etc.
## Communication
- A private Matrix chat for all council members.
- (Once private communities are done) A private community on dev.lemmy.ml for issues.
## Member List / Contact Info
General Contact [@LemmyDev Mastodon](https://mastodon.social/@LemmyDev)
- Dessalines [Matrix](https://matrix.to/#/@happydooby:matrix.org)
- Nutomic [Matrix](https://matrix.to/#/@nutomic:matrix.org), [Mastodon](https://radical.town/@felix)

install.sh vendored

@ -1,13 +1,41 @@
#!/bin/sh #!/bin/bash
set -e set -e
export DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy # Set the database variable to the default first.
# Don't forget to change this string to your actual database parameters
# if you don't plan to initialize the database in this script.
export LEMMY_DATABASE_URL=postgres://lemmy:password@localhost:5432/lemmy
# Set other environment variables
export JWT_SECRET=changeme export JWT_SECRET=changeme
export HOSTNAME=rrr export HOSTNAME=rrr
# Optionally initialize the database
init_db_valid=0
init_db_final=0
while [ "$init_db_valid" == 0 ]
do
read -p "Initialize database (y/n)? " init_db
case "${init_db,,}" in
y|yes ) init_db_valid=1; init_db_final=1;;
n|no ) init_db_valid=1; init_db_final=0;;
* ) echo "Invalid input" 1>&2;;
esac
echo
done
if [ "$init_db_final" = 1 ]
then
source ./server/db-init.sh
read -n 1 -s -r -p "Press ANY KEY to continue execution of this script, press CTRL+C to quit..."
echo
fi
# Build the web client
cd ui cd ui
yarn yarn
yarn build yarn build
# Build and run the backend
cd ../server cd ../server
cargo run cargo run

server/.gitignore vendored

@ -2,3 +2,5 @@
.env .env
.idea .idea
env_setup.sh env_setup.sh
query_testing/*.json
query_testing/*.json.old


@ -1 +1,2 @@
tab_spaces = 2 tab_spaces = 2
edition="2018"

server/Cargo.lock generated vendored

File diff suppressed because it is too large.

server/Cargo.toml vendored

@ -5,28 +5,33 @@ authors = ["Dessalines <happydooby@gmail.com>"]
edition = "2018" edition = "2018"
[dependencies] [dependencies]
diesel = { version = "1.4.2", features = ["postgres","chrono"] } diesel = { version = "1.4.2", features = ["postgres","chrono", "r2d2", "64-column-tables"] }
diesel_migrations = "1.4.0" diesel_migrations = "1.4.0"
dotenv = "0.14.1" dotenv = "0.15.0"
bcrypt = "0.5.0" bcrypt = "0.6.1"
activitypub = "0.1.5" activitypub = "0.2.0"
chrono = { version = "0.4.7", features = ["serde"] } chrono = { version = "0.4.7", features = ["serde"] }
failure = "0.1.5" failure = "0.1.5"
serde_json = { version = "1.0.40", features = ["preserve_order"]} serde_json = { version = "1.0.45", features = ["preserve_order"]}
serde = { version = "1.0.94", features = ["derive"] } serde = { version = "1.0.94", features = ["derive"] }
actix = "0.8.3" actix = "0.9.0"
actix-web = "1.0" actix-web = "2.0.0"
actix-files = "0.1.3" actix-files = "0.2.1"
actix-web-actors = "1.0" actix-web-actors = "2.0.0"
env_logger = "0.6.2" actix-rt = "1.0.0"
rand = "0.7.0" env_logger = "0.7.1"
strum = "0.15.0" rand = "0.7.3"
strum_macros = "0.15.0" strum = "0.17.1"
jsonwebtoken = "6.0.1" strum_macros = "0.17.1"
regex = "1.1.9" jsonwebtoken = "7.0.1"
regex = "1.3.4"
lazy_static = "1.3.0" lazy_static = "1.3.0"
lettre = "0.9.2" lettre = "0.9.2"
lettre_email = "0.9.2" lettre_email = "0.9.2"
rust-crypto = "^0.2" sha2 = "0.8.1"
rss = "1.8.0" rss = "1.9.0"
htmlescape = "0.3.1" htmlescape = "0.3.1"
config = "0.10.1"
hjson = "0.8.2"
percent-encoding = "2.1.0"
chttp = "0.5.5"

server/clean.sh vendored Executable file

@ -0,0 +1,7 @@
#!/bin/sh
cargo update
cargo fmt
cargo check
cargo clippy
cargo outdated -R

server/config/defaults.hjson vendored Normal file

@ -0,0 +1,56 @@
{
# settings related to the postgresql database
database: {
# username to connect to postgres
user: "lemmy"
# password to connect to postgres
password: "password"
# host where postgres is running
host: "localhost"
# port where postgres can be accessed
port: 5432
# name of the postgres database for lemmy
database: "lemmy"
# maximum number of active sql connections
pool_size: 5
}
# the domain name of your instance (eg "dev.lemmy.ml")
hostname: "my_domain"
# address where lemmy should listen for incoming requests
bind: "0.0.0.0"
# port where lemmy should listen for incoming requests
port: 8536
# json web token for authorization between server and client
jwt_secret: "changeme"
# The dir for the front end
front_end_dir: "../ui/dist"
# whether to enable activitypub federation. this feature is in alpha, do not enable it in production, as it might
# cause problems like remote instances fetching and permanently storing bad data.
federation_enabled: false
# rate limits for various user actions, by user ip
rate_limit: {
# maximum number of messages created in interval
message: 180
# interval length for message limit
message_per_second: 60
# maximum number of posts created in interval
post: 6
# interval length for post limit
post_per_second: 600
# maximum number of registrations in interval
register: 3
# interval length for registration limit
register_per_second: 3600
}
# # email sending configuration
# email: {
# # hostname of the smtp server
# smtp_server: ""
# # login name for smtp server
# smtp_login: ""
# # password to login to the smtp server
# smtp_password: ""
# # address to send emails from, eg "info@your-instance.com"
# smtp_from_address: ""
# }
}

server/db-init.sh vendored Executable file

@ -0,0 +1,43 @@
#!/bin/bash
username=lemmy
dbname=lemmy
port=5432
password=""
password_confirm=""
password_valid=0
while [ "$password_valid" == 0 ]
do
read -p "Enter database password: " -s password
echo
read -p "Verify database password: " -s password_confirm
echo
echo
# Start the loop from the top if either check fails
if [ -z "$password" ]
then
echo "Error: Password cannot be empty." 1>&2
echo
continue
fi
if [ "$password" != "$password_confirm" ]
then
echo "Error: Passwords don't match." 1>&2
echo
continue
fi
# Set the password_valid variable to break out of the loop
password_valid=1
done
psql -c "CREATE USER $username WITH PASSWORD '$password' SUPERUSER;" -U postgres
psql -c "CREATE DATABASE $dbname WITH OWNER $username;" -U postgres
export LEMMY_DATABASE_URL=postgres://$username:$password@localhost:$port/$dbname
echo $LEMMY_DATABASE_URL
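Note that the `export` at the end only affects the script's own process. To keep `LEMMY_DATABASE_URL` in your current shell (as `install.sh` above does), source the script instead of executing it:

```bash
cd server
# Sourcing runs the script in the current shell, so the exported variable persists
source ./db-init.sh
echo "$LEMMY_DATABASE_URL"
```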


@ -0,0 +1,224 @@
-- the views
drop view user_mention_view;
drop view reply_view;
drop view comment_view;
drop view user_view;
-- user
create view user_view as
select id,
name,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- post
-- Recreate the view
drop view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- community
drop view community_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- Reply and comment view
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- community tables
drop view community_moderator_view;
drop view community_follower_view;
drop view community_user_ban_view;
drop view site_view;
create view community_moderator_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select name from community c where cm.community_id = c.id) as community_name
from community_moderator cm;
create view community_follower_view as
select *,
(select name from user_ u where cf.user_id = u.id) as user_name,
(select name from community c where cf.community_id = c.id) as community_name
from community_follower cf;
create view community_user_ban_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select name from community c where cm.community_id = c.id) as community_name
from community_user_ban cm;
create view site_view as
select *,
(select name from user_ u where s.creator_id = u.id) as creator_name,
(select count(*) from user_) as number_of_users,
(select count(*) from post) as number_of_posts,
(select count(*) from comment) as number_of_comments,
(select count(*) from community) as number_of_communities
from site s;
alter table user_ rename column avatar to icon;
alter table user_ alter column icon type bytea using icon::bytea;


@@ -0,0 +1,234 @@
-- Rename to avatar
alter table user_ rename column icon to avatar;
alter table user_ alter column avatar type text;
-- Rebuild nearly all the views, to include the creator avatars
-- user
drop view user_view;
create view user_view as
select id,
name,
avatar,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- post
-- Recreate the view
drop view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- community
drop view community_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- community views
drop view community_moderator_view;
drop view community_follower_view;
drop view community_user_ban_view;
drop view site_view;
create view community_moderator_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select avatar from user_ u where cm.user_id = u.id),
(select name from community c where cm.community_id = c.id) as community_name
from community_moderator cm;
create view community_follower_view as
select *,
(select name from user_ u where cf.user_id = u.id) as user_name,
(select avatar from user_ u where cf.user_id = u.id),
(select name from community c where cf.community_id = c.id) as community_name
from community_follower cf;
create view community_user_ban_view as
select *,
(select name from user_ u where cm.user_id = u.id) as user_name,
(select avatar from user_ u where cm.user_id = u.id),
(select name from community c where cm.community_id = c.id) as community_name
from community_user_ban cm;
create view site_view as
select *,
(select name from user_ u where s.creator_id = u.id) as creator_name,
(select avatar from user_ u where s.creator_id = u.id) as creator_avatar,
(select count(*) from user_) as number_of_users,
(select count(*) from post) as number_of_posts,
(select count(*) from comment) as number_of_comments,
(select count(*) from community) as number_of_communities
from site s;


@@ -0,0 +1,15 @@
-- user
drop view user_view;
create view user_view as
select id,
name,
avatar,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@@ -0,0 +1,16 @@
-- user
drop view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@@ -0,0 +1,20 @@
-- Drop the columns
drop view user_view;
alter table user_ drop column show_avatars;
alter table user_ drop column send_notifications_to_email;
-- Rebuild the view
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@@ -0,0 +1,22 @@
-- Add columns
alter table user_ add column show_avatars boolean default true not null;
alter table user_ add column send_notifications_to_email boolean default false not null;
-- Rebuild the user_view
drop view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;


@@ -0,0 +1,16 @@
drop index idx_post_creator;
drop index idx_post_community;
drop index idx_post_like_post;
drop index idx_post_like_user;
drop index idx_comment_creator;
drop index idx_comment_parent;
drop index idx_comment_post;
drop index idx_comment_like_comment;
drop index idx_comment_like_user;
drop index idx_comment_like_post;
drop index idx_community_creator;
drop index idx_community_category;


@@ -0,0 +1,17 @@
-- Go through all the table joins, optimize every view, CTE, etc.
create index idx_post_creator on post (creator_id);
create index idx_post_community on post (community_id);
create index idx_post_like_post on post_like (post_id);
create index idx_post_like_user on post_like (user_id);
create index idx_comment_creator on comment (creator_id);
create index idx_comment_parent on comment (parent_id);
create index idx_comment_post on comment (post_id);
create index idx_comment_like_comment on comment_like (comment_id);
create index idx_comment_like_user on comment_like (user_id);
create index idx_comment_like_post on comment_like (post_id);
create index idx_community_creator on community (creator_id);
create index idx_community_category on community (category_id);
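-- A minimal sketch of how these indexes would be exercised, assuming the tables
-- above are populated; the community id 2 is a hypothetical value.
explain select * from post where community_id = 2;
-- With enough rows, the plan should switch from a sequential scan to an
-- index (or bitmap) scan on idx_post_community.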


@@ -0,0 +1,223 @@
-- functions and triggers
drop trigger refresh_user on user_;
drop function refresh_user();
drop trigger refresh_post on post;
drop function refresh_post();
drop trigger refresh_post_like on post_like;
drop function refresh_post_like();
drop trigger refresh_community on community;
drop function refresh_community();
drop trigger refresh_community_follower on community_follower;
drop function refresh_community_follower();
drop trigger refresh_community_user_ban on community_user_ban;
drop function refresh_community_user_ban();
drop trigger refresh_comment on comment;
drop function refresh_comment();
drop trigger refresh_comment_like on comment_like;
drop function refresh_comment_like();
-- post
-- Recreate the view
drop view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
-- user
drop materialized view user_mview;
drop view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- community
drop view community_mview;
drop materialized view community_aggregates_mview;
drop view community_view;
drop view community_aggregates_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
drop view comment_mview;
drop materialized view comment_aggregates_mview;
drop view comment_aggregates_view;
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;


@@ -0,0 +1,437 @@
-- post
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
drop view post_view;
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- user_view
drop view user_view;
create view user_view as
select
u.id,
u.name,
u.avatar,
u.email,
u.fedi_name,
u.admin,
u.banned,
u.show_avatars,
u.send_notifications_to_email,
u.published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create materialized view user_mview as select * from user_view;
create unique index idx_user_mview_id on user_mview (id);
-- community
create view community_aggregates_view as
select c.*,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c;
create materialized view community_aggregates_mview as select * from community_aggregates_view;
create unique index idx_community_aggregates_mview_id on community_aggregates_mview (id);
drop view community_view;
create view community_view as
with all_community as
(
select
ca.*
from community_aggregates_view ca
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
create view community_mview as
with all_community as
(
select
ca.*
from community_aggregates_mview ca
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
create view comment_aggregates_view as
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id;
create materialized view comment_aggregates_mview as select * from comment_aggregates_view;
create unique index idx_comment_aggregates_mview_id on comment_aggregates_mview (id);
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
create view comment_view as
with all_comment as
(
select
ca.*
from comment_aggregates_view ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view comment_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- user
create or replace function refresh_user()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently user_mview;
refresh materialized view concurrently comment_aggregates_mview; -- cause of bans
refresh materialized view concurrently post_aggregates_mview;
return null;
end $$;
create trigger refresh_user
after insert or update or delete or truncate
on user_
for each statement
execute procedure refresh_user();
-- post
create or replace function refresh_post()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_post
after insert or update or delete or truncate
on post
for each statement
execute procedure refresh_post();
-- post_like
create or replace function refresh_post_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_post_like
after insert or update or delete or truncate
on post_like
for each statement
execute procedure refresh_post_like();
-- community
create or replace function refresh_community()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently community_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_community
after insert or update or delete or truncate
on community
for each statement
execute procedure refresh_community();
-- community_follower
create or replace function refresh_community_follower()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently community_aggregates_mview;
refresh materialized view concurrently post_aggregates_mview;
return null;
end $$;
create trigger refresh_community_follower
after insert or update or delete or truncate
on community_follower
for each statement
execute procedure refresh_community_follower();
-- community_user_ban
create or replace function refresh_community_user_ban()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_aggregates_mview;
refresh materialized view concurrently post_aggregates_mview;
return null;
end $$;
create trigger refresh_community_user_ban
after insert or update or delete or truncate
on community_user_ban
for each statement
execute procedure refresh_community_user_ban();
-- comment
create or replace function refresh_comment()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_aggregates_mview;
refresh materialized view concurrently comment_aggregates_mview;
refresh materialized view concurrently community_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_comment
after insert or update or delete or truncate
on comment
for each statement
execute procedure refresh_comment();
-- comment_like
create or replace function refresh_comment_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_aggregates_mview;
refresh materialized view concurrently user_mview;
return null;
end $$;
create trigger refresh_comment_like
after insert or update or delete or truncate
on comment_like
for each statement
execute procedure refresh_comment_like();
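-- A minimal usage sketch for the views defined above, assuming user id 2 and
-- community id 3 are hypothetical existing rows. Each view unions per-user rows
-- (user_id, my_vote, subscribed, ...) with a second branch where user_id is null
-- for anonymous readers, and the triggers keep the *_aggregates_mview copies fresh.
-- logged-in listing, including that user's vote and subscription state
select id, name, score, hot_rank, my_vote, subscribed
from post_mview
where user_id = 2 and community_id = 3
order by hot_rank desc
limit 10;
-- anonymous listing, served by the null user_id branch
select id, name, score, hot_rank
from post_mview
where user_id is null and community_id = 3
order by hot_rank desc
limit 10;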


@@ -0,0 +1,34 @@
-- Drop the triggers
drop trigger refresh_private_message on private_message;
drop function refresh_private_message();
-- Drop the view and table
drop view private_message_view cascade;
drop table private_message;
-- Rebuild the old views
drop view user_view cascade;
create view user_view as
select
u.id,
u.name,
u.avatar,
u.email,
u.fedi_name,
u.admin,
u.banned,
u.show_avatars,
u.send_notifications_to_email,
u.published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create materialized view user_mview as select * from user_view;
create unique index idx_user_mview_id on user_mview (id);
-- Drop the column
alter table user_ drop column matrix_user_id;


@@ -0,0 +1,90 @@
-- Create the private message table
create table private_message (
id serial primary key,
creator_id int references user_ on update cascade on delete cascade not null,
recipient_id int references user_ on update cascade on delete cascade not null,
content text not null,
deleted boolean default false not null,
read boolean default false not null,
published timestamp not null default now(),
updated timestamp
);
-- Create the view and materialized view, which include the creator and recipient names and avatars
create view private_message_view as
select
pm.*,
u.name as creator_name,
u.avatar as creator_avatar,
u2.name as recipient_name,
u2.avatar as recipient_avatar
from private_message pm
inner join user_ u on u.id = pm.creator_id
inner join user_ u2 on u2.id = pm.recipient_id;
create materialized view private_message_mview as select * from private_message_view;
create unique index idx_private_message_mview_id on private_message_mview (id);
-- Create the triggers
create or replace function refresh_private_message()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently private_message_mview;
return null;
end $$;
create trigger refresh_private_message
after insert or update or delete or truncate
on private_message
for each statement
execute procedure refresh_private_message();
-- Update user to include matrix id
alter table user_ add column matrix_user_id text unique;
drop view user_view cascade;
create view user_view as
select
u.id,
u.name,
u.avatar,
u.email,
u.matrix_user_id,
u.fedi_name,
u.admin,
u.banned,
u.show_avatars,
u.send_notifications_to_email,
u.published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create materialized view user_mview as select * from user_view;
create unique index idx_user_mview_id on user_mview (id);
-- This is what a group pm table would look like
-- Not going to do it now because of the complications
--
-- create table private_message (
-- id serial primary key,
-- creator_id int references user_ on update cascade on delete cascade not null,
-- content text not null,
-- deleted boolean default false not null,
-- published timestamp not null default now(),
-- updated timestamp
-- );
--
-- create table private_message_recipient (
-- id serial primary key,
-- private_message_id int references private_message on update cascade on delete cascade not null,
-- recipient_id int references user_ on update cascade on delete cascade not null,
-- read boolean default false not null,
-- published timestamp not null default now(),
-- unique(private_message_id, recipient_id)
-- )


@@ -0,0 +1,25 @@
-- Drop and rebuild reply_view against the regular comment_view
drop view reply_view;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;


@@ -0,0 +1,27 @@
-- https://github.com/dessalines/lemmy/issues/197
drop view reply_view;
-- Rebuild reply_view to reference comment_mview
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_mview cv, closereply
where closereply.id = cv.id
;


@@ -0,0 +1 @@
drop view user_mention_mview;


@@ -0,0 +1,67 @@
create view user_mention_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved,
um.recipient_id
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
left join user_mention um on um.comment_id = ac.id
union all
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
null as user_id,
null as my_vote,
null as saved,
um.recipient_id
from all_comment ac
left join user_mention um on um.comment_id = ac.id
;


@@ -0,0 +1,2 @@
drop index idx_user_name_lower;
drop index idx_user_email_lower;


@@ -0,0 +1,29 @@
-- Add case insensitive username and email uniqueness
-- An example query that shows the dupes:
-- select
-- max(id) as id,
-- lower(name) as lname,
-- count(*)
-- from user_
-- group by lower(name)
-- having count(*) > 1;
-- Delete username dupes, keeping the first one
delete
from user_
where id not in (
select min(id)
from user_
group by lower(name), lower(fedi_name)
);
-- The user index
create unique index idx_user_name_lower on user_ (lower(name));
-- Email lower
create unique index idx_user_email_lower on user_ (lower(email));
-- Set empty emails properly to null
update user_ set email = null where email = '';
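-- A minimal sketch of what the functional indexes above provide; the username
-- 'SomeUser' is a hypothetical value. Lookups can use idx_user_name_lower, and
-- a second account differing only in case now violates the unique constraint.
select id from user_ where lower(name) = lower('SomeUser');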


@@ -0,0 +1,132 @@
-- Drop the dependent views
drop view post_view;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
drop view mod_remove_post_view;
drop view mod_sticky_post_view;
drop view mod_lock_post_view;
drop view mod_remove_comment_view;
alter table post alter column name type varchar(100);
-- regen post view
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- The mod views
create view mod_remove_post_view as
select mrp.*,
(select name from user_ u where mrp.mod_user_id = u.id) as mod_user_name,
(select name from post p where mrp.post_id = p.id) as post_name,
(select c.id from post p, community c where mrp.post_id = p.id and p.community_id = c.id) as community_id,
(select c.name from post p, community c where mrp.post_id = p.id and p.community_id = c.id) as community_name
from mod_remove_post mrp;
create view mod_lock_post_view as
select mlp.*,
(select name from user_ u where mlp.mod_user_id = u.id) as mod_user_name,
(select name from post p where mlp.post_id = p.id) as post_name,
(select c.id from post p, community c where mlp.post_id = p.id and p.community_id = c.id) as community_id,
(select c.name from post p, community c where mlp.post_id = p.id and p.community_id = c.id) as community_name
from mod_lock_post mlp;
create view mod_remove_comment_view as
select mrc.*,
(select name from user_ u where mrc.mod_user_id = u.id) as mod_user_name,
(select c.id from comment c where mrc.comment_id = c.id) as comment_user_id,
(select name from user_ u, comment c where mrc.comment_id = c.id and u.id = c.creator_id) as comment_user_name,
(select content from comment c where mrc.comment_id = c.id) as comment_content,
(select p.id from post p, comment c where mrc.comment_id = c.id and c.post_id = p.id) as post_id,
(select p.name from post p, comment c where mrc.comment_id = c.id and c.post_id = p.id) as post_name,
(select co.id from comment c, post p, community co where mrc.comment_id = c.id and c.post_id = p.id and p.community_id = co.id) as community_id,
(select co.name from comment c, post p, community co where mrc.comment_id = c.id and c.post_id = p.id and p.community_id = co.id) as community_name
from mod_remove_comment mrc;
create view mod_sticky_post_view as
select msp.*,
(select name from user_ u where msp.mod_user_id = u.id) as mod_user_name,
(select name from post p where msp.post_id = p.id) as post_name,
(select c.id from post p, community c where msp.post_id = p.id and p.community_id = c.id) as community_id,
(select c.name from post p, community c where msp.post_id = p.id and p.community_id = c.id) as community_name
from mod_sticky_post msp;


@@ -0,0 +1,133 @@
-- Drop the dependent views
drop view post_view;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
drop view mod_remove_post_view;
drop view mod_sticky_post_view;
drop view mod_lock_post_view;
drop view mod_remove_comment_view;
-- Raise the post name length limit to 200 characters
alter table post alter column name type varchar(200);
-- regen post view
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
-- The mod views
create view mod_remove_post_view as
select mrp.*,
(select name from user_ u where mrp.mod_user_id = u.id) as mod_user_name,
(select name from post p where mrp.post_id = p.id) as post_name,
(select c.id from post p, community c where mrp.post_id = p.id and p.community_id = c.id) as community_id,
(select c.name from post p, community c where mrp.post_id = p.id and p.community_id = c.id) as community_name
from mod_remove_post mrp;
create view mod_lock_post_view as
select mlp.*,
(select name from user_ u where mlp.mod_user_id = u.id) as mod_user_name,
(select name from post p where mlp.post_id = p.id) as post_name,
(select c.id from post p, community c where mlp.post_id = p.id and p.community_id = c.id) as community_id,
(select c.name from post p, community c where mlp.post_id = p.id and p.community_id = c.id) as community_name
from mod_lock_post mlp;
create view mod_remove_comment_view as
select mrc.*,
(select name from user_ u where mrc.mod_user_id = u.id) as mod_user_name,
(select c.id from comment c where mrc.comment_id = c.id) as comment_user_id,
(select name from user_ u, comment c where mrc.comment_id = c.id and u.id = c.creator_id) as comment_user_name,
(select content from comment c where mrc.comment_id = c.id) as comment_content,
(select p.id from post p, comment c where mrc.comment_id = c.id and c.post_id = p.id) as post_id,
(select p.name from post p, comment c where mrc.comment_id = c.id and c.post_id = p.id) as post_name,
(select co.id from comment c, post p, community co where mrc.comment_id = c.id and c.post_id = p.id and p.community_id = co.id) as community_id,
(select co.name from comment c, post p, community co where mrc.comment_id = c.id and c.post_id = p.id and p.community_id = co.id) as community_name
from mod_remove_comment mrc;
create view mod_sticky_post_view as
select msp.*,
(select name from user_ u where msp.mod_user_id = u.id) as mod_user_name,
(select name from post p where msp.post_id = p.id) as post_name,
(select c.id from post p, community c where msp.post_id = p.id and p.community_id = c.id) as community_id,
(select c.name from post p, community c where msp.post_id = p.id and p.community_id = c.id) as community_name
from mod_sticky_post msp;
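-- A quick sanity check on the new limit, using the standard information_schema;
-- nothing here is specific to lemmy. The expected result is 200.
select character_maximum_length
from information_schema.columns
where table_name = 'post' and column_name = 'name';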


@@ -0,0 +1,206 @@
drop view reply_view;
drop view user_mention_view;
drop view user_mention_mview;
drop view comment_view;
drop view comment_mview;
drop materialized view comment_aggregates_mview;
drop view comment_aggregates_view;
-- reply and comment view
create view comment_aggregates_view as
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id;
create materialized view comment_aggregates_mview as select * from comment_aggregates_view;
create unique index idx_comment_aggregates_mview_id on comment_aggregates_mview (id);
create view comment_view as
with all_comment as
(
select
ca.*
from comment_aggregates_view ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view comment_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
-- Rebuild reply_view to reference comment_mview
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_mview cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
create view user_mention_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved,
um.recipient_id
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
left join user_mention um on um.comment_id = ac.id
union all
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
null as user_id,
null as my_vote,
null as saved,
um.recipient_id
from all_comment ac
left join user_mention um on um.comment_id = ac.id
;


@ -0,0 +1,220 @@
-- Add community_name and hot_rank to comment_view and user_mention_view, and subscribed to comment_view
-- Rebuild the comment view
drop view reply_view;
drop view user_mention_view;
drop view user_mention_mview;
drop view comment_view;
drop view comment_mview;
drop materialized view comment_aggregates_mview;
drop view comment_aggregates_view;
-- reply and comment view
create view comment_aggregates_view as
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select co.name from post p, community co where p.id = c.post_id and p.community_id = co.id) as community_name,
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(cl.score) , 0), c.published) as hot_rank
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id;
create materialized view comment_aggregates_mview as select * from comment_aggregates_view;
create unique index idx_comment_aggregates_mview_id on comment_aggregates_mview (id);
create view comment_view as
with all_comment as
(
select
ca.*
from comment_aggregates_view ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.community_id = cf.community_id) as subscribed,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as subscribed,
null as saved
from all_comment ac
;
create view comment_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.community_id = cf.community_id) as subscribed,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as subscribed,
null as saved
from all_comment ac
;
-- Do the reply_view referencing the comment_mview
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_mview cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.community_name,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.hot_rank,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
create view user_mention_mview as
with all_comment as
(
select
ca.*
from comment_aggregates_mview ca
)
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.community_name,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
ac.hot_rank,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved,
um.recipient_id
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
left join user_mention um on um.comment_id = ac.id
union all
select
ac.id,
um.id as user_mention_id,
ac.creator_id,
ac.post_id,
ac.parent_id,
ac.content,
ac.removed,
um.read,
ac.published,
ac.updated,
ac.deleted,
ac.community_id,
ac.community_name,
ac.banned,
ac.banned_from_community,
ac.creator_name,
ac.creator_avatar,
ac.score,
ac.upvotes,
ac.downvotes,
ac.hot_rank,
null as user_id,
null as my_vote,
null as saved,
um.recipient_id
from all_comment ac
left join user_mention um on um.comment_id = ac.id
;
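With community_name, subscribed, and hot_rank now exposed on the comment views, a hot-sorted listing for a single community can be read directly off comment_mview; a sketch (the community name is a placeholder):
select id, content, community_name, score, hot_rank
from comment_mview
where user_id is null and community_name = 'main'
order by hot_rank desc, published desc
limit 20;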


@ -0,0 +1,88 @@
drop view post_view;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
-- regen post view
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;


@ -0,0 +1,106 @@
-- Adds a newest_activity_time for the post_views, in order to sort by newest comment
drop view post_view;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
-- regen post view
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0),
(
case when (p.published < ('now'::timestamp - '1 month'::interval)) then p.published -- Prevents necro-bumps
else greatest(c.recent_comment_time, p.published)
end
)
) as hot_rank,
(
case when (p.published < ('now'::timestamp - '1 month'::interval)) then p.published -- Prevents necro-bumps
else greatest(c.recent_comment_time, p.published)
end
) as newest_activity_time
from post p
left join post_like pl on p.id = pl.post_id
left join (
select post_id,
max(published) as recent_comment_time
from comment
group by 1
) c on p.id = c.post_id
group by p.id, c.recent_comment_time;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
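newest_activity_time falls back to the post's own published date once a post is older than a month, so a late comment cannot necro-bump it; an active-style sort then only needs to order by that column. A sketch (limit chosen arbitrarily):
select id, name, number_of_comments, newest_activity_time
from post_view
where user_id is null
order by newest_activity_time desc
limit 25;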


@ -0,0 +1,112 @@
-- Adds a newest_activity_time for the post_views, in order to sort by newest comment
drop view post_view;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
-- Drop the columns
alter table post drop column embed_title;
alter table post drop column embed_description;
alter table post drop column embed_html;
alter table post drop column thumbnail_url;
-- regen post view
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0),
(
case when (p.published < ('now'::timestamp - '1 month'::interval)) then p.published -- Prevents necro-bumps
else greatest(c.recent_comment_time, p.published)
end
)
) as hot_rank,
(
case when (p.published < ('now'::timestamp - '1 month'::interval)) then p.published -- Prevents necro-bumps
else greatest(c.recent_comment_time, p.published)
end
) as newest_activity_time
from post p
left join post_like pl on p.id = pl.post_id
left join (
select post_id,
max(published) as recent_comment_time
from comment
group by 1
) c on p.id = c.post_id
group by p.id, c.recent_comment_time;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;


@ -0,0 +1,115 @@
-- Add the columns
alter table post add column embed_title text;
alter table post add column embed_description text;
alter table post add column embed_html text;
alter table post add column thumbnail_url text;
-- Regenerate the views
-- Adds a newest_activity_time for the post_views, in order to sort by newest comment
drop view post_view;
drop view post_mview;
drop materialized view post_aggregates_mview;
drop view post_aggregates_view;
-- regen post view
create view post_aggregates_view as
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0),
(
case when (p.published < ('now'::timestamp - '1 month'::interval)) then p.published -- Prevents necro-bumps
else greatest(c.recent_comment_time, p.published)
end
)
) as hot_rank,
(
case when (p.published < ('now'::timestamp - '1 month'::interval)) then p.published -- Prevents necro-bumps
else greatest(c.recent_comment_time, p.published)
end
) as newest_activity_time
from post p
left join post_like pl on p.id = pl.post_id
left join (
select post_id,
max(published) as recent_comment_time
from comment
group by 1
) c on p.id = c.post_id
group by p.id, c.recent_comment_time;
create materialized view post_aggregates_mview as select * from post_aggregates_view;
create unique index idx_post_aggregates_mview_id on post_aggregates_mview (id);
create view post_view as
with all_post as (
select
pa.*
from post_aggregates_view pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
create view post_mview as
with all_post as (
select
pa.*
from post_aggregates_mview pa
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
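The re-added embed columns ride along through p.* into post_aggregates_view and post_view, so the iframely metadata can be selected next to the ranking columns; for example (illustrative only):
select id, name, url, embed_title, thumbnail_url
from post_view
where user_id is null and thumbnail_url is not null
order by hot_rank desc
limit 10;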


@ -0,0 +1,211 @@
-- functions and triggers
drop trigger refresh_user on user_;
drop function refresh_user();
drop trigger refresh_post on post;
drop function refresh_post();
drop trigger refresh_post_like on post_like;
drop function refresh_post_like();
drop trigger refresh_community on community;
drop function refresh_community();
drop trigger refresh_community_follower on community_follower;
drop function refresh_community_follower();
drop trigger refresh_comment on comment;
drop function refresh_comment();
drop trigger refresh_comment_like on comment_like;
drop function refresh_comment_like();
-- post
-- Recreate the view
drop materialized view post_view;
create view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
;
drop materialized view user_view;
create view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
-- community
drop materialized view community_view;
create view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop materialized view comment_view;
create view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;


@ -0,0 +1,324 @@
-- post
drop view post_view;
create materialized view post_view as
with all_post as
(
select
p.*,
(select u.banned from user_ u where p.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb where p.creator_id = cb.user_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where p.creator_id = user_.id) as creator_name,
(select avatar from user_ where p.creator_id = user_.id) as creator_avatar,
(select name from community where p.community_id = community.id) as community_name,
(select removed from community c where p.community_id = c.id) as community_removed,
(select deleted from community c where p.community_id = c.id) as community_deleted,
(select nsfw from community c where p.community_id = c.id) as community_nsfw,
(select count(*) from comment where comment.post_id = p.id) as number_of_comments,
coalesce(sum(pl.score), 0) as score,
count (case when pl.score = 1 then 1 else null end) as upvotes,
count (case when pl.score = -1 then 1 else null end) as downvotes,
hot_rank(coalesce(sum(pl.score) , 0), p.published) as hot_rank
from post p
left join post_like pl on p.id = pl.post_id
group by p.id
)
select
ap.*,
u.id as user_id,
coalesce(pl.score, 0) as my_vote,
(select cf.id::bool from community_follower cf where u.id = cf.user_id and cf.community_id = ap.community_id) as subscribed,
(select pr.id::bool from post_read pr where u.id = pr.user_id and pr.post_id = ap.id) as read,
(select ps.id::bool from post_saved ps where u.id = ps.user_id and ps.post_id = ap.id) as saved
from user_ u
cross join all_post ap
left join post_like pl on u.id = pl.user_id and ap.id = pl.post_id
union all
select
ap.*,
null as user_id,
null as my_vote,
null as subscribed,
null as read,
null as saved
from all_post ap
with data
;
create unique index idx_post_view_unique on post_view (id, user_id);
create index idx_post_view_user_id on post_view (user_id);
create index idx_post_view_hot_rank_published on post_view (hot_rank desc, published desc);
create index idx_post_view_published on post_view (published desc);
create index idx_post_view_score on post_view (score desc);
-- user_view
drop view user_view;
create materialized view user_view as
select id,
name,
avatar,
email,
fedi_name,
admin,
banned,
show_avatars,
send_notifications_to_email,
published,
(select count(*) from post p where p.creator_id = u.id) as number_of_posts,
(select coalesce(sum(score), 0) from post p, post_like pl where u.id = p.creator_id and p.id = pl.post_id) as post_score,
(select count(*) from comment c where c.creator_id = u.id) as number_of_comments,
(select coalesce(sum(score), 0) from comment c, comment_like cl where u.id = c.creator_id and c.id = cl.comment_id) as comment_score
from user_ u;
create unique index idx_user_view_unique on user_view (id);
create index idx_user_view_comment_published on user_view (comment_score desc, published desc);
create index idx_user_view_admin on user_view (admin);
create index idx_user_view_banned on user_view (banned);
-- community
drop view community_view;
create materialized view community_view as
with all_community as
(
select *,
(select name from user_ u where c.creator_id = u.id) as creator_name,
(select avatar from user_ u where c.creator_id = u.id) as creator_avatar,
(select name from category ct where c.category_id = ct.id) as category_name,
(select count(*) from community_follower cf where cf.community_id = c.id) as number_of_subscribers,
(select count(*) from post p where p.community_id = c.id) as number_of_posts,
(select count(*) from comment co, post p where c.id = p.community_id and p.id = co.post_id) as number_of_comments,
hot_rank((select count(*) from community_follower cf where cf.community_id = c.id), c.published) as hot_rank
from community c
)
select
ac.*,
u.id as user_id,
(select cf.id::boolean from community_follower cf where u.id = cf.user_id and ac.id = cf.community_id) as subscribed
from user_ u
cross join all_community ac
union all
select
ac.*,
null as user_id,
null as subscribed
from all_community ac
;
create unique index idx_community_view_unique on community_view (id, user_id);
create index idx_community_view_user_id on community_view (user_id);
create index idx_community_view_hot_rank_subscribed on community_view (hot_rank desc, number_of_subscribers desc);
-- reply and comment view
drop view reply_view;
drop view user_mention_view;
drop view comment_view;
create materialized view comment_view as
with all_comment as
(
select
c.*,
(select community_id from post p where p.id = c.post_id),
(select u.banned from user_ u where c.creator_id = u.id) as banned,
(select cb.id::bool from community_user_ban cb, post p where c.creator_id = cb.user_id and p.id = c.post_id and p.community_id = cb.community_id) as banned_from_community,
(select name from user_ where c.creator_id = user_.id) as creator_name,
(select avatar from user_ where c.creator_id = user_.id) as creator_avatar,
coalesce(sum(cl.score), 0) as score,
count (case when cl.score = 1 then 1 else null end) as upvotes,
count (case when cl.score = -1 then 1 else null end) as downvotes
from comment c
left join comment_like cl on c.id = cl.comment_id
group by c.id
)
select
ac.*,
u.id as user_id,
coalesce(cl.score, 0) as my_vote,
(select cs.id::bool from comment_saved cs where u.id = cs.user_id and cs.comment_id = ac.id) as saved
from user_ u
cross join all_comment ac
left join comment_like cl on u.id = cl.user_id and ac.id = cl.comment_id
union all
select
ac.*,
null as user_id,
null as my_vote,
null as saved
from all_comment ac
;
create unique index idx_comment_view_unique on comment_view (id, user_id);
create index idx_comment_view_user_id on comment_view (user_id);
create index idx_comment_view_creator_id on comment_view (creator_id);
create index idx_comment_view_post_id on comment_view (post_id);
create index idx_comment_view_score on comment_view (score desc);
create view reply_view as
with closereply as (
select
c2.id,
c2.creator_id as sender_id,
c.creator_id as recipient_id
from comment c
inner join comment c2 on c.id = c2.parent_id
where c2.creator_id != c.creator_id
-- Do union where post is null
union
select
c.id,
c.creator_id as sender_id,
p.creator_id as recipient_id
from comment c, post p
where c.post_id = p.id and c.parent_id is null and c.creator_id != p.creator_id
)
select cv.*,
closereply.recipient_id
from comment_view cv, closereply
where closereply.id = cv.id
;
-- user mention
create view user_mention_view as
select
c.id,
um.id as user_mention_id,
c.creator_id,
c.post_id,
c.parent_id,
c.content,
c.removed,
um.read,
c.published,
c.updated,
c.deleted,
c.community_id,
c.banned,
c.banned_from_community,
c.creator_name,
c.creator_avatar,
c.score,
c.upvotes,
c.downvotes,
c.user_id,
c.my_vote,
c.saved,
um.recipient_id
from user_mention um, comment_view c
where um.comment_id = c.id;
-- user
create or replace function refresh_user()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_view; -- cause of bans
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_user
after insert or update or delete or truncate
on user_
for each statement
execute procedure refresh_user();
-- post
create or replace function refresh_post()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_post
after insert or update or delete or truncate
on post
for each statement
execute procedure refresh_post();
-- post_like
create or replace function refresh_post_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_post_like
after insert or update or delete or truncate
on post_like
for each statement
execute procedure refresh_post_like();
-- community
create or replace function refresh_community()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
refresh materialized view concurrently community_view;
return null;
end $$;
create trigger refresh_community
after insert or update or delete or truncate
on community
for each statement
execute procedure refresh_community();
-- community_follower
create or replace function refresh_community_follower()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently community_view;
refresh materialized view concurrently post_view;
return null;
end $$;
create trigger refresh_community_follower
after insert or update or delete or truncate
on community_follower
for each statement
execute procedure refresh_community_follower();
-- comment
create or replace function refresh_comment()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently post_view;
refresh materialized view concurrently comment_view;
return null;
end $$;
create trigger refresh_comment
after insert or update or delete or truncate
on comment
for each statement
execute procedure refresh_comment();
-- comment_like
create or replace function refresh_comment_like()
returns trigger language plpgsql
as $$
begin
refresh materialized view concurrently comment_view;
return null;
end $$;
create trigger refresh_comment_like
after insert or update or delete or truncate
on comment_like
for each statement
execute procedure refresh_comment_like();
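Each trigger above uses refresh materialized view concurrently, which is only possible because of the unique indexes created on the materialized views; the same refresh can also be issued by hand, e.g. after a bulk import:
refresh materialized view concurrently post_view;
refresh materialized view concurrently comment_view;
refresh materialized view concurrently community_view;
refresh materialized view concurrently user_view;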

server/query_testing/apache_bench_report.sh (new executable file)

@ -0,0 +1,26 @@
#!/bin/bash
set -e
declare -a arr=(
"https://mastodon.social/"
"https://peertube.social/"
"https://dev.lemmy.ml/"
"https://dev.lemmy.ml/feeds/all.xml"
"https://dev.lemmy.ml/.well-known/nodeinfo"
"https://fediverse.blog/.well-known/nodeinfo"
"https://torrents-csv.ml/service/search?q=wheel&page=1&type_=torrent"
)
## now loop through the above array
for i in "${arr[@]}"
do
ab -c 10 -t 10 "$i" > out.abtest
grep "Server Hostname:" out.abtest
grep "Document Path:" out.abtest
grep "Requests per second" out.abtest
grep "(mean, across all concurrent requests)" out.abtest
grep "Transfer rate:" out.abtest
echo "---"
done
rm *.abtest

server/query_testing/api_benchmark.sh (new executable file)

@ -0,0 +1,34 @@
#!/bin/bash
set -e
# By default, this script runs against `http://127.0.0.1:8536`, but you can pass a different Lemmy instance,
# eg `./api_benchmark.sh "https://example.com"`.
DOMAIN=${1:-"http://127.0.0.1:8536"}
declare -a arr=(
"/api/v1/site"
"/api/v1/categories"
"/api/v1/modlog"
"/api/v1/search?q=test&type_=Posts&sort=Hot"
"/api/v1/community"
"/api/v1/community/list?sort=Hot"
"/api/v1/post/list?sort=Hot&type_=All"
)
## now loop through the above array
for path in "${arr[@]}"
do
URL="$DOMAIN$path"
printf "\n\n\n"
echo "testing $URL"
curl --show-error --fail --silent "$URL" >/dev/null
ab -c 64 -t 10 "$URL" > out.abtest
grep "Server Hostname:" out.abtest
grep "Document Path:" out.abtest
grep "Requests per second" out.abtest
grep "(mean, across all concurrent requests)" out.abtest
grep "Transfer rate:" out.abtest
echo "---"
done
rm *.abtest


@ -0,0 +1,32 @@
#!/bin/bash
set -e
# Do the views first
echo "explain (analyze, format json) select * from user_mview" > explain.sql
psql -qAt -U lemmy -f explain.sql > user_view.json
echo "explain (analyze, format json) select * from post_mview where user_id is null order by hot_rank desc, published desc" > explain.sql
psql -qAt -U lemmy -f explain.sql > post_view.json
echo "explain (analyze, format json) select * from comment_mview where user_id is null" > explain.sql
psql -qAt -U lemmy -f explain.sql > comment_view.json
echo "explain (analyze, format json) select * from community_mview where user_id is null order by hot_rank desc" > explain.sql
psql -qAt -U lemmy -f explain.sql > community_view.json
echo "explain (analyze, format json) select * from site_view limit 1" > explain.sql
psql -qAt -U lemmy -f explain.sql > site_view.json
echo "explain (analyze, format json) select * from reply_view where user_id = 34 and recipient_id = 34" > explain.sql
psql -qAt -U lemmy -f explain.sql > reply_view.json
echo "explain (analyze, format json) select * from user_mention_view where user_id = 34 and recipient_id = 34" > explain.sql
psql -qAt -U lemmy -f explain.sql > user_mention_view.json
echo "explain (analyze, format json) select * from user_mention_mview where user_id = 34 and recipient_id = 34" > explain.sql
psql -qAt -U lemmy -f explain.sql > user_mention_mview.json
grep "Execution Time" *.json
rm explain.sql
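Each generated .json file holds one EXPLAIN plan, so the same check can be run interactively from psql; for example, the anonymous hot post listing used above:
explain (analyze, format json)
select * from post_mview
where user_id is null
order by hot_rank desc, published desc;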


@ -1,10 +1,14 @@
use super::*;
+ use crate::send_email;
+ use crate::settings::Settings;
+ use diesel::PgConnection;
+ use std::str::FromStr;
#[derive(Serialize, Deserialize)]
pub struct CreateComment {
content: String,
parent_id: Option<i32>,
- edit_id: Option<i32>,
+ edit_id: Option<i32>, // TODO this isn't used
pub post_id: i32,
auth: String,
}
@ -12,7 +16,7 @@ pub struct CreateComment {
#[derive(Serialize, Deserialize)]
pub struct EditComment {
content: String,
- parent_id: Option<i32>,
+ parent_id: Option<i32>, // TODO why are the parent_id, creator_id, post_id, etc fields required? They aren't going to change
edit_id: i32,
creator_id: i32,
pub post_id: i32,
@ -32,8 +36,8 @@ pub struct SaveComment {
#[derive(Serialize, Deserialize, Clone)]
pub struct CommentResponse {
- op: String,
pub comment: CommentView,
+ pub recipient_ids: Vec<i32>,
}
#[derive(Serialize, Deserialize)]
@ -44,27 +48,43 @@ pub struct CreateCommentLike {
auth: String,
}
+ #[derive(Serialize, Deserialize)]
+ pub struct GetComments {
+ type_: String,
+ sort: String,
+ page: Option<i64>,
+ limit: Option<i64>,
+ pub community_id: Option<i32>,
+ auth: Option<String>,
+ }
+ #[derive(Serialize, Deserialize)]
+ pub struct GetCommentsResponse {
+ comments: Vec<CommentView>,
+ }
impl Perform<CommentResponse> for Oper<CreateComment> {
- fn perform(&self) -> Result<CommentResponse, Error> {
+ fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &CreateComment = &self.data;
- let conn = establish_connection();
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
- Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?,
+ Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
+ let hostname = &format!("https://{}", Settings::get().hostname);
// Check for a community ban
let post = Post::read(&conn, data.post_id)?;
if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() {
- return Err(APIError::err(&self.op, "community_ban"))?;
+ return Err(APIError::err("community_ban").into());
}
// Check for a site ban
if UserView::read(&conn, user_id)?.banned {
- return Err(APIError::err(&self.op, "site_ban"))?;
+ return Err(APIError::err("site_ban").into());
}
let content_slurs_removed = remove_slurs(&data.content.to_owned());
@ -82,24 +102,24 @@ impl Perform<CommentResponse> for Oper<CreateComment> {
let inserted_comment = match Comment::create(&conn, &comment_form) {
Ok(comment) => comment,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_create_comment"))?,
+ Err(_e) => return Err(APIError::err("couldnt_create_comment").into()),
};
+ let mut recipient_ids = Vec::new();
// Scan the comment for user mentions, add those rows
let extracted_usernames = extract_usernames(&comment_form.content);
for username_mention in &extracted_usernames {
- let mention_user = User_::read_from_name(&conn, username_mention.to_string());
- if mention_user.is_ok() {
- let mention_user_id = mention_user?.id;
+ if let Ok(mention_user) = User_::read_from_name(&conn, (*username_mention).to_string()) {
// You can't mention yourself
// At some point, make it so you can't tag the parent creator either
// This can cause two notifications, one for reply and the other for mention
- if mention_user_id != user_id {
+ if mention_user.id != user_id {
+ recipient_ids.push(mention_user.id);
let user_mention_form = UserMentionForm {
- recipient_id: mention_user_id,
+ recipient_id: mention_user.id,
comment_id: inserted_comment.id,
read: None,
};
@ -109,10 +129,83 @@ impl Perform<CommentResponse> for Oper<CreateComment> {
match UserMention::create(&conn, &user_mention_form) {
Ok(_mention) => (),
Err(_e) => eprintln!("{}", &_e),
};
+ // Send an email to those users that have notifications on
+ if mention_user.send_notifications_to_email {
+ if let Some(mention_email) = mention_user.email {
+ let subject = &format!(
+ "{} - Mentioned by {}",
+ Settings::get().hostname,
+ claims.username
+ );
+ let html = &format!(
+ "<h1>User Mention</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
+ claims.username, comment_form.content, hostname
+ );
+ match send_email(subject, &mention_email, &mention_user.name, html) {
+ Ok(_o) => _o,
+ Err(e) => eprintln!("{}", e),
+ };
+ }
+ }
}
}
}
}
+ }
+ // Send notifs to the parent commenter / poster
+ match data.parent_id {
+ Some(parent_id) => {
+ let parent_comment = Comment::read(&conn, parent_id)?;
+ if parent_comment.creator_id != user_id {
+ let parent_user = User_::read(&conn, parent_comment.creator_id)?;
+ recipient_ids.push(parent_user.id);
+ if parent_user.send_notifications_to_email {
+ if let Some(comment_reply_email) = parent_user.email {
+ let subject = &format!(
+ "{} - Reply from {}",
+ Settings::get().hostname,
+ claims.username
+ );
+ let html = &format!(
+ "<h1>Comment Reply</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
+ claims.username, comment_form.content, hostname
+ );
+ match send_email(subject, &comment_reply_email, &parent_user.name, html) {
+ Ok(_o) => _o,
+ Err(e) => eprintln!("{}", e),
+ };
+ }
+ }
+ }
+ }
+ // Its a post
+ None => {
+ if post.creator_id != user_id {
+ let parent_user = User_::read(&conn, post.creator_id)?;
+ recipient_ids.push(parent_user.id);
+ if parent_user.send_notifications_to_email {
+ if let Some(post_reply_email) = parent_user.email {
+ let subject = &format!(
+ "{} - Reply from {}",
+ Settings::get().hostname,
+ claims.username
+ );
+ let html = &format!(
+ "<h1>Post Reply</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
+ claims.username, comment_form.content, hostname
+ );
+ match send_email(subject, &post_reply_email, &parent_user.name, html) {
+ Ok(_o) => _o,
+ Err(e) => eprintln!("{}", e),
+ };
+ }
+ }
+ }
+ }
+ };
// You like your own comment by default
let like_form = CommentLikeForm {
@ -124,26 +217,25 @@ impl Perform<CommentResponse> for Oper<CreateComment> {
let _inserted_like = match CommentLike::like(&conn, &like_form) {
Ok(like) => like,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_comment"))?,
+ Err(_e) => return Err(APIError::err("couldnt_like_comment").into()),
};
let comment_view = CommentView::read(&conn, inserted_comment.id, Some(user_id))?;
Ok(CommentResponse {
- op: self.op.to_string(),
comment: comment_view,
+ recipient_ids,
})
}
}
impl Perform<CommentResponse> for Oper<EditComment> {
- fn perform(&self) -> Result<CommentResponse, Error> {
+ fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &EditComment = &self.data;
- let conn = establish_connection();
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
- Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?,
+ Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
@ -163,17 +255,17 @@ impl Perform<CommentResponse> for Oper<EditComment> {
editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect());
if !editors.contains(&user_id) {
- return Err(APIError::err(&self.op, "no_comment_edit_allowed"))?;
+ return Err(APIError::err("no_comment_edit_allowed").into());
}
// Check for a community ban
if CommunityUserBanView::get(&conn, user_id, orig_comment.community_id).is_ok() {
- return Err(APIError::err(&self.op, "community_ban"))?;
+ return Err(APIError::err("community_ban").into());
}
// Check for a site ban
if UserView::read(&conn, user_id)?.banned {
- return Err(APIError::err(&self.op, "site_ban"))?;
+ return Err(APIError::err("site_ban").into());
}
}
@ -196,14 +288,16 @@ impl Perform<CommentResponse> for Oper<EditComment> {
let _updated_comment = match Comment::update(&conn, data.edit_id, &comment_form) {
Ok(comment) => comment,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_comment"))?,
+ Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
};
+ let mut recipient_ids = Vec::new();
// Scan the comment for user mentions, add those rows
let extracted_usernames = extract_usernames(&comment_form.content);
for username_mention in &extracted_usernames {
- let mention_user = User_::read_from_name(&conn, username_mention.to_string());
+ let mention_user = User_::read_from_name(&conn, (*username_mention).to_string());
if mention_user.is_ok() {
let mention_user_id = mention_user?.id;
@ -212,6 +306,8 @@ impl Perform<CommentResponse> for Oper<EditComment> {
// At some point, make it so you can't tag the parent creator either
// This can cause two notifications, one for reply and the other for mention
if mention_user_id != user_id {
+ recipient_ids.push(mention_user_id);
let user_mention_form = UserMentionForm {
recipient_id: mention_user_id,
comment_id: data.edit_id,
@ -228,6 +324,21 @@ impl Perform<CommentResponse> for Oper<EditComment> {
}
}
+ // Add to recipient ids
+ match data.parent_id {
+ Some(parent_id) => {
+ let parent_comment = Comment::read(&conn, parent_id)?;
+ if parent_comment.creator_id != user_id {
+ let parent_user = User_::read(&conn, parent_comment.creator_id)?;
+ recipient_ids.push(parent_user.id);
+ }
+ }
+ None => {
+ let post = Post::read(&conn, data.post_id)?;
+ recipient_ids.push(post.creator_id);
+ }
+ }
// Mod tables
if let Some(removed) = data.removed.to_owned() {
let form = ModRemoveCommentForm {
@ -242,20 +353,19 @@ impl Perform<CommentResponse> for Oper<EditComment> {
let comment_view = CommentView::read(&conn, data.edit_id, Some(user_id))?;
Ok(CommentResponse {
- op: self.op.to_string(),
comment: comment_view,
+ recipient_ids,
})
}
}
impl Perform<CommentResponse> for Oper<SaveComment> {
- fn perform(&self) -> Result<CommentResponse, Error> {
+ fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &SaveComment = &self.data;
- let conn = establish_connection();
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
- Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?,
+ Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
@ -268,53 +378,70 @@ impl Perform<CommentResponse> for Oper<SaveComment> {
if data.save {
match CommentSaved::save(&conn, &comment_saved_form) {
Ok(comment) => comment,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_comment"))?,
+ Err(_e) => return Err(APIError::err("couldnt_save_comment").into()),
};
} else {
match CommentSaved::unsave(&conn, &comment_saved_form) {
Ok(comment) => comment,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_comment"))?,
+ Err(_e) => return Err(APIError::err("couldnt_save_comment").into()),
};
}
let comment_view = CommentView::read(&conn, data.comment_id, Some(user_id))?;
Ok(CommentResponse {
- op: self.op.to_string(),
comment: comment_view,
+ recipient_ids: Vec::new(),
})
}
}
impl Perform<CommentResponse> for Oper<CreateCommentLike> {
- fn perform(&self) -> Result<CommentResponse, Error> {
+ fn perform(&self, conn: &PgConnection) -> Result<CommentResponse, Error> {
let data: &CreateCommentLike = &self.data;
- let conn = establish_connection();
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
- Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?,
+ Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
+ let mut recipient_ids = Vec::new();
// Don't do a downvote if site has downvotes disabled
if data.score == -1 {
let site = SiteView::read(&conn)?;
- if site.enable_downvotes == false {
+ if !site.enable_downvotes {
- return Err(APIError::err(&self.op, "downvotes_disabled"))?;
+ return Err(APIError::err("downvotes_disabled").into());
}
}
// Check for a community ban
let post = Post::read(&conn, data.post_id)?;
if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() {
- return Err(APIError::err(&self.op, "community_ban"))?;
+ return Err(APIError::err("community_ban").into());
}
// Check for a site ban
if UserView::read(&conn, user_id)?.banned {
- return Err(APIError::err(&self.op, "site_ban"))?;
+ return Err(APIError::err("site_ban").into());
}
+ let comment = Comment::read(&conn, data.comment_id)?;
+ // Add to recipient ids
+ match comment.parent_id {
+ Some(parent_id) => {
+ let parent_comment = Comment::read(&conn, parent_id)?;
+ if parent_comment.creator_id != user_id {
+ let parent_user = User_::read(&conn, parent_comment.creator_id)?;
+ recipient_ids.push(parent_user.id);
+ }
+ }
+ None => {
+ recipient_ids.push(post.creator_id);
+ }
+ }
let like_form = CommentLikeForm {
@ -328,11 +455,11 @@ impl Perform<CommentResponse> for Oper<CreateCommentLike> {
CommentLike::remove(&conn, &like_form)?;
// Only add the like if the score isnt 0
- let do_add = &like_form.score != &0 && (&like_form.score == &1 || &like_form.score == &-1);
+ let do_add = like_form.score != 0 && (like_form.score == 1 || like_form.score == -1);
if do_add {
let _inserted_like = match CommentLike::like(&conn, &like_form) {
Ok(like) => like,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_comment"))?,
+ Err(_e) => return Err(APIError::err("couldnt_like_comment").into()),
};
}
@ -340,8 +467,45 @@ impl Perform<CommentResponse> for Oper<CreateCommentLike> {
let liked_comment = CommentView::read(&conn, data.comment_id, Some(user_id))?;
Ok(CommentResponse {
- op: self.op.to_string(),
comment: liked_comment,
+ recipient_ids,
})
}
}
+ impl Perform<GetCommentsResponse> for Oper<GetComments> {
+ fn perform(&self, conn: &PgConnection) -> Result<GetCommentsResponse, Error> {
+ let data: &GetComments = &self.data;
+ let user_claims: Option<Claims> = match &data.auth {
+ Some(auth) => match Claims::decode(&auth) {
+ Ok(claims) => Some(claims.claims),
+ Err(_e) => None,
+ },
+ None => None,
+ };
+ let user_id = match &user_claims {
+ Some(claims) => Some(claims.id),
+ None => None,
+ };
+ let type_ = ListingType::from_str(&data.type_)?;
+ let sort = SortType::from_str(&data.sort)?;
+ let comments = match CommentQueryBuilder::create(&conn)
+ .listing_type(type_)
+ .sort(&sort)
+ .for_community_id(data.community_id)
+ .my_user_id(user_id)
+ .page(data.page)
+ .limit(data.limit)
+ .list()
+ {
+ Ok(comments) => comments,
+ Err(_e) => return Err(APIError::err("couldnt_get_comments").into()),
+ };
+ Ok(GetCommentsResponse { comments })
+ }
+ }


@@ -1,4 +1,5 @@
use super::*;
+ use diesel::PgConnection;
use std::str::FromStr;
#[derive(Serialize, Deserialize)]
@@ -10,10 +11,10 @@ pub struct GetCommunity {
#[derive(Serialize, Deserialize)]
pub struct GetCommunityResponse {
- op: String,
- community: CommunityView,
+ pub community: CommunityView,
moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>,
+ pub online: usize,
}
#[derive(Serialize, Deserialize)]
@@ -28,7 +29,6 @@ pub struct CreateCommunity {
#[derive(Serialize, Deserialize, Clone)]
pub struct CommunityResponse {
- op: String,
pub community: CommunityView,
}
@@ -42,7 +42,6 @@ pub struct ListCommunities {
#[derive(Serialize, Deserialize)]
pub struct ListCommunitiesResponse {
- op: String,
communities: Vec<CommunityView>,
}
@@ -58,7 +57,6 @@ pub struct BanFromCommunity {
#[derive(Serialize, Deserialize)]
pub struct BanFromCommunityResponse {
- op: String,
user: UserView,
banned: bool,
}
@@ -73,7 +71,6 @@ pub struct AddModToCommunity {
#[derive(Serialize, Deserialize)]
pub struct AddModToCommunityResponse {
- op: String,
moderators: Vec<CommunityModeratorView>,
}
@@ -106,7 +103,6 @@ pub struct GetFollowedCommunities {
#[derive(Serialize, Deserialize)]
pub struct GetFollowedCommunitiesResponse {
- op: String,
communities: Vec<CommunityFollowerView>,
}
@@ -118,9 +114,8 @@ pub struct TransferCommunity {
}
impl Perform<GetCommunityResponse> for Oper<GetCommunity> { impl Perform<GetCommunityResponse> for Oper<GetCommunity> {
fn perform(&self) -> Result<GetCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetCommunityResponse, Error> {
let data: &GetCommunity = &self.data; let data: &GetCommunity = &self.data;
let conn = establish_connection();
let user_id: Option<i32> = match &data.auth { let user_id: Option<i32> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -136,21 +131,24 @@ impl Perform<GetCommunityResponse> for Oper<GetCommunity> {
let community_id = match data.id { let community_id = match data.id {
Some(id) => id, Some(id) => id,
None => { None => {
match Community::read_from_name(&conn, data.name.to_owned().unwrap_or("main".to_string())) { match Community::read_from_name(
&conn,
data.name.to_owned().unwrap_or_else(|| "main".to_string()),
) {
Ok(community) => community.id, Ok(community) => community.id,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
} }
} }
}; };
let community_view = match CommunityView::read(&conn, community_id, user_id) { let community_view = match CommunityView::read(&conn, community_id, user_id) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
let moderators = match CommunityModeratorView::for_community(&conn, community_id) { let moderators = match CommunityModeratorView::for_community(&conn, community_id) {
Ok(moderators) => moderators, Ok(moderators) => moderators,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
let site_creator_id = Site::read(&conn, 1)?.creator_id; let site_creator_id = Site::read(&conn, 1)?.creator_id;
@ -161,36 +159,42 @@ impl Perform<GetCommunityResponse> for Oper<GetCommunity> {
// Return the jwt // Return the jwt
Ok(GetCommunityResponse { Ok(GetCommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
moderators, moderators,
admins, admins,
online: 0,
}) })
} }
} }
impl Perform<CommunityResponse> for Oper<CreateCommunity> { impl Perform<CommunityResponse> for Oper<CreateCommunity> {
fn perform(&self) -> Result<CommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommunityResponse, Error> {
let data: &CreateCommunity = &self.data; let data: &CreateCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) if let Err(slurs) = slur_check(&data.name) {
|| has_slurs(&data.title) return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
|| (data.description.is_some() && has_slurs(&data.description.to_owned().unwrap())) }
{
return Err(APIError::err(&self.op, "no_slurs"))?; if let Err(slurs) = slur_check(&data.title) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
// When you create a community, make sure the user becomes a moderator and a follower // When you create a community, make sure the user becomes a moderator and a follower
@ -208,7 +212,7 @@ impl Perform<CommunityResponse> for Oper<CreateCommunity> {
let inserted_community = match Community::create(&conn, &community_form) { let inserted_community = match Community::create(&conn, &community_form) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "community_already_exists"))?, Err(_e) => return Err(APIError::err("community_already_exists").into()),
}; };
let community_moderator_form = CommunityModeratorForm { let community_moderator_form = CommunityModeratorForm {
@ -219,12 +223,7 @@ impl Perform<CommunityResponse> for Oper<CreateCommunity> {
let _inserted_community_moderator = let _inserted_community_moderator =
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
let community_follower_form = CommunityFollowerForm { let community_follower_form = CommunityFollowerForm {
@ -235,38 +234,45 @@ impl Perform<CommunityResponse> for Oper<CreateCommunity> {
let _inserted_community_follower = let _inserted_community_follower =
match CommunityFollower::follow(&conn, &community_follower_form) { match CommunityFollower::follow(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
let community_view = CommunityView::read(&conn, inserted_community.id, Some(user_id))?; let community_view = CommunityView::read(&conn, inserted_community.id, Some(user_id))?;
Ok(CommunityResponse { Ok(CommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
}) })
} }
} }
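`CreateCommunity` above now validates the name, title and description with `slur_check` and builds the error message with `slurs_vec_to_str`. Those functions live elsewhere in the crate and are not shown in this diff; the sketch below only mirrors the shapes implied by the call sites (a `Result` whose `Err` carries the matched words), with a placeholder word list and an assumed message format:

```rust
// Sketch only: shapes inferred from the call sites in this diff, not the real bodies.
fn slur_check(text: &str) -> Result<(), Vec<&str>> {
    const SLURS: [&str; 2] = ["badword1", "badword2"]; // placeholder list
    let found: Vec<&str> = SLURS.iter().copied().filter(|s| text.contains(*s)).collect();
    if found.is_empty() { Ok(()) } else { Err(found) }
}

fn slurs_vec_to_str(slurs: Vec<&str>) -> String {
    // Assumed format: the matched words become part of the APIError message.
    format!("No slurs - {}", slurs.join(", "))
}

fn main() {
    assert!(slur_check("a perfectly fine community title").is_ok());
    if let Err(found) = slur_check("this title contains badword1") {
        println!("{}", slurs_vec_to_str(found)); // would become the APIError message
    }
}
```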
impl Perform<CommunityResponse> for Oper<EditCommunity> { impl Perform<CommunityResponse> for Oper<EditCommunity> {
fn perform(&self) -> Result<CommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommunityResponse, Error> {
let data: &EditCommunity = &self.data; let data: &EditCommunity = &self.data;
if has_slurs(&data.name) || has_slurs(&data.title) { if let Err(slurs) = slur_check(&data.name) {
return Err(APIError::err(&self.op, "no_slurs"))?; return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
} }
let conn = establish_connection(); if let Err(slurs) = slur_check(&data.title) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
}
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
// Verify its a mod // Verify its a mod
@ -279,7 +285,7 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
); );
editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect()); editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect());
if !editors.contains(&user_id) { if !editors.contains(&user_id) {
return Err(APIError::err(&self.op, "no_community_edit_allowed"))?; return Err(APIError::err("no_community_edit_allowed").into());
} }
let community_form = CommunityForm { let community_form = CommunityForm {
@ -296,7 +302,7 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
let _updated_community = match Community::update(&conn, data.edit_id, &community_form) { let _updated_community = match Community::update(&conn, data.edit_id, &community_form) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_community"))?, Err(_e) => return Err(APIError::err("couldnt_update_community").into()),
}; };
// Mod tables // Mod tables
@ -318,16 +324,14 @@ impl Perform<CommunityResponse> for Oper<EditCommunity> {
let community_view = CommunityView::read(&conn, data.edit_id, Some(user_id))?; let community_view = CommunityView::read(&conn, data.edit_id, Some(user_id))?;
Ok(CommunityResponse { Ok(CommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
}) })
} }
} }
impl Perform<ListCommunitiesResponse> for Oper<ListCommunities> { impl Perform<ListCommunitiesResponse> for Oper<ListCommunities> {
fn perform(&self) -> Result<ListCommunitiesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<ListCommunitiesResponse, Error> {
let data: &ListCommunities = &self.data; let data: &ListCommunities = &self.data;
let conn = establish_connection();
let user_claims: Option<Claims> = match &data.auth { let user_claims: Option<Claims> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -351,28 +355,24 @@ impl Perform<ListCommunitiesResponse> for Oper<ListCommunities> {
let communities = CommunityQueryBuilder::create(&conn) let communities = CommunityQueryBuilder::create(&conn)
.sort(&sort) .sort(&sort)
.from_user_id(user_id) .for_user(user_id)
.show_nsfw(show_nsfw) .show_nsfw(show_nsfw)
.page(data.page) .page(data.page)
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
// Return the jwt // Return the jwt
Ok(ListCommunitiesResponse { Ok(ListCommunitiesResponse { communities })
op: self.op.to_string(),
communities,
})
} }
} }
impl Perform<CommunityResponse> for Oper<FollowCommunity> { impl Perform<CommunityResponse> for Oper<FollowCommunity> {
fn perform(&self) -> Result<CommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<CommunityResponse, Error> {
let data: &FollowCommunity = &self.data; let data: &FollowCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -385,32 +385,30 @@ impl Perform<CommunityResponse> for Oper<FollowCommunity> {
if data.follow { if data.follow {
match CommunityFollower::follow(&conn, &community_follower_form) { match CommunityFollower::follow(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
} else { } else {
match CommunityFollower::ignore(&conn, &community_follower_form) { match CommunityFollower::ignore(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
} }
let community_view = CommunityView::read(&conn, data.community_id, Some(user_id))?; let community_view = CommunityView::read(&conn, data.community_id, Some(user_id))?;
Ok(CommunityResponse { Ok(CommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
}) })
} }
} }
impl Perform<GetFollowedCommunitiesResponse> for Oper<GetFollowedCommunities> { impl Perform<GetFollowedCommunitiesResponse> for Oper<GetFollowedCommunities> {
fn perform(&self) -> Result<GetFollowedCommunitiesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetFollowedCommunitiesResponse, Error> {
let data: &GetFollowedCommunities = &self.data; let data: &GetFollowedCommunities = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -418,25 +416,21 @@ impl Perform<GetFollowedCommunitiesResponse> for Oper<GetFollowedCommunities> {
let communities: Vec<CommunityFollowerView> = let communities: Vec<CommunityFollowerView> =
match CommunityFollowerView::for_user(&conn, user_id) { match CommunityFollowerView::for_user(&conn, user_id) {
Ok(communities) => communities, Ok(communities) => communities,
Err(_e) => return Err(APIError::err(&self.op, "system_err_login"))?, Err(_e) => return Err(APIError::err("system_err_login").into()),
}; };
// Return the jwt // Return the jwt
Ok(GetFollowedCommunitiesResponse { Ok(GetFollowedCommunitiesResponse { communities })
op: self.op.to_string(),
communities,
})
} }
} }
impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> { impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
fn perform(&self) -> Result<BanFromCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<BanFromCommunityResponse, Error> {
let data: &BanFromCommunity = &self.data; let data: &BanFromCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -449,12 +443,12 @@ impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
if data.ban { if data.ban {
match CommunityUserBan::ban(&conn, &community_user_ban_form) { match CommunityUserBan::ban(&conn, &community_user_ban_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_user_already_banned"))?, Err(_e) => return Err(APIError::err("community_user_already_banned").into()),
}; };
} else { } else {
match CommunityUserBan::unban(&conn, &community_user_ban_form) { match CommunityUserBan::unban(&conn, &community_user_ban_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_user_already_banned"))?, Err(_e) => return Err(APIError::err("community_user_already_banned").into()),
}; };
} }
@ -477,7 +471,6 @@ impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
let user_view = UserView::read(&conn, data.user_id)?; let user_view = UserView::read(&conn, data.user_id)?;
Ok(BanFromCommunityResponse { Ok(BanFromCommunityResponse {
op: self.op.to_string(),
user: user_view, user: user_view,
banned: data.ban, banned: data.ban,
}) })
@ -485,13 +478,12 @@ impl Perform<BanFromCommunityResponse> for Oper<BanFromCommunity> {
} }
impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> { impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> {
fn perform(&self) -> Result<AddModToCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<AddModToCommunityResponse, Error> {
let data: &AddModToCommunity = &self.data; let data: &AddModToCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -504,22 +496,12 @@ impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> {
if data.added { if data.added {
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
} else { } else {
match CommunityModerator::leave(&conn, &community_moderator_form) { match CommunityModerator::leave(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
} }
@ -534,21 +516,17 @@ impl Perform<AddModToCommunityResponse> for Oper<AddModToCommunity> {
let moderators = CommunityModeratorView::for_community(&conn, data.community_id)?; let moderators = CommunityModeratorView::for_community(&conn, data.community_id)?;
Ok(AddModToCommunityResponse { Ok(AddModToCommunityResponse { moderators })
op: self.op.to_string(),
moderators,
})
} }
} }
impl Perform<GetCommunityResponse> for Oper<TransferCommunity> { impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
fn perform(&self) -> Result<GetCommunityResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetCommunityResponse, Error> {
let data: &TransferCommunity = &self.data; let data: &TransferCommunity = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -562,14 +540,8 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
admins.insert(0, creator_user); admins.insert(0, creator_user);
// Make sure user is the creator, or an admin // Make sure user is the creator, or an admin
if user_id != read_community.creator_id if user_id != read_community.creator_id && !admins.iter().map(|a| a.id).any(|x| x == user_id) {
&& !admins return Err(APIError::err("not_an_admin").into());
.iter()
.map(|a| a.id)
.collect::<Vec<i32>>()
.contains(&user_id)
{
return Err(APIError::err(&self.op, "not_an_admin"))?;
} }
let community_form = CommunityForm { let community_form = CommunityForm {
@ -586,7 +558,7 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
let _updated_community = match Community::update(&conn, data.community_id, &community_form) { let _updated_community = match Community::update(&conn, data.community_id, &community_form) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_community"))?, Err(_e) => return Err(APIError::err("couldnt_update_community").into()),
}; };
// You also have to re-do the community_moderator table, reordering it. // You also have to re-do the community_moderator table, reordering it.
@ -609,12 +581,7 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
let _inserted_community_moderator = let _inserted_community_moderator =
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => { Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
}; };
} }
@ -629,20 +596,20 @@ impl Perform<GetCommunityResponse> for Oper<TransferCommunity> {
let community_view = match CommunityView::read(&conn, data.community_id, Some(user_id)) { let community_view = match CommunityView::read(&conn, data.community_id, Some(user_id)) {
Ok(community) => community, Ok(community) => community,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
let moderators = match CommunityModeratorView::for_community(&conn, data.community_id) { let moderators = match CommunityModeratorView::for_community(&conn, data.community_id) {
Ok(moderators) => moderators, Ok(moderators) => moderators,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_community"))?, Err(_e) => return Err(APIError::err("couldnt_find_community").into()),
}; };
// Return the jwt // Return the jwt
Ok(GetCommunityResponse { Ok(GetCommunityResponse {
op: self.op.to_string(),
community: community_view, community: community_view,
moderators, moderators,
admins, admins,
online: 0,
}) })
} }
} }
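The creator/admin check in `TransferCommunity` above was tightened from collecting the admin ids into a `Vec` and calling `contains` to a short-circuiting `any`. A tiny self-contained illustration of the equivalence, with plain integers standing in for `UserView`:

```rust
// Equivalent checks: the old code allocated an intermediate Vec and used `contains`,
// the new code short-circuits with `any`.
fn main() {
    let admin_ids = vec![1, 7, 42];
    let user_id = 7;

    let old_style = admin_ids.iter().map(|a| *a).collect::<Vec<i32>>().contains(&user_id);
    let new_style = admin_ids.iter().map(|a| *a).any(|x| x == user_id);

    assert_eq!(old_style, new_style); // same answer, no intermediate allocation
}
```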


@@ -8,6 +8,8 @@ use crate::db::moderator_views::*;
use crate::db::password_reset_request::*;
use crate::db::post::*;
use crate::db::post_view::*;
+ use crate::db::private_message::*;
+ use crate::db::private_message_view::*;
use crate::db::site::*;
use crate::db::site_view::*;
use crate::db::user::*;
@@ -15,7 +17,11 @@ use crate::db::user_mention::*;
use crate::db::user_mention_view::*;
use crate::db::user_view::*;
use crate::db::*;
- use crate::{extract_usernames, has_slurs, naive_from_unix, naive_now, remove_slurs, Settings};
+ use crate::{
+ extract_usernames, fetch_iframely_and_pictshare_data, naive_from_unix, naive_now, remove_slurs,
+ slur_check, slurs_vec_to_str,
+ };
+ use diesel::PgConnection;
use failure::Error;
use serde::{Deserialize, Serialize};
@@ -25,78 +31,32 @@ pub mod post;
pub mod site;
pub mod user;
- #[derive(EnumString, ToString, Debug)]
- pub enum UserOperation {
- Login,
- Register,
- CreateCommunity,
- CreatePost,
- ListCommunities,
- ListCategories,
- GetPost,
- GetCommunity,
- CreateComment,
- EditComment,
- SaveComment,
- CreateCommentLike,
- GetPosts,
- CreatePostLike,
- EditPost,
- SavePost,
- EditCommunity,
- FollowCommunity,
- GetFollowedCommunities,
- GetUserDetails,
- GetReplies,
- GetUserMentions,
- EditUserMention,
- GetModlog,
- BanFromCommunity,
- AddModToCommunity,
- CreateSite,
- EditSite,
- GetSite,
- AddAdmin,
- BanUser,
- Search,
- MarkAllAsRead,
- SaveUserSettings,
- TransferCommunity,
- TransferSite,
- DeleteAccount,
- PasswordReset,
- PasswordChange,
- }
#[derive(Fail, Debug)]
- #[fail(display = "{{\"op\":\"{}\", \"error\":\"{}\"}}", op, message)]
+ #[fail(display = "{{\"error\":\"{}\"}}", message)]
pub struct APIError {
- pub op: String,
pub message: String,
}
impl APIError {
- pub fn err(op: &UserOperation, msg: &str) -> Self {
+ pub fn err(msg: &str) -> Self {
APIError {
- op: op.to_string(),
message: msg.to_string(),
}
}
}
pub struct Oper<T> {
- op: UserOperation,
data: T,
}
impl<T> Oper<T> {
- pub fn new(op: UserOperation, data: T) -> Oper<T> {
+ pub fn new(data: T) -> Oper<T> {
- Oper { op, data }
+ Oper { data }
}
}
pub trait Perform<T> {
- fn perform(&self) -> Result<T, Error>
+ fn perform(&self, conn: &PgConnection) -> Result<T, Error>
where
T: Sized;
}
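The `mod.rs` changes above are the heart of the refactor: `APIError` and `Oper` drop the `UserOperation`-based `op` field, and `Perform::perform` now receives a shared `&PgConnection` instead of each handler calling `establish_connection()` itself. The self-contained sketch below shows how a caller drives a handler under the new shape; `Conn`, `Ping` and `PingResponse` are stand-ins (the real code uses `diesel::PgConnection`, `failure::Error` and `APIError::err(...).into()`):

```rust
use std::fmt;

#[derive(Debug)]
struct APIError { message: String }

impl APIError {
    fn err(msg: &str) -> Self { APIError { message: msg.to_string() } }
}

impl fmt::Display for APIError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Mirrors the new #[fail(display = ...)] attribute: only the error code is shown.
        write!(f, "{{\"error\":\"{}\"}}", self.message)
    }
}

struct Conn; // stand-in for diesel::PgConnection

struct Oper<T> { data: T }

impl<T> Oper<T> { fn new(data: T) -> Oper<T> { Oper { data } } }

trait Perform<T> {
    fn perform(&self, conn: &Conn) -> Result<T, APIError>;
}

// A toy operation with the same shape as the handlers above.
struct Ping { auth: Option<String> }

#[derive(Debug)]
struct PingResponse { ok: bool }

impl Perform<PingResponse> for Oper<Ping> {
    fn perform(&self, _conn: &Conn) -> Result<PingResponse, APIError> {
        if self.data.auth.is_none() {
            return Err(APIError::err("not_logged_in"));
        }
        Ok(PingResponse { ok: true })
    }
}

fn main() {
    let conn = Conn; // in the real server this comes from the caller, not the handler
    let ok = Oper::new(Ping { auth: Some("jwt".into()) }).perform(&conn);
    assert!(ok.map(|r| r.ok).unwrap_or(false));

    let err = Oper::new(Ping { auth: None }).perform(&conn).unwrap_err();
    assert_eq!(err.to_string(), "{\"error\":\"not_logged_in\"}");
}
```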


@ -1,4 +1,5 @@
use super::*; use super::*;
use diesel::PgConnection;
use std::str::FromStr; use std::str::FromStr;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -7,13 +8,12 @@ pub struct CreatePost {
url: Option<String>, url: Option<String>,
body: Option<String>, body: Option<String>,
nsfw: bool, nsfw: bool,
community_id: i32, pub community_id: i32,
auth: String, auth: String,
} }
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct PostResponse { pub struct PostResponse {
op: String,
pub post: PostView, pub post: PostView,
} }
@ -25,12 +25,12 @@ pub struct GetPost {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetPostResponse { pub struct GetPostResponse {
op: String,
post: PostView, post: PostView,
comments: Vec<CommentView>, comments: Vec<CommentView>,
community: CommunityView, community: CommunityView,
moderators: Vec<CommunityModeratorView>, moderators: Vec<CommunityModeratorView>,
admins: Vec<UserView>, admins: Vec<UserView>,
pub online: usize,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -39,13 +39,12 @@ pub struct GetPosts {
sort: String, sort: String,
page: Option<i64>, page: Option<i64>,
limit: Option<i64>, limit: Option<i64>,
community_id: Option<i32>, pub community_id: Option<i32>,
auth: Option<String>, auth: Option<String>,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetPostsResponse { pub struct GetPostsResponse {
op: String,
posts: Vec<PostView>, posts: Vec<PostView>,
} }
@ -56,12 +55,6 @@ pub struct CreatePostLike {
auth: String, auth: String,
} }
#[derive(Serialize, Deserialize)]
pub struct CreatePostLikeResponse {
op: String,
post: PostView,
}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct EditPost { pub struct EditPost {
pub edit_id: i32, pub edit_id: i32,
@ -87,31 +80,40 @@ pub struct SavePost {
} }
impl Perform<PostResponse> for Oper<CreatePost> { impl Perform<PostResponse> for Oper<CreatePost> {
fn perform(&self) -> Result<PostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &CreatePost = &self.data; let data: &CreatePost = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) || (data.body.is_some() && has_slurs(&data.body.to_owned().unwrap())) { if let Err(slurs) = slur_check(&data.name) {
return Err(APIError::err(&self.op, "no_slurs"))?; return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
if let Some(body) = &data.body {
if let Err(slurs) = slur_check(body) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Check for a community ban // Check for a community ban
if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
// Fetch Iframely and Pictshare cached image
let (iframely_title, iframely_description, iframely_html, pictshare_thumbnail) =
fetch_iframely_and_pictshare_data(data.url.to_owned());
let post_form = PostForm { let post_form = PostForm {
name: data.name.to_owned(), name: data.name.to_owned(),
url: data.url.to_owned(), url: data.url.to_owned(),
@ -124,11 +126,23 @@ impl Perform<PostResponse> for Oper<CreatePost> {
locked: None, locked: None,
stickied: None, stickied: None,
updated: None, updated: None,
embed_title: iframely_title,
embed_description: iframely_description,
embed_html: iframely_html,
thumbnail_url: pictshare_thumbnail,
}; };
let inserted_post = match Post::create(&conn, &post_form) { let inserted_post = match Post::create(&conn, &post_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_create_post"))?, Err(e) => {
let err_type = if e.to_string() == "value too long for type character varying(200)" {
"post_title_too_long"
} else {
"couldnt_create_post"
};
return Err(APIError::err(err_type).into());
}
}; };
// They like their own post by default
@@ -141,26 +155,22 @@ impl Perform<PostResponse> for Oper<CreatePost> {
// Only add the like if the score isnt 0
let _inserted_like = match PostLike::like(&conn, &like_form) {
Ok(like) => like,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_post"))?,
+ Err(_e) => return Err(APIError::err("couldnt_like_post").into()),
};
// Refetch the view
let post_view = match PostView::read(&conn, inserted_post.id, Some(user_id)) {
Ok(post) => post,
- Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_post"))?,
+ Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
};
- Ok(PostResponse {
- op: self.op.to_string(),
- post: post_view,
- })
+ Ok(PostResponse { post: post_view })
}
}
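`CreatePost` and `EditPost` above both destructure a four-tuple from `fetch_iframely_and_pictshare_data`. The helper's body is not part of this diff, so the stub below only records the signature implied by those call sites; treating every element as an `Option<String>` is an assumption based on the embed fields being optional:

```rust
// Stub only: the real implementation queries Iframely for title/description/html
// and Pictshare for a cached thumbnail, falling back to None on any failure.
fn fetch_iframely_and_pictshare_data(
    url: Option<String>,
) -> (Option<String>, Option<String>, Option<String>, Option<String>) {
    match url {
        Some(_u) => (None, None, None, None), // network calls elided in this sketch
        None => (None, None, None, None),
    }
}

fn main() {
    let (embed_title, embed_description, embed_html, thumbnail_url) =
        fetch_iframely_and_pictshare_data(Some("https://example.com".to_string()));
    assert!(embed_title.is_none() && embed_description.is_none());
    assert!(embed_html.is_none() && thumbnail_url.is_none());
}
```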
impl Perform<GetPostResponse> for Oper<GetPost> { impl Perform<GetPostResponse> for Oper<GetPost> {
fn perform(&self) -> Result<GetPostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetPostResponse, Error> {
let data: &GetPost = &self.data; let data: &GetPost = &self.data;
let conn = establish_connection();
let user_id: Option<i32> = match &data.auth { let user_id: Option<i32> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -175,7 +185,7 @@ impl Perform<GetPostResponse> for Oper<GetPost> {
let post_view = match PostView::read(&conn, data.id, user_id) { let post_view = match PostView::read(&conn, data.id, user_id) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_post"))?, Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
}; };
let comments = CommentQueryBuilder::create(&conn) let comments = CommentQueryBuilder::create(&conn)
@ -196,20 +206,19 @@ impl Perform<GetPostResponse> for Oper<GetPost> {
// Return the jwt // Return the jwt
Ok(GetPostResponse { Ok(GetPostResponse {
op: self.op.to_string(),
post: post_view, post: post_view,
comments, comments,
community, community,
moderators, moderators,
admins, admins,
online: 0,
}) })
} }
} }
impl Perform<GetPostsResponse> for Oper<GetPosts> { impl Perform<GetPostsResponse> for Oper<GetPosts> {
fn perform(&self) -> Result<GetPostsResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetPostsResponse, Error> {
let data: &GetPosts = &self.data; let data: &GetPosts = &self.data;
let conn = establish_connection();
let user_claims: Option<Claims> = match &data.auth { let user_claims: Option<Claims> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -243,24 +252,20 @@ impl Perform<GetPostsResponse> for Oper<GetPosts> {
.list() .list()
{ {
Ok(posts) => posts, Ok(posts) => posts,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_get_posts"))?, Err(_e) => return Err(APIError::err("couldnt_get_posts").into()),
}; };
Ok(GetPostsResponse { Ok(GetPostsResponse { posts })
op: self.op.to_string(),
posts,
})
} }
} }
impl Perform<CreatePostLikeResponse> for Oper<CreatePostLike> { impl Perform<PostResponse> for Oper<CreatePostLike> {
fn perform(&self) -> Result<CreatePostLikeResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &CreatePostLike = &self.data; let data: &CreatePostLike = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -268,20 +273,20 @@ impl Perform<CreatePostLikeResponse> for Oper<CreatePostLike> {
// Don't do a downvote if site has downvotes disabled // Don't do a downvote if site has downvotes disabled
if data.score == -1 { if data.score == -1 {
let site = SiteView::read(&conn)?; let site = SiteView::read(&conn)?;
if site.enable_downvotes == false { if !site.enable_downvotes {
return Err(APIError::err(&self.op, "downvotes_disabled"))?; return Err(APIError::err("downvotes_disabled").into());
} }
} }
// Check for a community ban // Check for a community ban
let post = Post::read(&conn, data.post_id)?; let post = Post::read(&conn, data.post_id)?;
if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, post.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
let like_form = PostLikeForm { let like_form = PostLikeForm {
@ -294,39 +299,41 @@ impl Perform<CreatePostLikeResponse> for Oper<CreatePostLike> {
PostLike::remove(&conn, &like_form)?; PostLike::remove(&conn, &like_form)?;
// Only add the like if the score isnt 0 // Only add the like if the score isnt 0
let do_add = &like_form.score != &0 && (&like_form.score == &1 || &like_form.score == &-1); let do_add = like_form.score != 0 && (like_form.score == 1 || like_form.score == -1);
if do_add { if do_add {
let _inserted_like = match PostLike::like(&conn, &like_form) { let _inserted_like = match PostLike::like(&conn, &like_form) {
Ok(like) => like, Ok(like) => like,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_like_post"))?, Err(_e) => return Err(APIError::err("couldnt_like_post").into()),
}; };
} }
let post_view = match PostView::read(&conn, data.post_id, Some(user_id)) { let post_view = match PostView::read(&conn, data.post_id, Some(user_id)) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_find_post"))?, Err(_e) => return Err(APIError::err("couldnt_find_post").into()),
}; };
// just output the score // just output the score
Ok(CreatePostLikeResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
impl Perform<PostResponse> for Oper<EditPost> { impl Perform<PostResponse> for Oper<EditPost> {
fn perform(&self) -> Result<PostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &EditPost = &self.data; let data: &EditPost = &self.data;
if has_slurs(&data.name) || (data.body.is_some() && has_slurs(&data.body.to_owned().unwrap())) {
return Err(APIError::err(&self.op, "no_slurs"))?; if let Err(slurs) = slur_check(&data.name) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
} }
let conn = establish_connection(); if let Some(body) = &data.body {
if let Err(slurs) = slur_check(body) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
}
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -341,19 +348,23 @@ impl Perform<PostResponse> for Oper<EditPost> {
); );
editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect()); editors.append(&mut UserView::admins(&conn)?.into_iter().map(|a| a.id).collect());
if !editors.contains(&user_id) { if !editors.contains(&user_id) {
return Err(APIError::err(&self.op, "no_post_edit_allowed"))?; return Err(APIError::err("no_post_edit_allowed").into());
} }
// Check for a community ban // Check for a community ban
if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() { if CommunityUserBanView::get(&conn, user_id, data.community_id).is_ok() {
return Err(APIError::err(&self.op, "community_ban"))?; return Err(APIError::err("community_ban").into());
} }
// Check for a site ban // Check for a site ban
if UserView::read(&conn, user_id)?.banned { if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err(&self.op, "site_ban"))?; return Err(APIError::err("site_ban").into());
} }
// Fetch Iframely and Pictshare cached image
let (iframely_title, iframely_description, iframely_html, pictshare_thumbnail) =
fetch_iframely_and_pictshare_data(data.url.to_owned());
let post_form = PostForm { let post_form = PostForm {
name: data.name.to_owned(), name: data.name.to_owned(),
url: data.url.to_owned(), url: data.url.to_owned(),
@ -366,11 +377,23 @@ impl Perform<PostResponse> for Oper<EditPost> {
locked: data.locked.to_owned(), locked: data.locked.to_owned(),
stickied: data.stickied.to_owned(), stickied: data.stickied.to_owned(),
updated: Some(naive_now()), updated: Some(naive_now()),
embed_title: iframely_title,
embed_description: iframely_description,
embed_html: iframely_html,
thumbnail_url: pictshare_thumbnail,
}; };
let _updated_post = match Post::update(&conn, data.edit_id, &post_form) { let _updated_post = match Post::update(&conn, data.edit_id, &post_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_post"))?, Err(e) => {
let err_type = if e.to_string() == "value too long for type character varying(200)" {
"post_title_too_long"
} else {
"couldnt_update_post"
};
return Err(APIError::err(err_type).into());
}
}; };
// Mod tables // Mod tables
@ -404,21 +427,17 @@ impl Perform<PostResponse> for Oper<EditPost> {
let post_view = PostView::read(&conn, data.edit_id, Some(user_id))?; let post_view = PostView::read(&conn, data.edit_id, Some(user_id))?;
Ok(PostResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
impl Perform<PostResponse> for Oper<SavePost> { impl Perform<PostResponse> for Oper<SavePost> {
fn perform(&self) -> Result<PostResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PostResponse, Error> {
let data: &SavePost = &self.data; let data: &SavePost = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -431,20 +450,17 @@ impl Perform<PostResponse> for Oper<SavePost> {
if data.save { if data.save {
match PostSaved::save(&conn, &post_saved_form) { match PostSaved::save(&conn, &post_saved_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_post"))?, Err(_e) => return Err(APIError::err("couldnt_save_post").into()),
}; };
} else { } else {
match PostSaved::unsave(&conn, &post_saved_form) { match PostSaved::unsave(&conn, &post_saved_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_save_post"))?, Err(_e) => return Err(APIError::err("couldnt_save_post").into()),
}; };
} }
let post_view = PostView::read(&conn, data.post_id, Some(user_id))?; let post_view = PostView::read(&conn, data.post_id, Some(user_id))?;
Ok(PostResponse { Ok(PostResponse { post: post_view })
op: self.op.to_string(),
post: post_view,
})
} }
} }
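Both post handlers above map one specific database failure, Postgres's "value too long for type character varying(200)" on the title column, to a dedicated `post_title_too_long` code and everything else to the generic one. A self-contained sketch of that mapping (the helper name is ours):

```rust
// Sketch of the error mapping used in CreatePost/EditPost: the Diesel error's
// message is string-matched against Postgres's varchar(200) overflow text.
fn map_post_save_error(db_error_message: &str, generic: &'static str) -> &'static str {
    if db_error_message == "value too long for type character varying(200)" {
        "post_title_too_long"
    } else {
        generic
    }
}

fn main() {
    assert_eq!(
        map_post_save_error("value too long for type character varying(200)", "couldnt_create_post"),
        "post_title_too_long"
    );
    assert_eq!(map_post_save_error("deadlock detected", "couldnt_update_post"), "couldnt_update_post");
}
```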


@@ -1,12 +1,12 @@
use super::*;
+ use diesel::PgConnection;
use std::str::FromStr;
#[derive(Serialize, Deserialize)]
- pub struct ListCategories;
+ pub struct ListCategories {}
#[derive(Serialize, Deserialize)]
pub struct ListCategoriesResponse {
- op: String,
categories: Vec<Category>,
}
@@ -18,11 +18,11 @@ pub struct Search {
sort: String,
page: Option<i64>,
limit: Option<i64>,
+ auth: Option<String>,
}
#[derive(Serialize, Deserialize)]
pub struct SearchResponse {
- op: String,
type_: String,
comments: Vec<CommentView>,
posts: Vec<PostView>,
@ -40,7 +40,6 @@ pub struct GetModlog {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetModlogResponse { pub struct GetModlogResponse {
op: String,
removed_posts: Vec<ModRemovePostView>, removed_posts: Vec<ModRemovePostView>,
locked_posts: Vec<ModLockPostView>, locked_posts: Vec<ModLockPostView>,
stickied_posts: Vec<ModStickyPostView>, stickied_posts: Vec<ModStickyPostView>,
@ -73,17 +72,15 @@ pub struct EditSite {
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetSite; pub struct GetSite {}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct SiteResponse { pub struct SiteResponse {
op: String,
site: SiteView, site: SiteView,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetSiteResponse { pub struct GetSiteResponse {
op: String,
site: Option<SiteView>, site: Option<SiteView>,
admins: Vec<UserView>, admins: Vec<UserView>,
banned: Vec<UserView>, banned: Vec<UserView>,
@ -97,24 +94,19 @@ pub struct TransferSite {
} }
impl Perform<ListCategoriesResponse> for Oper<ListCategories> { impl Perform<ListCategoriesResponse> for Oper<ListCategories> {
fn perform(&self) -> Result<ListCategoriesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<ListCategoriesResponse, Error> {
let _data: &ListCategories = &self.data; let _data: &ListCategories = &self.data;
let conn = establish_connection();
let categories: Vec<Category> = Category::list_all(&conn)?; let categories: Vec<Category> = Category::list_all(&conn)?;
// Return the jwt // Return the jwt
Ok(ListCategoriesResponse { Ok(ListCategoriesResponse { categories })
op: self.op.to_string(),
categories,
})
} }
} }
impl Perform<GetModlogResponse> for Oper<GetModlog> { impl Perform<GetModlogResponse> for Oper<GetModlog> {
fn perform(&self) -> Result<GetModlogResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetModlogResponse, Error> {
let data: &GetModlog = &self.data; let data: &GetModlog = &self.data;
let conn = establish_connection();
let removed_posts = ModRemovePostView::list( let removed_posts = ModRemovePostView::list(
&conn, &conn,
@ -160,20 +152,18 @@ impl Perform<GetModlogResponse> for Oper<GetModlog> {
)?; )?;
// These arrays are only for the full modlog, when a community isn't given // These arrays are only for the full modlog, when a community isn't given
let mut removed_communities = Vec::new(); let (removed_communities, banned, added) = if data.community_id.is_none() {
let mut banned = Vec::new(); (
let mut added = Vec::new(); ModRemoveCommunityView::list(&conn, data.mod_user_id, data.page, data.limit)?,
ModBanView::list(&conn, data.mod_user_id, data.page, data.limit)?,
if data.community_id.is_none() { ModAddView::list(&conn, data.mod_user_id, data.page, data.limit)?,
removed_communities = )
ModRemoveCommunityView::list(&conn, data.mod_user_id, data.page, data.limit)?; } else {
banned = ModBanView::list(&conn, data.mod_user_id, data.page, data.limit)?; (Vec::new(), Vec::new(), Vec::new())
added = ModAddView::list(&conn, data.mod_user_id, data.page, data.limit)?; };
}
// Return the jwt // Return the jwt
Ok(GetModlogResponse { Ok(GetModlogResponse {
op: self.op.to_string(),
removed_posts, removed_posts,
locked_posts, locked_posts,
stickied_posts, stickied_posts,
@ -188,26 +178,29 @@ impl Perform<GetModlogResponse> for Oper<GetModlog> {
} }
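The modlog change above replaces three `let mut` vectors that were conditionally filled with a single `if`/`else` expression evaluating to a tuple. A small self-contained illustration of the pattern, with plain numbers standing in for the moderator views:

```rust
// Both forms produce the same values; the new one keeps the bindings immutable.
fn main() {
    let community_id: Option<i32> = None;

    let (removed_communities, banned, added) = if community_id.is_none() {
        (vec![1, 2], vec![3], vec![4, 5, 6]) // queried only for the full modlog
    } else {
        (Vec::new(), Vec::new(), Vec::new())
    };

    assert_eq!(removed_communities.len(), 2);
    assert_eq!(banned.len(), 1);
    assert_eq!(added.len(), 3);
}
```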
impl Perform<SiteResponse> for Oper<CreateSite> { impl Perform<SiteResponse> for Oper<CreateSite> {
fn perform(&self) -> Result<SiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<SiteResponse, Error> {
let data: &CreateSite = &self.data; let data: &CreateSite = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) if let Err(slurs) = slur_check(&data.name) {
|| (data.description.is_some() && has_slurs(&data.description.to_owned().unwrap())) return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
{ }
return Err(APIError::err(&self.op, "no_slurs"))?;
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Make sure user is an admin // Make sure user is an admin
if !UserView::read(&conn, user_id)?.admin { if !UserView::read(&conn, user_id)?.admin {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let site_form = SiteForm { let site_form = SiteForm {
@ -222,39 +215,39 @@ impl Perform<SiteResponse> for Oper<CreateSite> {
match Site::create(&conn, &site_form) { match Site::create(&conn, &site_form) {
Ok(site) => site, Ok(site) => site,
Err(_e) => return Err(APIError::err(&self.op, "site_already_exists"))?, Err(_e) => return Err(APIError::err("site_already_exists").into()),
}; };
let site_view = SiteView::read(&conn)?; let site_view = SiteView::read(&conn)?;
Ok(SiteResponse { Ok(SiteResponse { site: site_view })
op: self.op.to_string(),
site: site_view,
})
} }
} }
impl Perform<SiteResponse> for Oper<EditSite> { impl Perform<SiteResponse> for Oper<EditSite> {
fn perform(&self) -> Result<SiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<SiteResponse, Error> {
let data: &EditSite = &self.data; let data: &EditSite = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
if has_slurs(&data.name) if let Err(slurs) = slur_check(&data.name) {
|| (data.description.is_some() && has_slurs(&data.description.to_owned().unwrap())) return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
{ }
return Err(APIError::err(&self.op, "no_slurs"))?;
if let Some(description) = &data.description {
if let Err(slurs) = slur_check(description) {
return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
}
} }
let user_id = claims.id; let user_id = claims.id;
// Make sure user is an admin // Make sure user is an admin
if UserView::read(&conn, user_id)?.admin == false { if !UserView::read(&conn, user_id)?.admin {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let found_site = Site::read(&conn, 1)?; let found_site = Site::read(&conn, 1)?;
@ -271,22 +264,18 @@ impl Perform<SiteResponse> for Oper<EditSite> {
match Site::update(&conn, 1, &site_form) { match Site::update(&conn, 1, &site_form) {
Ok(site) => site, Ok(site) => site,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_site"))?, Err(_e) => return Err(APIError::err("couldnt_update_site").into()),
}; };
let site_view = SiteView::read(&conn)?; let site_view = SiteView::read(&conn)?;
Ok(SiteResponse { Ok(SiteResponse { site: site_view })
op: self.op.to_string(),
site: site_view,
})
} }
} }
impl Perform<GetSiteResponse> for Oper<GetSite> { impl Perform<GetSiteResponse> for Oper<GetSite> {
fn perform(&self) -> Result<GetSiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetSiteResponse, Error> {
let _data: &GetSite = &self.data; let _data: &GetSite = &self.data;
let conn = establish_connection();
// It can return a null site in order to redirect // It can return a null site in order to redirect
let site_view = match Site::read(&conn, 1) { let site_view = match Site::read(&conn, 1) {
@ -305,7 +294,6 @@ impl Perform<GetSiteResponse> for Oper<GetSite> {
let banned = UserView::banned(&conn)?; let banned = UserView::banned(&conn)?;
Ok(GetSiteResponse { Ok(GetSiteResponse {
op: self.op.to_string(),
site: site_view, site: site_view,
admins, admins,
banned, banned,
@ -315,9 +303,19 @@ impl Perform<GetSiteResponse> for Oper<GetSite> {
} }
impl Perform<SearchResponse> for Oper<Search> { impl Perform<SearchResponse> for Oper<Search> {
fn perform(&self) -> Result<SearchResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<SearchResponse, Error> {
let data: &Search = &self.data; let data: &Search = &self.data;
let conn = establish_connection();
let user_id: Option<i32> = match &data.auth {
Some(auth) => match Claims::decode(&auth) {
Ok(claims) => {
let user_id = claims.claims.id;
Some(user_id)
}
Err(_e) => None,
},
None => None,
};
let sort = SortType::from_str(&data.sort)?; let sort = SortType::from_str(&data.sort)?;
let type_ = SearchType::from_str(&data.type_)?; let type_ = SearchType::from_str(&data.type_)?;
@ -336,6 +334,7 @@ impl Perform<SearchResponse> for Oper<Search> {
.show_nsfw(true) .show_nsfw(true)
.for_community_id(data.community_id) .for_community_id(data.community_id)
.search_term(data.q.to_owned()) .search_term(data.q.to_owned())
.my_user_id(user_id)
.page(data.page) .page(data.page)
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
@ -344,6 +343,7 @@ impl Perform<SearchResponse> for Oper<Search> {
comments = CommentQueryBuilder::create(&conn) comments = CommentQueryBuilder::create(&conn)
.sort(&sort) .sort(&sort)
.search_term(data.q.to_owned()) .search_term(data.q.to_owned())
.my_user_id(user_id)
.page(data.page) .page(data.page)
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
@ -370,6 +370,7 @@ impl Perform<SearchResponse> for Oper<Search> {
.show_nsfw(true) .show_nsfw(true)
.for_community_id(data.community_id) .for_community_id(data.community_id)
.search_term(data.q.to_owned()) .search_term(data.q.to_owned())
.my_user_id(user_id)
.page(data.page) .page(data.page)
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
@ -377,6 +378,7 @@ impl Perform<SearchResponse> for Oper<Search> {
comments = CommentQueryBuilder::create(&conn) comments = CommentQueryBuilder::create(&conn)
.sort(&sort) .sort(&sort)
.search_term(data.q.to_owned()) .search_term(data.q.to_owned())
.my_user_id(user_id)
.page(data.page) .page(data.page)
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
@ -409,7 +411,6 @@ impl Perform<SearchResponse> for Oper<Search> {
// Return the jwt // Return the jwt
Ok(SearchResponse { Ok(SearchResponse {
op: self.op.to_string(),
type_: data.type_.to_owned(), type_: data.type_.to_owned(),
comments, comments,
posts, posts,
@ -420,13 +421,12 @@ impl Perform<SearchResponse> for Oper<Search> {
} }
impl Perform<GetSiteResponse> for Oper<TransferSite> { impl Perform<GetSiteResponse> for Oper<TransferSite> {
fn perform(&self) -> Result<GetSiteResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetSiteResponse, Error> {
let data: &TransferSite = &self.data; let data: &TransferSite = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -435,7 +435,7 @@ impl Perform<GetSiteResponse> for Oper<TransferSite> {
// Make sure user is the creator // Make sure user is the creator
if read_site.creator_id != user_id { if read_site.creator_id != user_id {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let site_form = SiteForm { let site_form = SiteForm {
@ -450,7 +450,7 @@ impl Perform<GetSiteResponse> for Oper<TransferSite> {
match Site::update(&conn, 1, &site_form) { match Site::update(&conn, 1, &site_form) {
Ok(site) => site, Ok(site) => site,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_site"))?, Err(_e) => return Err(APIError::err("couldnt_update_site").into()),
}; };
// Mod tables // Mod tables
@ -475,7 +475,6 @@ impl Perform<GetSiteResponse> for Oper<TransferSite> {
let banned = UserView::banned(&conn)?; let banned = UserView::banned(&conn)?;
Ok(GetSiteResponse { Ok(GetSiteResponse {
op: self.op.to_string(),
site: Some(site_view), site: Some(site_view),
admins, admins,
banned, banned,


@ -1,6 +1,8 @@
use super::*; use super::*;
use crate::settings::Settings;
use crate::{generate_random_string, send_email}; use crate::{generate_random_string, send_email};
use bcrypt::verify; use bcrypt::verify;
use diesel::PgConnection;
use std::str::FromStr; use std::str::FromStr;
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
@ -26,12 +28,19 @@ pub struct SaveUserSettings {
default_sort_type: i16, default_sort_type: i16,
default_listing_type: i16, default_listing_type: i16,
lang: String, lang: String,
avatar: Option<String>,
email: Option<String>,
matrix_user_id: Option<String>,
new_password: Option<String>,
new_password_verify: Option<String>,
old_password: Option<String>,
show_avatars: bool,
send_notifications_to_email: bool,
auth: String, auth: String,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct LoginResponse { pub struct LoginResponse {
op: String,
jwt: String, jwt: String,
} }
@ -49,7 +58,6 @@ pub struct GetUserDetails {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetUserDetailsResponse { pub struct GetUserDetailsResponse {
op: String,
user: UserView, user: UserView,
follows: Vec<CommunityFollowerView>, follows: Vec<CommunityFollowerView>,
moderates: Vec<CommunityModeratorView>, moderates: Vec<CommunityModeratorView>,
@ -60,13 +68,11 @@ pub struct GetUserDetailsResponse {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetRepliesResponse { pub struct GetRepliesResponse {
op: String,
replies: Vec<ReplyView>, replies: Vec<ReplyView>,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct GetUserMentionsResponse { pub struct GetUserMentionsResponse {
op: String,
mentions: Vec<UserMentionView>, mentions: Vec<UserMentionView>,
} }
@ -84,7 +90,6 @@ pub struct AddAdmin {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct AddAdminResponse { pub struct AddAdminResponse {
op: String,
admins: Vec<UserView>, admins: Vec<UserView>,
} }
@ -99,7 +104,6 @@ pub struct BanUser {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct BanUserResponse { pub struct BanUserResponse {
op: String,
user: UserView, user: UserView,
banned: bool, banned: bool,
} }
@ -131,7 +135,6 @@ pub struct EditUserMention {
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct UserMentionResponse { pub struct UserMentionResponse {
op: String,
mention: UserMentionView, mention: UserMentionView,
} }
@ -147,9 +150,7 @@ pub struct PasswordReset {
} }
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct PasswordResetResponse { pub struct PasswordResetResponse {}
op: String,
}
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct PasswordChange { pub struct PasswordChange {
@ -158,67 +159,103 @@ pub struct PasswordChange {
password_verify: String, password_verify: String,
} }
#[derive(Serialize, Deserialize)]
pub struct CreatePrivateMessage {
content: String,
pub recipient_id: i32,
auth: String,
}
#[derive(Serialize, Deserialize)]
pub struct EditPrivateMessage {
edit_id: i32,
content: Option<String>,
deleted: Option<bool>,
read: Option<bool>,
auth: String,
}
#[derive(Serialize, Deserialize)]
pub struct GetPrivateMessages {
unread_only: bool,
page: Option<i64>,
limit: Option<i64>,
auth: String,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct PrivateMessagesResponse {
messages: Vec<PrivateMessageView>,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct PrivateMessageResponse {
message: PrivateMessageView,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct UserJoin {
auth: String,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct UserJoinResponse {
pub user_id: i32,
}
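A minimal, hypothetical sketch (not part of the diff) of the JSON a client might send for CreatePrivateMessage, deserialized through the serde derive shown above. It assumes the test lives in the same module as these structs, that serde_json is a dependency, and the example values are made up.

#[cfg(test)]
mod create_private_message_payload_sketch {
    #[test]
    fn deserializes_a_client_payload() {
        // Field names follow the CreatePrivateMessage definition above.
        let payload = r#"{"content": "hi there", "recipient_id": 42, "auth": "<jwt>"}"#;
        let msg: super::CreatePrivateMessage =
            serde_json::from_str(payload).expect("field names match the struct definition");
        assert_eq!(msg.recipient_id, 42);
    }
}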
impl Perform<LoginResponse> for Oper<Login> { impl Perform<LoginResponse> for Oper<Login> {
fn perform(&self) -> Result<LoginResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<LoginResponse, Error> {
let data: &Login = &self.data; let data: &Login = &self.data;
let conn = establish_connection();
// Fetch that username / email // Fetch that username / email
let user: User_ = match User_::find_by_email_or_username(&conn, &data.username_or_email) { let user: User_ = match User_::find_by_email_or_username(&conn, &data.username_or_email) {
Ok(user) => user, Ok(user) => user,
Err(_e) => {
return Err(APIError::err(
&self.op,
"couldnt_find_that_username_or_email",
))?
}
Err(_e) => return Err(APIError::err("couldnt_find_that_username_or_email").into()),
}; };
// Verify the password // Verify the password
let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false); let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false);
if !valid { if !valid {
return Err(APIError::err(&self.op, "password_incorrect"))?; return Err(APIError::err("password_incorrect").into());
} }
// Return the jwt // Return the jwt
Ok(LoginResponse {
op: self.op.to_string(),
jwt: user.jwt(),
})
Ok(LoginResponse { jwt: user.jwt() })
} }
} }
impl Perform<LoginResponse> for Oper<Register> { impl Perform<LoginResponse> for Oper<Register> {
fn perform(&self) -> Result<LoginResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<LoginResponse, Error> {
let data: &Register = &self.data; let data: &Register = &self.data;
let conn = establish_connection();
// Make sure site has open registration // Make sure site has open registration
if let Ok(site) = SiteView::read(&conn) { if let Ok(site) = SiteView::read(&conn) {
if !site.open_registration { if !site.open_registration {
return Err(APIError::err(&self.op, "registration_closed"))?; return Err(APIError::err("registration_closed").into());
} }
} }
// Make sure passwords match // Make sure passwords match
if &data.password != &data.password_verify { if data.password != data.password_verify {
return Err(APIError::err(&self.op, "passwords_dont_match"))?; return Err(APIError::err("passwords_dont_match").into());
} }
if has_slurs(&data.username) { if let Err(slurs) = slur_check(&data.username) {
return Err(APIError::err(&self.op, "no_slurs"))?; return Err(APIError::err(&slurs_vec_to_str(slurs)).into());
} }
// Make sure there are no admins // Make sure there are no admins
if data.admin && UserView::admins(&conn)?.len() > 0 { if data.admin && !UserView::admins(&conn)?.is_empty() {
return Err(APIError::err(&self.op, "admin_already_created"))?; return Err(APIError::err("admin_already_created").into());
} }
// Register the new user // Register the new user
let user_form = UserForm { let user_form = UserForm {
name: data.username.to_owned(), name: data.username.to_owned(),
fedi_name: Settings::get().hostname.into(), fedi_name: Settings::get().hostname.to_owned(),
email: data.email.to_owned(), email: data.email.to_owned(),
matrix_user_id: None,
avatar: None,
password_encrypted: data.password.to_owned(), password_encrypted: data.password.to_owned(),
preferred_username: None, preferred_username: None,
updated: None, updated: None,
@ -229,12 +266,24 @@ impl Perform<LoginResponse> for Oper<Register> {
default_sort_type: SortType::Hot as i16, default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16, default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(), lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
}; };
// Create the user // Create the user
let inserted_user = match User_::register(&conn, &user_form) { let inserted_user = match User_::register(&conn, &user_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "user_already_exists"))?, Err(e) => {
let err_type = if e.to_string()
== "duplicate key value violates unique constraint \"user__email_key\""
{
"email_already_exists"
} else {
"user_already_exists"
};
return Err(APIError::err(err_type).into());
}
}; };
// Create the main community if it doesn't exist // Create the main community if it doesn't exist
@ -265,7 +314,7 @@ impl Perform<LoginResponse> for Oper<Register> {
let _inserted_community_follower = let _inserted_community_follower =
match CommunityFollower::follow(&conn, &community_follower_form) { match CommunityFollower::follow(&conn, &community_follower_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "community_follower_already_exists"))?, Err(_e) => return Err(APIError::err("community_follower_already_exists").into()),
}; };
// If it's an admin, add them as a mod and follower to main // If it's an admin, add them as a mod and follower to main
@ -278,42 +327,70 @@ impl Perform<LoginResponse> for Oper<Register> {
let _inserted_community_moderator = let _inserted_community_moderator =
match CommunityModerator::join(&conn, &community_moderator_form) { match CommunityModerator::join(&conn, &community_moderator_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => {
return Err(APIError::err(
&self.op,
"community_moderator_already_exists",
))?
}
Err(_e) => return Err(APIError::err("community_moderator_already_exists").into()),
}; };
} }
// Return the jwt // Return the jwt
Ok(LoginResponse { Ok(LoginResponse {
op: self.op.to_string(),
jwt: inserted_user.jwt(), jwt: inserted_user.jwt(),
}) })
} }
} }
impl Perform<LoginResponse> for Oper<SaveUserSettings> { impl Perform<LoginResponse> for Oper<SaveUserSettings> {
fn perform(&self) -> Result<LoginResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<LoginResponse, Error> {
let data: &SaveUserSettings = &self.data; let data: &SaveUserSettings = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
let read_user = User_::read(&conn, user_id)?; let read_user = User_::read(&conn, user_id)?;
let email = match &data.email {
Some(email) => Some(email.to_owned()),
None => read_user.email,
};
let password_encrypted = match &data.new_password {
Some(new_password) => {
match &data.new_password_verify {
Some(new_password_verify) => {
// Make sure passwords match
if new_password != new_password_verify {
return Err(APIError::err("passwords_dont_match").into());
}
// Check the old password
match &data.old_password {
Some(old_password) => {
let valid: bool =
verify(old_password, &read_user.password_encrypted).unwrap_or(false);
if !valid {
return Err(APIError::err("password_incorrect").into());
}
User_::update_password(&conn, user_id, &new_password)?.password_encrypted
}
None => return Err(APIError::err("password_incorrect").into()),
}
}
None => return Err(APIError::err("passwords_dont_match").into()),
}
}
None => read_user.password_encrypted,
};
let user_form = UserForm { let user_form = UserForm {
name: read_user.name, name: read_user.name,
fedi_name: read_user.fedi_name, fedi_name: read_user.fedi_name,
email: read_user.email, email,
password_encrypted: read_user.password_encrypted, matrix_user_id: data.matrix_user_id.to_owned(),
avatar: data.avatar.to_owned(),
password_encrypted,
preferred_username: read_user.preferred_username, preferred_username: read_user.preferred_username,
updated: Some(naive_now()), updated: Some(naive_now()),
admin: read_user.admin, admin: read_user.admin,
@ -323,25 +400,35 @@ impl Perform<LoginResponse> for Oper<SaveUserSettings> {
default_sort_type: data.default_sort_type, default_sort_type: data.default_sort_type,
default_listing_type: data.default_listing_type, default_listing_type: data.default_listing_type,
lang: data.lang.to_owned(), lang: data.lang.to_owned(),
show_avatars: data.show_avatars,
send_notifications_to_email: data.send_notifications_to_email,
}; };
let updated_user = match User_::update(&conn, user_id, &user_form) { let updated_user = match User_::update(&conn, user_id, &user_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_user"))?, Err(e) => {
let err_type = if e.to_string()
== "duplicate key value violates unique constraint \"user__email_key\""
{
"email_already_exists"
} else {
"user_already_exists"
};
return Err(APIError::err(err_type).into());
}
}; };
// Return the jwt // Return the jwt
Ok(LoginResponse { Ok(LoginResponse {
op: self.op.to_string(),
jwt: updated_user.jwt(), jwt: updated_user.jwt(),
}) })
} }
} }
impl Perform<GetUserDetailsResponse> for Oper<GetUserDetails> { impl Perform<GetUserDetailsResponse> for Oper<GetUserDetails> {
fn perform(&self) -> Result<GetUserDetailsResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetUserDetailsResponse, Error> {
let data: &GetUserDetails = &self.data; let data: &GetUserDetails = &self.data;
let conn = establish_connection();
let user_claims: Option<Claims> = match &data.auth { let user_claims: Option<Claims> = match &data.auth {
Some(auth) => match Claims::decode(&auth) { Some(auth) => match Claims::decode(&auth) {
@ -366,15 +453,20 @@ impl Perform<GetUserDetailsResponse> for Oper<GetUserDetails> {
let user_details_id = match data.user_id { let user_details_id = match data.user_id {
Some(id) => id, Some(id) => id,
None => { None => {
User_::read_from_name(
&conn,
data.username.to_owned().unwrap_or("admin".to_string()),
)?
.id
match User_::read_from_name(
&conn,
data
.username
.to_owned()
.unwrap_or_else(|| "admin".to_string()),
) {
Ok(user) => user.id,
Err(_e) => return Err(APIError::err("couldnt_find_that_username_or_email").into()),
}
} }
}; };
let user_view = UserView::read(&conn, user_details_id)?; let mut user_view = UserView::read(&conn, user_details_id)?;
let mut posts_query = PostQueryBuilder::create(&conn) let mut posts_query = PostQueryBuilder::create(&conn)
.sort(&sort) .sort(&sort)
@ -410,9 +502,17 @@ impl Perform<GetUserDetailsResponse> for Oper<GetUserDetails> {
let creator_user = admins.remove(creator_index); let creator_user = admins.remove(creator_index);
admins.insert(0, creator_user); admins.insert(0, creator_user);
// If it's not the same user, remove the email
if let Some(user_id) = user_id {
if user_details_id != user_id {
user_view.email = None;
}
} else {
user_view.email = None;
}
// Return the user details // Return the user details
Ok(GetUserDetailsResponse { Ok(GetUserDetailsResponse {
op: self.op.to_string(),
user: user_view, user: user_view,
follows, follows,
moderates, moderates,
@ -424,28 +524,30 @@ impl Perform<GetUserDetailsResponse> for Oper<GetUserDetails> {
} }
impl Perform<AddAdminResponse> for Oper<AddAdmin> { impl Perform<AddAdminResponse> for Oper<AddAdmin> {
fn perform(&self) -> Result<AddAdminResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<AddAdminResponse, Error> {
let data: &AddAdmin = &self.data; let data: &AddAdmin = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
// Make sure user is an admin // Make sure user is an admin
if UserView::read(&conn, user_id)?.admin == false { if !UserView::read(&conn, user_id)?.admin {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let read_user = User_::read(&conn, data.user_id)?; let read_user = User_::read(&conn, data.user_id)?;
// TODO make addadmin easier
let user_form = UserForm { let user_form = UserForm {
name: read_user.name, name: read_user.name,
fedi_name: read_user.fedi_name, fedi_name: read_user.fedi_name,
email: read_user.email, email: read_user.email,
matrix_user_id: read_user.matrix_user_id,
avatar: read_user.avatar,
password_encrypted: read_user.password_encrypted, password_encrypted: read_user.password_encrypted,
preferred_username: read_user.preferred_username, preferred_username: read_user.preferred_username,
updated: Some(naive_now()), updated: Some(naive_now()),
@ -456,11 +558,13 @@ impl Perform<AddAdminResponse> for Oper<AddAdmin> {
default_sort_type: read_user.default_sort_type, default_sort_type: read_user.default_sort_type,
default_listing_type: read_user.default_listing_type, default_listing_type: read_user.default_listing_type,
lang: read_user.lang, lang: read_user.lang,
show_avatars: read_user.show_avatars,
send_notifications_to_email: read_user.send_notifications_to_email,
}; };
match User_::update(&conn, data.user_id, &user_form) { match User_::update(&conn, data.user_id, &user_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_user"))?, Err(_e) => return Err(APIError::err("couldnt_update_user").into()),
}; };
// Mod tables // Mod tables
@ -478,36 +582,35 @@ impl Perform<AddAdminResponse> for Oper<AddAdmin> {
let creator_user = admins.remove(creator_index); let creator_user = admins.remove(creator_index);
admins.insert(0, creator_user); admins.insert(0, creator_user);
Ok(AddAdminResponse {
op: self.op.to_string(),
admins,
})
Ok(AddAdminResponse { admins })
} }
} }
impl Perform<BanUserResponse> for Oper<BanUser> { impl Perform<BanUserResponse> for Oper<BanUser> {
fn perform(&self) -> Result<BanUserResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<BanUserResponse, Error> {
let data: &BanUser = &self.data; let data: &BanUser = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
// Make sure user is an admin // Make sure user is an admin
if UserView::read(&conn, user_id)?.admin == false { if !UserView::read(&conn, user_id)?.admin {
return Err(APIError::err(&self.op, "not_an_admin"))?; return Err(APIError::err("not_an_admin").into());
} }
let read_user = User_::read(&conn, data.user_id)?; let read_user = User_::read(&conn, data.user_id)?;
// TODO make bans and addadmins easier
let user_form = UserForm { let user_form = UserForm {
name: read_user.name, name: read_user.name,
fedi_name: read_user.fedi_name, fedi_name: read_user.fedi_name,
email: read_user.email, email: read_user.email,
matrix_user_id: read_user.matrix_user_id,
avatar: read_user.avatar,
password_encrypted: read_user.password_encrypted, password_encrypted: read_user.password_encrypted,
preferred_username: read_user.preferred_username, preferred_username: read_user.preferred_username,
updated: Some(naive_now()), updated: Some(naive_now()),
@ -518,11 +621,13 @@ impl Perform<BanUserResponse> for Oper<BanUser> {
default_sort_type: read_user.default_sort_type, default_sort_type: read_user.default_sort_type,
default_listing_type: read_user.default_listing_type, default_listing_type: read_user.default_listing_type,
lang: read_user.lang, lang: read_user.lang,
show_avatars: read_user.show_avatars,
send_notifications_to_email: read_user.send_notifications_to_email,
}; };
match User_::update(&conn, data.user_id, &user_form) { match User_::update(&conn, data.user_id, &user_form) {
Ok(user) => user, Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_user"))?, Err(_e) => return Err(APIError::err("couldnt_update_user").into()),
}; };
// Mod tables // Mod tables
@ -544,7 +649,6 @@ impl Perform<BanUserResponse> for Oper<BanUser> {
let user_view = UserView::read(&conn, data.user_id)?; let user_view = UserView::read(&conn, data.user_id)?;
Ok(BanUserResponse { Ok(BanUserResponse {
op: self.op.to_string(),
user: user_view, user: user_view,
banned: data.ban, banned: data.ban,
}) })
@ -552,13 +656,12 @@ impl Perform<BanUserResponse> for Oper<BanUser> {
} }
impl Perform<GetRepliesResponse> for Oper<GetReplies> { impl Perform<GetRepliesResponse> for Oper<GetReplies> {
fn perform(&self) -> Result<GetRepliesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetRepliesResponse, Error> {
let data: &GetReplies = &self.data; let data: &GetReplies = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -572,21 +675,17 @@ impl Perform<GetRepliesResponse> for Oper<GetReplies> {
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
Ok(GetRepliesResponse {
op: self.op.to_string(),
replies,
})
Ok(GetRepliesResponse { replies })
} }
} }
impl Perform<GetUserMentionsResponse> for Oper<GetUserMentions> { impl Perform<GetUserMentionsResponse> for Oper<GetUserMentions> {
fn perform(&self) -> Result<GetUserMentionsResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetUserMentionsResponse, Error> {
let data: &GetUserMentions = &self.data; let data: &GetUserMentions = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -600,21 +699,17 @@ impl Perform<GetUserMentionsResponse> for Oper<GetUserMentions> {
.limit(data.limit) .limit(data.limit)
.list()?; .list()?;
Ok(GetUserMentionsResponse {
op: self.op.to_string(),
mentions,
})
Ok(GetUserMentionsResponse { mentions })
} }
} }
impl Perform<UserMentionResponse> for Oper<EditUserMention> { impl Perform<UserMentionResponse> for Oper<EditUserMention> {
fn perform(&self) -> Result<UserMentionResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<UserMentionResponse, Error> {
let data: &EditUserMention = &self.data; let data: &EditUserMention = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -630,26 +725,24 @@ impl Perform<UserMentionResponse> for Oper<EditUserMention> {
let _updated_user_mention = let _updated_user_mention =
match UserMention::update(&conn, user_mention.id, &user_mention_form) { match UserMention::update(&conn, user_mention.id, &user_mention_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_comment"))?, Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
}; };
let user_mention_view = UserMentionView::read(&conn, user_mention.id, user_id)?; let user_mention_view = UserMentionView::read(&conn, user_mention.id, user_id)?;
Ok(UserMentionResponse { Ok(UserMentionResponse {
op: self.op.to_string(),
mention: user_mention_view, mention: user_mention_view,
}) })
} }
} }
impl Perform<GetRepliesResponse> for Oper<MarkAllAsRead> { impl Perform<GetRepliesResponse> for Oper<MarkAllAsRead> {
fn perform(&self) -> Result<GetRepliesResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<GetRepliesResponse, Error> {
let data: &MarkAllAsRead = &self.data; let data: &MarkAllAsRead = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -674,7 +767,7 @@ impl Perform<GetRepliesResponse> for Oper<MarkAllAsRead> {
let _updated_comment = match Comment::update(&conn, reply.id, &comment_form) { let _updated_comment = match Comment::update(&conn, reply.id, &comment_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_comment"))?, Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
}; };
} }
@ -695,25 +788,45 @@ impl Perform<GetRepliesResponse> for Oper<MarkAllAsRead> {
let _updated_mention = let _updated_mention =
match UserMention::update(&conn, mention.user_mention_id, &mention_form) { match UserMention::update(&conn, mention.user_mention_id, &mention_form) {
Ok(mention) => mention, Ok(mention) => mention,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_comment"))?, Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
}; };
} }
Ok(GetRepliesResponse {
op: self.op.to_string(),
replies: vec![],
})
// messages
let messages = PrivateMessageQueryBuilder::create(&conn, user_id)
.page(1)
.limit(999)
.unread_only(true)
.list()?;
for message in &messages {
let private_message_form = PrivateMessageForm {
content: None,
creator_id: message.to_owned().creator_id,
recipient_id: message.to_owned().recipient_id,
deleted: None,
read: Some(true),
updated: None,
};
let _updated_message = match PrivateMessage::update(&conn, message.id, &private_message_form)
{
Ok(message) => message,
Err(_e) => return Err(APIError::err("couldnt_update_private_message").into()),
};
}
Ok(GetRepliesResponse { replies: vec![] })
} }
} }
impl Perform<LoginResponse> for Oper<DeleteAccount> { impl Perform<LoginResponse> for Oper<DeleteAccount> {
fn perform(&self) -> Result<LoginResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<LoginResponse, Error> {
let data: &DeleteAccount = &self.data; let data: &DeleteAccount = &self.data;
let conn = establish_connection();
let claims = match Claims::decode(&data.auth) { let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims, Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err(&self.op, "not_logged_in"))?, Err(_e) => return Err(APIError::err("not_logged_in").into()),
}; };
let user_id = claims.id; let user_id = claims.id;
@ -723,7 +836,7 @@ impl Perform<LoginResponse> for Oper<DeleteAccount> {
// Verify the password // Verify the password
let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false); let valid: bool = verify(&data.password, &user.password_encrypted).unwrap_or(false);
if !valid { if !valid {
return Err(APIError::err(&self.op, "password_incorrect"))?; return Err(APIError::err("password_incorrect").into());
} }
// Comments // Comments
@ -746,7 +859,7 @@ impl Perform<LoginResponse> for Oper<DeleteAccount> {
let _updated_comment = match Comment::update(&conn, comment.id, &comment_form) { let _updated_comment = match Comment::update(&conn, comment.id, &comment_form) {
Ok(comment) => comment, Ok(comment) => comment,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_comment"))?, Err(_e) => return Err(APIError::err("couldnt_update_comment").into()),
}; };
} }
@ -770,35 +883,32 @@ impl Perform<LoginResponse> for Oper<DeleteAccount> {
locked: None, locked: None,
stickied: None, stickied: None,
updated: Some(naive_now()), updated: Some(naive_now()),
embed_title: None,
embed_description: None,
embed_html: None,
thumbnail_url: None,
}; };
let _updated_post = match Post::update(&conn, post.id, &post_form) { let _updated_post = match Post::update(&conn, post.id, &post_form) {
Ok(post) => post, Ok(post) => post,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_post"))?, Err(_e) => return Err(APIError::err("couldnt_update_post").into()),
}; };
} }
Ok(LoginResponse { Ok(LoginResponse {
op: self.op.to_string(),
jwt: data.auth.to_owned(), jwt: data.auth.to_owned(),
}) })
} }
} }
impl Perform<PasswordResetResponse> for Oper<PasswordReset> { impl Perform<PasswordResetResponse> for Oper<PasswordReset> {
fn perform(&self) -> Result<PasswordResetResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<PasswordResetResponse, Error> {
let data: &PasswordReset = &self.data; let data: &PasswordReset = &self.data;
let conn = establish_connection();
// Fetch that email // Fetch that email
let user: User_ = match User_::find_by_email(&conn, &data.email) { let user: User_ = match User_::find_by_email(&conn, &data.email) {
Ok(user) => user, Ok(user) => user,
Err(_e) => {
return Err(APIError::err(
&self.op,
"couldnt_find_that_username_or_email",
))?
}
Err(_e) => return Err(APIError::err("couldnt_find_that_username_or_email").into()),
}; };
// Generate a random token // Generate a random token
@ -815,57 +925,186 @@ impl Perform<PasswordResetResponse> for Oper<PasswordReset> {
let html = &format!("<h1>Password Reset Request for {}</h1><br><a href={}/password_change/{}>Click here to reset your password</a>", user.name, hostname, &token); let html = &format!("<h1>Password Reset Request for {}</h1><br><a href={}/password_change/{}>Click here to reset your password</a>", user.name, hostname, &token);
match send_email(subject, user_email, &user.name, html) { match send_email(subject, user_email, &user.name, html) {
Ok(_o) => _o, Ok(_o) => _o,
Err(_e) => return Err(APIError::err(&self.op, &_e.to_string()))?, Err(_e) => return Err(APIError::err(&_e).into()),
}; };
Ok(PasswordResetResponse {
op: self.op.to_string(),
})
Ok(PasswordResetResponse {})
} }
} }
impl Perform<LoginResponse> for Oper<PasswordChange> { impl Perform<LoginResponse> for Oper<PasswordChange> {
fn perform(&self) -> Result<LoginResponse, Error> { fn perform(&self, conn: &PgConnection) -> Result<LoginResponse, Error> {
let data: &PasswordChange = &self.data; let data: &PasswordChange = &self.data;
let conn = establish_connection();
// Fetch the user_id from the token // Fetch the user_id from the token
let user_id = PasswordResetRequest::read_from_token(&conn, &data.token)?.user_id; let user_id = PasswordResetRequest::read_from_token(&conn, &data.token)?.user_id;
// Make sure passwords match // Make sure passwords match
if &data.password != &data.password_verify { if data.password != data.password_verify {
return Err(APIError::err(&self.op, "passwords_dont_match"))?; return Err(APIError::err("passwords_dont_match").into());
} }
// Fetch the user
let read_user = User_::read(&conn, user_id)?;
// Update the user with the new password // Update the user with the new password
let user_form = UserForm {
name: read_user.name,
fedi_name: read_user.fedi_name,
email: read_user.email,
password_encrypted: data.password.to_owned(),
preferred_username: read_user.preferred_username,
updated: Some(naive_now()),
admin: read_user.admin,
banned: read_user.banned,
show_nsfw: read_user.show_nsfw,
theme: read_user.theme,
default_sort_type: read_user.default_sort_type,
default_listing_type: read_user.default_listing_type,
lang: read_user.lang,
};
let updated_user = match User_::update_password(&conn, user_id, &user_form) {
Ok(user) => user,
Err(_e) => return Err(APIError::err(&self.op, "couldnt_update_user"))?,
};
let updated_user = match User_::update_password(&conn, user_id, &data.password) {
Ok(user) => user,
Err(_e) => return Err(APIError::err("couldnt_update_user").into()),
};
// Return the jwt // Return the jwt
Ok(LoginResponse { Ok(LoginResponse {
op: self.op.to_string(),
jwt: updated_user.jwt(), jwt: updated_user.jwt(),
}) })
} }
} }
impl Perform<PrivateMessageResponse> for Oper<CreatePrivateMessage> {
fn perform(&self, conn: &PgConnection) -> Result<PrivateMessageResponse, Error> {
let data: &CreatePrivateMessage = &self.data;
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
let hostname = &format!("https://{}", Settings::get().hostname);
// Check for a site ban
if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err("site_ban").into());
}
let content_slurs_removed = remove_slurs(&data.content.to_owned());
let private_message_form = PrivateMessageForm {
content: Some(content_slurs_removed.to_owned()),
creator_id: user_id,
recipient_id: data.recipient_id,
deleted: None,
read: None,
updated: None,
};
let inserted_private_message = match PrivateMessage::create(&conn, &private_message_form) {
Ok(private_message) => private_message,
Err(_e) => {
return Err(APIError::err("couldnt_create_private_message").into());
}
};
// Send notifications to the recipient
let recipient_user = User_::read(&conn, data.recipient_id)?;
if recipient_user.send_notifications_to_email {
if let Some(email) = recipient_user.email {
let subject = &format!(
"{} - Private Message from {}",
Settings::get().hostname,
claims.username
);
let html = &format!(
"<h1>Private Message</h1><br><div>{} - {}</div><br><a href={}/inbox>inbox</a>",
claims.username, &content_slurs_removed, hostname
);
match send_email(subject, &email, &recipient_user.name, html) {
Ok(_o) => _o,
Err(e) => eprintln!("{}", e),
};
}
}
let message = PrivateMessageView::read(&conn, inserted_private_message.id)?;
Ok(PrivateMessageResponse { message })
}
}
impl Perform<PrivateMessageResponse> for Oper<EditPrivateMessage> {
fn perform(&self, conn: &PgConnection) -> Result<PrivateMessageResponse, Error> {
let data: &EditPrivateMessage = &self.data;
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
let orig_private_message = PrivateMessage::read(&conn, data.edit_id)?;
// Check for a site ban
if UserView::read(&conn, user_id)?.banned {
return Err(APIError::err("site_ban").into());
}
// Check to make sure they are the creator (or the recipient marking it as read)
if !(data.read.is_some() && orig_private_message.recipient_id.eq(&user_id)
|| orig_private_message.creator_id.eq(&user_id))
{
return Err(APIError::err("no_private_message_edit_allowed").into());
}
let content_slurs_removed = match &data.content {
Some(content) => Some(remove_slurs(content)),
None => None,
};
let private_message_form = PrivateMessageForm {
content: content_slurs_removed,
creator_id: orig_private_message.creator_id,
recipient_id: orig_private_message.recipient_id,
deleted: data.deleted.to_owned(),
read: data.read.to_owned(),
updated: if data.read.is_some() {
orig_private_message.updated
} else {
Some(naive_now())
},
};
let _updated_private_message =
match PrivateMessage::update(&conn, data.edit_id, &private_message_form) {
Ok(private_message) => private_message,
Err(_e) => return Err(APIError::err("couldnt_update_private_message").into()),
};
let message = PrivateMessageView::read(&conn, data.edit_id)?;
Ok(PrivateMessageResponse { message })
}
}
impl Perform<PrivateMessagesResponse> for Oper<GetPrivateMessages> {
fn perform(&self, conn: &PgConnection) -> Result<PrivateMessagesResponse, Error> {
let data: &GetPrivateMessages = &self.data;
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
let messages = PrivateMessageQueryBuilder::create(&conn, user_id)
.page(data.page)
.limit(data.limit)
.unread_only(data.unread_only)
.list()?;
Ok(PrivateMessagesResponse { messages })
}
}
impl Perform<UserJoinResponse> for Oper<UserJoin> {
fn perform(&self, _conn: &PgConnection) -> Result<UserJoinResponse, Error> {
let data: &UserJoin = &self.data;
let claims = match Claims::decode(&data.auth) {
Ok(claims) => claims.claims,
Err(_e) => return Err(APIError::err("not_logged_in").into()),
};
let user_id = claims.id;
Ok(UserJoinResponse { user_id })
}
}


@ -1,90 +0,0 @@
extern crate activitypub;
use self::activitypub::{actor::Person, context};
use crate::db::user::User_;
impl User_ {
pub fn person(&self) -> Person {
use crate::{to_datetime_utc, Settings};
let base_url = &format!("{}/user/{}", Settings::get().api_endpoint(), self.name);
let mut person = Person::default();
person.object_props.set_context_object(context()).ok();
person.object_props.set_id_string(base_url.to_string()).ok();
person
.object_props
.set_name_string(self.name.to_owned())
.ok();
person
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(i) = self.updated {
person
.object_props
.set_updated_utctime(to_datetime_utc(i))
.ok();
}
// person.object_props.summary = self.summary;
person
.ap_actor_props
.set_inbox_string(format!("{}/inbox", &base_url))
.ok();
person
.ap_actor_props
.set_outbox_string(format!("{}/outbox", &base_url))
.ok();
person
.ap_actor_props
.set_following_string(format!("{}/following", &base_url))
.ok();
person
.ap_actor_props
.set_liked_string(format!("{}/liked", &base_url))
.ok();
if let Some(i) = &self.preferred_username {
person
.ap_actor_props
.set_preferred_username_string(i.to_string())
.ok();
}
person
}
}
#[cfg(test)]
mod tests {
use super::User_;
use crate::db::{ListingType, SortType};
use crate::naive_now;
#[test]
fn test_person() {
let expected_user = User_ {
id: 52,
name: "thom".into(),
fedi_name: "rrf".into(),
preferred_username: None,
password_encrypted: "here".into(),
email: None,
icon: None,
published: naive_now(),
admin: false,
banned: false,
updated: None,
show_nsfw: false,
theme: "darkly".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
};
let person = expected_user.person();
assert_eq!(
"rrr/api/v1/user/thom",
person.object_props.id_string().unwrap()
);
let json = serde_json::to_string_pretty(&person).unwrap();
println!("{}", json);
}
}

server/src/apub/community.rs (new file, 109 lines)

@ -0,0 +1,109 @@
use crate::apub::make_apub_endpoint;
use crate::db::community::Community;
use crate::db::community_view::CommunityFollowerView;
use crate::db::establish_unpooled_connection;
use crate::to_datetime_utc;
use activitypub::{actor::Group, collection::UnorderedCollection, context};
use actix_web::body::Body;
use actix_web::web::Path;
use actix_web::HttpResponse;
use serde::Deserialize;
impl Community {
pub fn as_group(&self) -> Group {
let base_url = make_apub_endpoint("c", &self.name);
let mut group = Group::default();
group.object_props.set_context_object(context()).ok();
group.object_props.set_id_string(base_url.to_string()).ok();
group
.object_props
.set_name_string(self.name.to_owned())
.ok();
group
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(updated) = self.updated {
group
.object_props
.set_updated_utctime(to_datetime_utc(updated))
.ok();
}
if let Some(description) = &self.description {
group
.object_props
.set_summary_string(description.to_string())
.ok();
}
group
.ap_actor_props
.set_inbox_string(format!("{}/inbox", &base_url))
.ok();
group
.ap_actor_props
.set_outbox_string(format!("{}/outbox", &base_url))
.ok();
group
.ap_actor_props
.set_followers_string(format!("{}/followers", &base_url))
.ok();
group
}
pub fn followers_as_collection(&self) -> UnorderedCollection {
let base_url = make_apub_endpoint("c", &self.name);
let mut collection = UnorderedCollection::default();
collection.object_props.set_context_object(context()).ok();
collection.object_props.set_id_string(base_url).ok();
let connection = establish_unpooled_connection();
//As we are an object, we validated that the community id was valid
let community_followers = CommunityFollowerView::for_community(&connection, self.id).unwrap();
let ap_followers = community_followers
.iter()
.map(|follower| make_apub_endpoint("u", &follower.user_name))
.collect();
collection
.collection_props
.set_items_string_vec(ap_followers)
.unwrap();
collection
}
}
#[derive(Deserialize)]
pub struct CommunityQuery {
community_name: String,
}
pub async fn get_apub_community(info: Path<CommunityQuery>) -> HttpResponse<Body> {
let connection = establish_unpooled_connection();
if let Ok(community) = Community::read_from_name(&connection, info.community_name.to_owned()) {
HttpResponse::Ok()
.content_type("application/activity+json")
.body(serde_json::to_string(&community.as_group()).unwrap())
} else {
HttpResponse::NotFound().finish()
}
}
pub async fn get_apub_community_followers(info: Path<CommunityQuery>) -> HttpResponse<Body> {
let connection = establish_unpooled_connection();
if let Ok(community) = Community::read_from_name(&connection, info.community_name.to_owned()) {
HttpResponse::Ok()
.content_type("application/activity+json")
.body(serde_json::to_string(&community.followers_as_collection()).unwrap())
} else {
HttpResponse::NotFound().finish()
}
}
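A hypothetical sketch (not part of this diff) of wiring the two handlers above into an actix-web server. The route paths follow the "/federation/{point}/{value}" scheme used by make_apub_endpoint; the followers path, bind address, and function name are assumptions.

use actix_web::{web, App, HttpServer};

async fn serve_apub_sketch() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // {community_name} matches the CommunityQuery field used by the extractors.
            .route(
                "/federation/c/{community_name}",
                web::get().to(get_apub_community),
            )
            .route(
                "/federation/c/{community_name}/followers",
                web::get().to(get_apub_community_followers),
            )
    })
    .bind("127.0.0.1:8536")? // bind address is an assumption
    .run()
    .await
}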

server/src/apub/mod.rs (new file, 107 lines)

@ -0,0 +1,107 @@
pub mod community;
pub mod post;
pub mod user;
use crate::Settings;
use std::fmt::Display;
#[cfg(test)]
mod tests {
use crate::db::community::Community;
use crate::db::post::Post;
use crate::db::user::User_;
use crate::db::{ListingType, SortType};
use crate::{naive_now, Settings};
#[test]
fn test_person() {
let user = User_ {
id: 52,
name: "thom".into(),
fedi_name: "rrf".into(),
preferred_username: None,
password_encrypted: "here".into(),
email: None,
matrix_user_id: None,
avatar: None,
published: naive_now(),
admin: false,
banned: false,
updated: None,
show_nsfw: false,
theme: "darkly".into(),
default_sort_type: SortType::Hot as i16,
default_listing_type: ListingType::Subscribed as i16,
lang: "browser".into(),
show_avatars: true,
send_notifications_to_email: false,
};
let person = user.as_person();
assert_eq!(
format!("https://{}/federation/u/thom", Settings::get().hostname),
person.object_props.id_string().unwrap()
);
}
#[test]
fn test_community() {
let community = Community {
id: 42,
name: "Test".into(),
title: "Test Title".into(),
description: Some("Test community".into()),
category_id: 32,
creator_id: 52,
removed: false,
published: naive_now(),
updated: Some(naive_now()),
deleted: false,
nsfw: false,
};
let group = community.as_group();
assert_eq!(
format!("https://{}/federation/c/Test", Settings::get().hostname),
group.object_props.id_string().unwrap()
);
}
#[test]
fn test_post() {
let post = Post {
id: 62,
name: "A test post".into(),
url: None,
body: None,
creator_id: 52,
community_id: 42,
published: naive_now(),
removed: false,
locked: false,
stickied: false,
nsfw: false,
deleted: false,
updated: None,
embed_title: None,
embed_description: None,
embed_html: None,
thumbnail_url: None,
};
let page = post.as_page();
assert_eq!(
format!("https://{}/federation/post/62", Settings::get().hostname),
page.object_props.id_string().unwrap()
);
}
}
pub fn make_apub_endpoint<S: Display, T: Display>(point: S, value: T) -> String {
format!(
"https://{}/federation/{}/{}",
Settings::get().hostname,
point,
value
)
}
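A small extra check (not part of the diff) that mirrors the assertions in the tests above and shows the helper's output shape.

#[test]
fn make_apub_endpoint_example() {
    // The helper simply joins the configured hostname with the given point and value.
    assert_eq!(
        format!("https://{}/federation/u/thom", Settings::get().hostname),
        make_apub_endpoint("u", "thom")
    );
}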

server/src/apub/post.rs (new file, 38 lines)

@ -0,0 +1,38 @@
use crate::apub::make_apub_endpoint;
use crate::db::post::Post;
use crate::to_datetime_utc;
use activitypub::{context, object::Page};
impl Post {
pub fn as_page(&self) -> Page {
let base_url = make_apub_endpoint("post", self.id);
let mut page = Page::default();
page.object_props.set_context_object(context()).ok();
page.object_props.set_id_string(base_url).ok();
page.object_props.set_name_string(self.name.to_owned()).ok();
if let Some(body) = &self.body {
page.object_props.set_content_string(body.to_owned()).ok();
}
if let Some(url) = &self.url {
page.object_props.set_url_string(url.to_owned()).ok();
}
//page.object_props.set_attributed_to_string
page
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(updated) = self.updated {
page
.object_props
.set_updated_utctime(to_datetime_utc(updated))
.ok();
}
page
}
}
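A hypothetical handler sketch (not in the diff) showing how a Page could be served the same way the community and user endpoints serialize their objects; the function name is an assumption, and Post and serde_json are assumed to be in scope as in the rest of this module.

use actix_web::HttpResponse;

pub fn post_page_response(post: &Post) -> HttpResponse {
    // Serialize the ActivityPub Page with the same content type used by the
    // community and user endpoints.
    HttpResponse::Ok()
        .content_type("application/activity+json")
        .body(serde_json::to_string(&post.as_page()).unwrap())
}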

server/src/apub/user.rs (new file, 74 lines)

@ -0,0 +1,74 @@
use crate::apub::make_apub_endpoint;
use crate::db::establish_unpooled_connection;
use crate::db::user::User_;
use crate::to_datetime_utc;
use activitypub::{actor::Person, context};
use actix_web::body::Body;
use actix_web::web::Path;
use actix_web::HttpResponse;
use serde::Deserialize;
impl User_ {
pub fn as_person(&self) -> Person {
let base_url = make_apub_endpoint("u", &self.name);
let mut person = Person::default();
person.object_props.set_context_object(context()).ok();
person.object_props.set_id_string(base_url.to_string()).ok();
person
.object_props
.set_name_string(self.name.to_owned())
.ok();
person
.object_props
.set_published_utctime(to_datetime_utc(self.published))
.ok();
if let Some(updated) = self.updated {
person
.object_props
.set_updated_utctime(to_datetime_utc(updated))
.ok();
}
person
.ap_actor_props
.set_inbox_string(format!("{}/inbox", &base_url))
.ok();
person
.ap_actor_props
.set_outbox_string(format!("{}/outbox", &base_url))
.ok();
person
.ap_actor_props
.set_following_string(format!("{}/following", &base_url))
.ok();
person
.ap_actor_props
.set_liked_string(format!("{}/liked", &base_url))
.ok();
if let Some(i) = &self.preferred_username {
person
.ap_actor_props
.set_preferred_username_string(i.to_string())
.ok();
}
person
}
}
#[derive(Deserialize)]
pub struct UserQuery {
user_name: String,
}
pub async fn get_apub_user(info: Path<UserQuery>) -> HttpResponse<Body> {
let connection = establish_unpooled_connection();
if let Ok(user) = User_::find_by_email_or_username(&connection, &info.user_name) {
HttpResponse::Ok()
.content_type("application/activity+json")
.body(serde_json::to_string(&user.as_person()).unwrap())
} else {
HttpResponse::NotFound().finish()
}
}


@ -52,7 +52,7 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
fn test_crud() { fn test_crud() {
let conn = establish_connection(); let conn = establish_unpooled_connection();
let categories = Category::list_all(&conn).unwrap(); let categories = Category::list_all(&conn).unwrap();
let expected_first_category = Category { let expected_first_category = Category {

Some files were not shown because too many files have changed in this diff.