Mirror of https://github.com/spikecodes/libreddit (synced 2024-11-11 04:39:19 +01:00)
Compare commits
694 Commits
.devcontainer/devcontainer.json — new file (14 lines)
@@ -0,0 +1,14 @@
{
  "name": "Rust",
  "image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye",
  "features": {
    "ghcr.io/devcontainers/features/docker-in-docker:2": {}
  },
  "portsAttributes": {
    "8080": {
      "label": "libreddit",
      "onAutoForward": "notify"
    }
  },
  "postCreateCommand": "cargo build"
}
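A minimal sketch of using this configuration outside the editor with the Dev Containers CLI (the npm package name and commands are assumptions from the dev container tooling; opening the folder in VS Code and choosing "Reopen in Container" achieves the same result):

```bash
# Assumed CLI install; the dev container project publishes a reference CLI on npm
npm install -g @devcontainers/cli

# Build and start the container described by .devcontainer/devcontainer.json
devcontainer up --workspace-folder .

# Re-run the project's build inside the container
devcontainer exec --workspace-folder . cargo build
```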
.gitattributes — vendored, new file (1 line)
@@ -0,0 +1 @@
Dockerfile.* linguist-language=Dockerfile
.github/FUNDING.yml — vendored, new file (2 lines)
@@ -0,0 +1,2 @@
liberapay: spike
custom: ['https://www.buymeacoffee.com/spikecodes']
.github/ISSUE_TEMPLATE/bug_report.md — vendored (27 changed lines)
@@ -1,24 +1,33 @@
---
name: Bug report
name: 🐛 Bug report
about: Create a report to help us improve
title: Bug Report | [title]
title: '🐛 Bug Report: '
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.
## Describe the bug
<!--
A clear and concise description of what the bug is.
-->

**To reproduce**
## Steps to reproduce the bug

<!--
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
-->

**Expected behavior**
A clear and concise description of what you expected to happen.
## What's the expected behavior?
<!--
A clear and concise description of what you expected to happen.
-->

**Additional context**
Add any other context about the problem here.
## Additional context / screenshot
<!--
Add any other context about the problem here.
-->
.github/ISSUE_TEMPLATE/feature_parity.md — vendored, new file (28 lines)
@@ -0,0 +1,28 @@
---
name: ✨ Feature parity
about: Suggest implementing a feature into Libreddit that is found in Reddit.com
title: '✨ Feature parity: '
labels: feature parity
assignees: ''

---

## How does this feature work on Reddit?
<!--
A clear and concise description of what the feature is.
-->

## Describe how this could be implemented into Libreddit
<!--
A clear and concise description of what you want to happen.
-->

## Describe alternatives you've considered
<!--
A clear and concise description of any alternative solutions or features you've considered.
-->

## Additional context / screenshot
<!--
Add any other context or screenshots about the feature parity request here.
-->
.github/ISSUE_TEMPLATE/feature_request.md — vendored (30 changed lines)
@@ -1,20 +1,28 @@
---
name: Feature request
about: Suggest an idea for this project
title: Feature Request | [title]
name: 💡 Feature request
about: Suggest a feature for Libreddit that is not found in Reddit
title: '💡 Feature request: '
labels: enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
## Is your feature request related to a problem? Please describe.
<!--
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-->

**Describe the solution you'd like**
A clear and concise description of what you want to happen.
## Describe the feature you would like to be implemented
<!--
A clear and concise description of what you want to happen.
-->

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
## Describe alternatives you've considered
<!--
A clear and concise description of any alternative solutions or features you've considered.
-->

**Additional context**
Add any other context or screenshots about the feature request here.
## Additional context / screenshot
<!--
Add any other context or screenshots about the feature request here.
-->
.github/workflows/main-docker.yml — vendored, new file (58 lines)
@@ -0,0 +1,58 @@
name: Docker Build

on:
  push:
    paths-ignore:
      - "**.md"
    branches:
      - 'main'
      - 'master'

jobs:
  build-docker:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        config:
          - { platform: 'linux/amd64', tag: 'latest', dockerfile: 'Dockerfile' }
          - { platform: 'linux/arm64', tag: 'latest-arm', dockerfile: 'Dockerfile.arm' }
          - { platform: 'linux/arm/v7', tag: 'latest-armv7', dockerfile: 'Dockerfile.armv7' }
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
        with:
          platforms: all

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        with:
          version: latest

      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Docker Hub Description
        uses: peter-evans/dockerhub-description@v3
        if: matrix.config.platform == 'linux/amd64'
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          repository: libreddit/libreddit

      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./${{ matrix.config.dockerfile }}
          platforms: ${{ matrix.config.platform }}
          push: true
          tags: libreddit/libreddit:${{ matrix.config.tag }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
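For reference, one matrix entry of this workflow can be reproduced locally with Docker Buildx; a sketch, assuming QEMU emulation is available for non-native platforms and using an arbitrary local tag rather than the published image name:

```bash
# Register QEMU emulators so the arm64 image can be built on an amd64 host
docker run --privileged --rm tonistiigi/binfmt --install all

# Mirror the 'linux/arm64' matrix entry (Dockerfile.arm, tag 'latest-arm')
docker buildx build --platform linux/arm64 -f Dockerfile.arm -t libreddit:latest-arm --load .
```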
.github/workflows/main-rust.yml — vendored, new file (78 lines)
@@ -0,0 +1,78 @@
name: Rust Build & Publish

on:
  push:
    paths-ignore:
      - "**.md"

    branches:
      - 'main'
      - 'master'

  release:
    types: [published]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Cache Packages
        uses: Swatinem/rust-cache@v2

      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: stable

      # Building actions
      - name: Build
        run: RUSTFLAGS='-C target-feature=+crt-static' cargo build --release --target x86_64-unknown-linux-gnu

      - name: Calculate SHA512 checksum
        run: sha512sum target/x86_64-unknown-linux-gnu/release/libreddit > libreddit.sha512

      - name: Calculate SHA256 checksum
        run: sha256sum target/x86_64-unknown-linux-gnu/release/libreddit > libreddit.sha256

      - uses: actions/upload-artifact@v3
        name: Upload a Build Artifact
        with:
          name: libreddit
          path: |
            target/x86_64-unknown-linux-gnu/release/libreddit
            libreddit.sha512
            libreddit.sha256

      - name: Versions
        id: version
        run: echo "VERSION=$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')" >> "$GITHUB_OUTPUT"

      # Publishing actions

      - name: Publish to crates.io
        if: github.event_name == 'release'
        run: cargo publish --no-verify --token ${{ secrets.CARGO_REGISTRY_TOKEN }}

      - name: Release
        uses: softprops/action-gh-release@v1
        if: github.base_ref != 'master' && github.event_name == 'release'
        with:
          tag_name: ${{ steps.version.outputs.VERSION }}
          name: ${{ steps.version.outputs.VERSION }} - ${{ github.event.head_commit.message }}
          draft: true
          files: |
            target/x86_64-unknown-linux-gnu/release/libreddit
            libreddit.sha512
            libreddit.sha256
          body: |
            - ${{ github.event.head_commit.message }} ${{ github.sha }}
          generate_release_notes: true
        env:
          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
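The build and checksum steps above can be reproduced locally with the same commands, assuming the `x86_64-unknown-linux-gnu` target is installed:

```bash
# Statically linked release build, as in the workflow's Build step
RUSTFLAGS='-C target-feature=+crt-static' cargo build --release --target x86_64-unknown-linux-gnu

# Checksums written next to the binary, matching the uploaded artifact layout
sha512sum target/x86_64-unknown-linux-gnu/release/libreddit > libreddit.sha512
sha256sum target/x86_64-unknown-linux-gnu/release/libreddit > libreddit.sha256
```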
.github/workflows/pull-request.yml — vendored, new file (62 lines)
@@ -0,0 +1,62 @@
name: Pull Request

on:
  push:
    branches:
      - 'main'
      - 'master'

  pull_request:
    branches:
      - 'main'
      - 'master'

jobs:
  test:
    name: cargo test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: stable

      - name: Run cargo test
        run: cargo test

  format:
    name: cargo fmt --all -- --check
    runs-on: ubuntu-latest

    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Install stable toolchain with rustfmt component
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: stable
          components: rustfmt

      - name: Run cargo fmt
        run: cargo fmt --all -- --check

  clippy:
    name: cargo clippy -- -D warnings
    runs-on: ubuntu-latest

    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Install stable toolchain with clippy component
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: stable
          components: clippy

      - name: Run cargo clippy
        run: cargo clippy -- -D warnings
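These are the same three checks the workflow runs, so they can be run locally before opening a pull request:

```bash
cargo test
cargo fmt --all -- --check
cargo clippy -- -D warnings
```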
.github/workflows/rust.yml — vendored, deleted (29 lines)
@@ -1,29 +0,0 @@
name: Rust

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Cache Packages
        uses: Swatinem/rust-cache@v1.0.1

      - name: Build
        run: cargo build --release

      - uses: actions/upload-artifact@v2.2.1
        name: Upload a Build Artifact
        with:
          name: libreddit
          path: target/release/libreddit
.gitignore — vendored (3 changed lines)
@@ -1 +1,4 @@
/target

# Idea Files
.idea/
.replit — new file (2 lines)
@@ -0,0 +1,2 @@
run = "while :; do set -ex; nix-env -iA nixpkgs.unzip; curl -o./libreddit.zip -fsSL -- https://nightly.link/libreddit/libreddit/workflows/main-rust/master/libreddit.zip; unzip -n libreddit.zip; mv target/x86_64-unknown-linux-gnu/release/libreddit .; chmod +x libreddit; set +e; ./libreddit -H 63115200; sleep 1; done"
language = "bash"
CREDITS — new file (96 lines)
@@ -0,0 +1,96 @@
5trongthany <65565784+5trongthany@users.noreply.github.com>
674Y3r <87250374+674Y3r@users.noreply.github.com>
accountForIssues <52367365+accountForIssues@users.noreply.github.com>
Adrian Lebioda <adrianlebioda@gmail.com>
alefvanoon <53198048+alefvanoon@users.noreply.github.com>
Alexandre Iooss <erdnaxe@crans.org>
alyaeanyx <alexandra.hollmeier@mailbox.org>
AndreVuillemot160 <84594011+AndreVuillemot160@users.noreply.github.com>
Andrew Kaufman <57281817+andrew-kaufman@users.noreply.github.com>
Artemis <51862164+artemislena@users.noreply.github.com>
arthomnix <35371030+arthomnix@users.noreply.github.com>
Arya K <73596856+gi-yt@users.noreply.github.com>
Austin Huang <im@austinhuang.me>
Basti <pred2k@users.noreply.github.com>
Ben Smith <37027883+smithbm2316@users.noreply.github.com>
BobIsMyManager <ahoumatt@yahoo.com>
curlpipe <11898833+curlpipe@users.noreply.github.com>
dacousb <53299044+dacousb@users.noreply.github.com>
Daniel Valentine <Daniel-Valentine@users.noreply.github.com>
Daniel Valentine <daniel@vielle.ws>
dbrennand <52419383+dbrennand@users.noreply.github.com>
dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Diego Magdaleno <38844659+DiegoMagdaleno@users.noreply.github.com>
domve <domve@posteo.net>
Dyras <jevwmguf@duck.com>
Edward <101938856+EdwardLangdon@users.noreply.github.com>
elliot <75391956+ellieeet123@users.noreply.github.com>
erdnaxe <erdnaxe@users.noreply.github.com>
Esmail EL BoB <github.defilable@simplelogin.co>
FireMasterK <20838718+FireMasterK@users.noreply.github.com>
George Roubos <cowkingdom@hotmail.com>
git-bruh <e817509a-8ee9-4332-b0ad-3a6bdf9ab63f@aleeas.com>
gmnsii <95436780+gmnsii@users.noreply.github.com>
guaddy <67671414+guaddy@users.noreply.github.com>
Harsh Mishra <erbeusgriffincasper@gmail.com>
igna <igna@intent.cool>
imabritishcow <bcow@protonmail.com>
Johannes Schleifenbaum <johannes@js-webcoding.de>
Josiah <70736638+fres7h@users.noreply.github.com>
JPyke3 <pyke.jacob1@gmail.com>
Kavin <20838718+FireMasterK@users.noreply.github.com>
Kazi <kzshantonu@users.noreply.github.com>
Kieran <42723993+EnderDev@users.noreply.github.com>
Kieran <kieran@dothq.co>
Kyle Roth <kylrth@gmail.com>
laazyCmd <laazy.pr00gramming@protonmail.com>
Laurențiu Nicola <lnicola@users.noreply.github.com>
Lena <102762572+MarshDeer@users.noreply.github.com>
Macic <46872282+Macic-Dev@users.noreply.github.com>
Mario A <10923513+Midblyte@users.noreply.github.com>
Matthew Crossman <matt@crossman.page>
Matthew E <matt@matthew.science>
Matthew Esposito <matt@matthew.science>
Mennaruuk <52135169+Mennaruuk@users.noreply.github.com>
mikupls <93015331+mikupls@users.noreply.github.com>
Nainar <nainar.mb@gmail.com>
Nathan Moos <moosingin3space@gmail.com>
Nicholas Christopher <nchristopher@tuta.io>
Nick Lowery <ClockVapor@users.noreply.github.com>
Nico <github@dr460nf1r3.org>
NKIPSC <15067635+NKIPSC@users.noreply.github.com>
o69mar <119129086+o69mar@users.noreply.github.com>
obeho <71698631+obeho@users.noreply.github.com>
obscurity <z@x4.pm>
Om G <34579088+OxyMagnesium@users.noreply.github.com>
pin <90570748+0323pin@users.noreply.github.com>
potatoesAreGod <118043038+potatoesAreGod@users.noreply.github.com>
RiversideRocks <59586759+RiversideRocks@users.noreply.github.com>
robin <8597693+robrobinbin@users.noreply.github.com>
Robin <8597693+robrobinbin@users.noreply.github.com>
robrobinbin <>
robrobinbin <8597693+robrobinbin@users.noreply.github.com>
robrobinbin <robindepril@gmail.com>
Ruben Elshof <15641671+rubenelshof@users.noreply.github.com>
Rupert Angermeier <rangermeier@users.noreply.github.com>
Scoder12 <34356756+Scoder12@users.noreply.github.com>
Slayer <51095261+GhostSlayer@users.noreply.github.com>
Soheb <somoso@users.noreply.github.com>
somini <somini@users.noreply.github.com>
somoso <github@soheb.anonaddy.com>
Spenser Black <spenserblack01@gmail.com>
Spike <19519553+spikecodes@users.noreply.github.com>
spikecodes <19519553+spikecodes@users.noreply.github.com>
sybenx <syb@duck.com>
TheCultLeader666 <65368815+TheCultLeader666@users.noreply.github.com>
TheFrenchGhosty <47571719+TheFrenchGhosty@users.noreply.github.com>
The TwilightBlood <hwengerstickel@protonmail.com>
tirz <36501933+tirz@users.noreply.github.com>
Tokarak <63452145+Tokarak@users.noreply.github.com>
Tsvetomir Bonev <invakid404@riseup.net>
Vladislav Nepogodin <nepogodin.vlad@gmail.com>
Walkx <walkxnl@gmail.com>
Wichai <1482605+Chengings@users.noreply.github.com>
wsy2220 <wsy@dogben.com>
xatier <xatierlike@gmail.com>
Zach <72994911+zachjmurphy@users.noreply.github.com>
Cargo.lock — generated (2351 changed lines)
File diff suppressed because it is too large.
Cargo.toml (45 changed lines)
@@ -3,18 +3,39 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
version = "0.2.7"
version = "0.30.1"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"
edition = "2021"

[dependencies]
base64 = "0.13.0"
actix-web = { version = "3.3.2", features = ["rustls"] }
askama = "0.10.5"
ureq = "2.0.1"
serde = { version = "1.0.118", default_features = false, features = ["derive"] }
serde_json = "1.0"
async-recursion = "0.3.1"
url = "2.2.0"
regex = "1.4.2"
time = "0.2.23"
askama = { version = "0.12.0", default-features = false }
cached = "0.43.0"
clap = { version = "4.1.1", default-features = false, features = ["std", "env"] }
regex = "1.7.1"
serde = { version = "1.0.152", features = ["derive"] }
cookie = "0.17.0"
futures-lite = "1.12.0"
hyper = { version = "0.14.23", features = ["full"] }
hyper-rustls = "0.24.0"
percent-encoding = "2.2.0"
route-recognizer = "0.3.1"
serde_json = "1.0.91"
tokio = { version = "1.24.2", features = ["full"] }
time = { version = "0.3.17", features = ["local-offset"] }
url = "2.3.1"
rust-embed = { version = "6.4.2", features = ["include-exclude"] }
libflate = "1.2.0"
brotli = { version = "3.3.4", features = ["std"] }
toml = "0.7.4"
once_cell = "1.17.0"
serde_yaml = "0.9.16"
build_html = "2.2.0"

[dev-dependencies]
lipsum = "0.9.0"
sealed_test = "1.0.0"

[profile.release]
codegen-units = 1
lto = true
strip = "symbols"
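The `[profile.release]` section above enables fat LTO, a single codegen unit, and symbol stripping. A sketch of exercising it locally (the `file` check is just an illustrative way to confirm the binary came out stripped, not part of the project's tooling):

```bash
# Build with the release profile defined in Cargo.toml
cargo build --release

# Should report a stripped binary
file target/release/libreddit
```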
Dockerfile (39 changed lines)
@@ -1,9 +1,36 @@
FROM rust:alpine as builder
WORKDIR /usr/src/libreddit
COPY . .
RUN apk add --no-cache g++ openssl-dev
RUN cargo install --path .
####################################################################################################
## Builder
####################################################################################################
FROM rust:alpine AS builder

RUN apk add --no-cache musl-dev

WORKDIR /libreddit

COPY . .

RUN cargo build --target x86_64-unknown-linux-musl --release

####################################################################################################
## Final image
####################################################################################################
FROM alpine:latest
COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit

# Import ca-certificates from builder
COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
COPY --from=builder /etc/ssl/certs /etc/ssl/certs

# Copy our build
COPY --from=builder /libreddit/target/x86_64-unknown-linux-musl/release/libreddit /usr/local/bin/libreddit

# Use an unprivileged user.
RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
USER libreddit

# Tell Docker to expose port 8080
EXPOSE 8080

# Run a healthcheck every minute to make sure Libreddit is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

CMD ["libreddit"]
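A sketch of building and running this image locally (the local tag `libreddit` is an arbitrary name, not the published image):

```bash
# Build the multi-stage image from this Dockerfile and run it on port 8080
docker build -t libreddit .
docker run -d --name libreddit -p 8080:8080 libreddit

# The HEALTHCHECK defined above reports its status here once the container is up
docker ps --filter name=libreddit
```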
Dockerfile.arm — new file (45 lines)
@@ -0,0 +1,45 @@
####################################################################################################
## Builder
####################################################################################################
FROM rust:alpine AS builder

RUN apk add --no-cache g++ git

WORKDIR /usr/src/libreddit

# cache dependencies in their own layer
COPY Cargo.lock Cargo.toml .
RUN mkdir src && echo "fn main() {}" > src/main.rs && cargo install --config net.git-fetch-with-cli=true --path . && rm -rf ./src

COPY . .

# net.git-fetch-with-cli is specified in order to prevent a potential OOM kill
# in low memory environments. See:
# https://users.rust-lang.org/t/cargo-uses-too-much-memory-being-run-in-qemu/76531
# This is tracked under issue #641. This also requires us to install git in the
# builder.
RUN cargo install --config net.git-fetch-with-cli=true --path .

####################################################################################################
## Final image
####################################################################################################
FROM alpine:latest

# Import ca-certificates from builder
COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
COPY --from=builder /etc/ssl/certs /etc/ssl/certs

# Copy our build
COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit

# Use an unprivileged user.
RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
USER libreddit

# Tell Docker to expose port 8080
EXPOSE 8080

# Run a healthcheck every minute to make sure Libreddit is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

CMD ["libreddit"]
Dockerfile.armv7 — new file (43 lines)
@@ -0,0 +1,43 @@
####################################################################################################
## Builder
####################################################################################################
FROM --platform=$BUILDPLATFORM rust:slim AS builder

ENV CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER=arm-linux-gnueabihf-gcc
ENV CC_armv7_unknown_linux_musleabihf=arm-linux-gnueabihf-gcc

RUN apt-get update && apt-get -y install gcc-arm-linux-gnueabihf \
    binutils-arm-linux-gnueabihf \
    musl-tools

RUN rustup target add armv7-unknown-linux-musleabihf

WORKDIR /libreddit

COPY . .

RUN cargo build --target armv7-unknown-linux-musleabihf --release

####################################################################################################
## Final image
####################################################################################################
FROM alpine:latest

# Import ca-certificates from builder
COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
COPY --from=builder /etc/ssl/certs /etc/ssl/certs

# Copy our build
COPY --from=builder /libreddit/target/armv7-unknown-linux-musleabihf/release/libreddit /usr/local/bin/libreddit

# Use an unprivileged user.
RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
USER libreddit

# Tell Docker to expose port 8080
EXPOSE 8080

# Run a healthcheck every minute to make sure Libreddit is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

CMD ["libreddit"]
README.md (270 changed lines)
@@ -2,95 +2,90 @@

> An alternative private front-end to Reddit

Libre + Reddit = [Libreddit](https://libredd.it)
# ➡️ Discontinued. Use [Redlib](https://github.com/redlib-org/redlib) instead.

- 🚀 Fast: written in Rust for blazing fast speeds and safety
- ☁️ Light: no JavaScript, no ads, no tracking
## As of July 12th, 2023, Libreddit is currently not operational as Reddit's API changes, that were designed to kill third-party apps and content scrapers who don't pay [large fees](https://www.theverge.com/2023/5/31/23743993/reddit-apollo-client-api-cost), went into effect. [Read the full announcement here.](https://github.com/libreddit/libreddit/issues/840)

![screenshot](https://i.ibb.co/QYbqTQt/libreddit-rust.png)

---

**10-second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit).

- 🚀 Fast: written in Rust for blazing-fast speeds and memory safety
- ☁️ Light: no JavaScript, no ads, no tracking, no bloat
- 🕵 Private: all requests are proxied through the server, including media
- 🔒 Secure: strong [Content Security Policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) prevents browser requests to Reddit

Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libredd.it/r/unpopularopinion) without being [tracked](#reddit).
---

## Contents
- [Screenshot](#screenshot)
- [Instances](#instances)
- [About](#about)
- [Elsewhere](#elsewhere)
- [Info](#info)
- [Teddit Comparison](#how-does-it-compare-to-teddit)
- [Comparison](#comparison)
- [Speed](#speed)
- [Privacy](#privacy)
- [Installation](#installation)
- [Cargo](#a-cargo)
- [Docker](#b-docker)
- [AUR](#c-aur)
- [GitHub Releases](#d-github-releases)
- [Repl.it](#e-replit)
- Developing
- [Deployment](#deployment)
- [Building](#building)
I appreciate any donations! Your support allows me to continue developing Libreddit.

## Screenshot
<a href="https://www.buymeacoffee.com/spikecodes" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 40px" ></a>
<a href="https://liberapay.com/spike/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg" style="height: 40px"></a>

![](https://i.ibb.co/6mXqb4G/libreddit-rust.png)

## Instances
**Bitcoin:** `bc1qwyxjnafpu3gypcpgs025cw9wa7ryudtecmwa6y`

Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new) to have your [selfhosted instance](#deployment) listed here!
**Monero:** `45FJrEuFPtG2o7QZz2Nps77TbHD4sPqxViwbdyV9A6ktfHiWs47UngG5zXPcLoDXAc8taeuBgeNjfeprwgeXYXhN3C9tVSR`

| Website | Country | Cloudflare |
|-|-|-|
| [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | ✅ |
| [libreddit.insanity.wtf](https://libreddit.insanity.wtf) | 🇺🇸 US | ✅ |
| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | ✅ |
| [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |
---

A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare.com). The checkmark will not be listed for a site which uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.
# Instances

## About
🔗 **Want to automatically redirect Reddit links to Libreddit? Use [LibRedirect](https://github.com/libredirect/libredirect) or [Privacy Redirect](https://github.com/SimonBrazell/privacy-redirect)!**

### Elsewhere
Find Libreddit on...
- 💬 Matrix: [#libreddit:kde.org](https://matrix.to/#/#libreddit:matrix.org)
- 🐋 Docker: [spikecodes/libreddit](https://hub.docker.com/r/spikecodes/libreddit)
- :octocat: GitHub: [spikecodes/libreddit](https://github.com/spikecodes/libreddit)
- 🦊 GitLab: [spikecodes/libreddit](https://gitlab.com/spikecodes/libreddit)
[Follow this link](https://github.com/libreddit/libreddit-instances/blob/master/instances.md) for an up-to-date table of instances in Markdown format. This list is also available as [a machine-readable JSON](https://github.com/libreddit/libreddit-instances/blob/master/instances.json).

### Info
Both files are part of the [libreddit-instances](https://github.com/libreddit/libreddit-instances) repository. To contribute your [self-hosted instance](#deployment) to the list, see the [libreddit-instances README](https://github.com/libreddit/libreddit-instances/blob/master/README.md).

---

# About

Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/libreddit/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/libreddit/libreddit).

## Built with

- [Rust](https://www.rust-lang.org/) - Programming language
- [Hyper](https://github.com/hyperium/hyper) - HTTP server and client
- [Askama](https://github.com/djc/askama) - Templating engine
- [Rustls](https://github.com/ctz/rustls) - TLS library

## Info
Libreddit hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Libreddit was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram.

Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/spikecodes/libreddit/issues).
Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/libreddit/libreddit/issues).

### How does it compare to Teddit?
## How does it compare to Teddit?

Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Libreddit into an even more polished product.
Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two, and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Libreddit into an even more polished product.

If you are looking to compare, the biggest differences I have noticed are:
- Libreddit is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective.
- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Actix Web](https://actix.rs), which was [benchmarked as the fastest web server for single queries](https://www.techempower.com/benchmarks/#hw=ph&test=db).
- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Hyper](https://hyper.rs), a speedy and lightweight HTTP server/client implementation.

## Comparison
---

# Comparison

This section outlines how Libreddit compares to Reddit.

### Speed
## Speed

Last tested December 21, 2020.
Last tested Nov 11, 2022.

Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Flibredd.it), [Reddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Fwww.reddit.com%2F)).
Results from Google PageSpeed Insights ([Libreddit Report](https://pagespeed.web.dev/report?url=https%3A%2F%2Flibreddit.spike.codes%2F), [Reddit Report](https://pagespeed.web.dev/report?url=https://www.reddit.com)).

| | Libreddit | Reddit |
|---------------------|-----------|-----------|
| Requests | 22 | 70 |
| Resource Size | 135 KiB | 2,222 KiB |
| Time to Interactive | **1.7 s** | **11.5 s** |
| | Libreddit | Reddit |
|---------------------|-----------|-----------|
| Requests | 60 | 83 |
| Speed Index | 2.0s | 10.4s |
| Time to Interactive | **2.8s** | **12.4s** |

### Privacy
## Privacy

#### Reddit
### Reddit

**Logging:** According to Reddit's [privacy policy](https://www.redditinc.com/policies/privacy-policy), they "may [automatically] log information" including:
- IP address
@@ -105,7 +100,7 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot
- The requested URL
- Search terms

**Location:** The same privacy policy goes on to describe location data may be collected through the use of:
**Location:** The same privacy policy goes on to describe that location data may be collected through the use of:
- GPS (consensual)
- Bluetooth (consensual)
- Content associated with a location (consensual)
@@ -119,21 +114,31 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot
- Third-Party Cookies
- Third-Party Site

#### Libreddit
### Libreddit

For transparency, I hope to describe all the ways Libreddit handles user privacy.

**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid with troubleshooting.
#### Server

**DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare, meaning Cloudflare doesn't have access to user traffic.
* **Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs fetched to aid with troubleshooting.

**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libredd.it/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.
* **Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). These are not cross-site cookies and the cookies hold no personal data.

**Hosting:** The official instances (`libredd.it` and `libreddit.spike.codes`) are hosted on [Repl.it](https://repl.it/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.
#### Official instance (libreddit.spike.codes)

## Installation
The official instance is hosted at https://libreddit.spike.codes.

### A) Cargo
* **Server:** The official instance runs a production binary, and thus logs nothing.

* **DNS:** The domain for the official instance uses Cloudflare as the DNS resolver. However, this site is not proxied through Cloudflare, and thus Cloudflare doesn't have access to user traffic.

* **Hosting:** The official instance is hosted on [Replit](https://replit.com/), which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models, and therefore, self-hosting, using unofficial instances, and browsing through Tor are welcomed.

---

# Installation

## 1) Cargo

Make sure Rust stable is installed along with `cargo`, Rust's package manager.

@@ -141,59 +146,150 @@ Make sure Rust stable is installed along with `cargo`, Rust's package manager.
cargo install libreddit
```

### B) Docker
## 2) Docker

Deploy the Docker image of Libreddit:
Deploy the [Docker image](https://hub.docker.com/r/libreddit/libreddit) of Libreddit:
```
docker run -d --name libreddit -p 8080:8080 spikecodes/libreddit
docker pull libreddit/libreddit
docker run -d --name libreddit -p 8080:8080 libreddit/libreddit
```

Deploy using a different port (in this case, port 80):
```
docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
docker pull libreddit/libreddit
docker run -d --name libreddit -p 80:8080 libreddit/libreddit
```

### C) AUR
To deploy on `arm64` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:arm`.

To deploy on `armv7` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:armv7`.

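For a longer-lived deployment, a sketch of running the image with a restart policy and a couple of the instance settings documented further down (the container name and the chosen settings are arbitrary examples):

```bash
docker pull libreddit/libreddit
docker run -d --name libreddit \
  --restart unless-stopped \
  -e LIBREDDIT_DEFAULT_THEME=dark \
  -e LIBREDDIT_ROBOTS_DISABLE_INDEXING=on \
  -p 8080:8080 \
  libreddit/libreddit
```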
## 3) AUR
|
||||
|
||||
For ArchLinux users, Libreddit is available from the AUR as [`libreddit-git`](https://aur.archlinux.org/packages/libreddit-git).
|
||||
|
||||
Install:
|
||||
```
|
||||
yay -S libreddit-git
|
||||
```
|
||||
## 4) NetBSD/pkgsrc
|
||||
|
||||
### D) GitHub Releases
|
||||
For NetBSD users, Libreddit is available from the official repositories.
|
||||
|
||||
If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/spikecodes/libreddit/releases/latest).
|
||||
Currently, Libreddit does not have Windows or macOS binaries but those will be available soon.
|
||||
```
|
||||
pkgin install libreddit
|
||||
```
|
||||
|
||||
### E) Repl.it
|
||||
Or, if you prefer to build from source
|
||||
|
||||
**Note:** Repl.it is a free option but they are *not* private and are monitor server usage to prevent abuse. If you really need a free and easy setup, this method may work best for you.
|
||||
```
|
||||
cd /usr/pkgsrc/libreddit
|
||||
make install
|
||||
```
|
||||
|
||||
1. Create a Repl.it account (see note above)
|
||||
2. Visit [the official Repl](https://repl.it/@spikethecoder/libreddit) and fork it
|
||||
3. Hit the run button to download the latest Libreddit version and start it
|
||||
## 5) GitHub Releases
|
||||
|
||||
In the web preview (defaults to top right), you should see your instance hosted where you can assign a [custom domain](https://docs.repl.it/repls/web-hosting#custom-domains).
|
||||
If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/libreddit/libreddit/releases/latest).
|
||||
|
||||
## Deployment
|
||||
## 6) Replit/Heroku/Glitch
|
||||
|
||||
Once installed, deploy Libreddit (unless you're using Docker) by running:
|
||||
> **Warning**
|
||||
> These are free hosting options, but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.
|
||||
|
||||
<a href="https://repl.it/github/libreddit/libreddit"><img src="https://repl.it/badge/github/libreddit/libreddit" alt="Run on Repl.it" height="32" /></a>
|
||||
[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/libreddit/libreddit)
|
||||
[![Remix on Glitch](https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button-v2.svg)](https://glitch.com/edit/#!/remix/libreddit)
|
||||
|
||||
---
|
||||
|
||||
# Deployment
|
||||
|
||||
Once installed, deploy Libreddit to `0.0.0.0:8080` by running:
|
||||
|
||||
```
|
||||
libreddit
|
||||
```
|
||||
|
||||
Specify a custom address for the server by passing the `-a` or `--address` argument:
|
||||
## Instance settings
|
||||
|
||||
Assign a default value for each instance-specific setting by passing environment variables to Libreddit in the format `LIBREDDIT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
|
||||
|
||||
| Name | Possible values | Default value | Description |
|
||||
|---------------------------|-----------------|------------------|-----------------------------------------------------------------------------------------------------------|
|
||||
| `SFW_ONLY` | `["on", "off"]` | `off` | Enables SFW-only mode for the instance, i.e. all NSFW content is filtered. |
|
||||
| `BANNER` | String | (empty) | Allows the server to set a banner to be displayed. Currently this is displayed on the instance info page. |
|
||||
| `ROBOTS_DISABLE_INDEXING` | `["on", "off"]` | `off` | Disables indexing of the instance by search engines. |
|
||||
| `PUSHSHIFT_FRONTEND` | String | `www.unddit.com` | Allows the server to set the Pushshift frontend to be used with "removed" links. |
|
||||
|
||||
## Default User Settings

Assign a default value for each user-modifiable setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{Y}`. Replace `{Y}` with the setting name (see list below) in capital letters.

| Name | Possible values | Default value |
|-------------------------------------|------------------------------------------------------------------------------------------------------------------------------------|---------------|
| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox", "gruvboxdark", "gruvboxlight"]` | `system` |
| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` |
| `LAYOUT` | `["card", "clean", "compact"]` | `card` |
| `WIDE` | `["on", "off"]` | `off` |
| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
| `SHOW_NSFW` | `["on", "off"]` | `off` |
| `BLUR_NSFW` | `["on", "off"]` | `off` |
| `USE_HLS` | `["on", "off"]` | `off` |
| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` |
| `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` |
| `SUBSCRIPTIONS` | `+`-delimited list of subreddits (`sub1+sub2+sub3+...`) | _(none)_ |
| `HIDE_AWARDS` | `["on", "off"]` | `off` |
| `DISABLE_VISIT_REDDIT_CONFIRMATION` | `["on", "off"]` | `off` |

You can also configure Libreddit with a configuration file. An example `libreddit.toml` can be found below:

```toml
LIBREDDIT_DEFAULT_WIDE = "on"
LIBREDDIT_DEFAULT_USE_HLS = "on"
```

### Examples

```bash
LIBREDDIT_DEFAULT_SHOW_NSFW=on libreddit
```

```bash
LIBREDDIT_DEFAULT_WIDE=on LIBREDDIT_DEFAULT_THEME=dark libreddit -r
```

## Proxying using NGINX

> **Note**
> If you're [proxying Libreddit through an NGINX Reverse Proxy](https://github.com/libreddit/libreddit/issues/122#issuecomment-782226853), add
> ```nginx
> proxy_http_version 1.1;
> ```
> to your NGINX configuration file above your `proxy_pass` line.

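For reference, a minimal sketch of where this sits in a server block, assuming Libreddit listens on `127.0.0.1:8080` and using a placeholder hostname:

```nginx
server {
    listen 80;
    server_name libreddit.example.com;  # placeholder hostname

    location / {
        # Keep this above proxy_pass (see the note above).
        proxy_http_version 1.1;
        proxy_pass http://127.0.0.1:8080;
    }
}
```
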
## systemd

You can use the systemd service available in `contrib/libreddit.service`
(install it to `/etc/systemd/system/libreddit.service`).

That service can optionally be configured through environment variables by
creating a file at `/etc/libreddit.conf`; use `contrib/libreddit.conf` as a
template. You can also add the `LIBREDDIT_DEFAULT_{X}` settings explained
above.

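Then reload systemd and enable the service (a typical invocation; adjust to your distribution):

```bash
sudo systemctl daemon-reload
sudo systemctl enable --now libreddit.service
```
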
When "Proxying using NGINX" where the proxy is on the same machine, you should
|
||||
guarantee nginx waits for this service to start. Edit
|
||||
`/etc/systemd/system/libreddit.service.d/reverse-proxy.conf`:
|
||||
|
||||
```conf
|
||||
[Unit]
|
||||
Before=nginx.service
|
||||
```
|
||||
|
||||
## Building

```
git clone https://github.com/libreddit/libreddit
cd libreddit
cargo run
```

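To build an optimized release binary rather than a debug run, the standard Cargo release profile can be used:

```bash
cargo build --release
```
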
app.json (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
{
|
||||
"name": "Libreddit",
|
||||
"description": "Private front-end for Reddit",
|
||||
"buildpacks": [
|
||||
{
|
||||
"url": "https://github.com/emk/heroku-buildpack-rust"
|
||||
},
|
||||
{
|
||||
"url": "emk/rust"
|
||||
}
|
||||
],
|
||||
"stack": "container",
|
||||
"env": {
|
||||
"LIBREDDIT_DEFAULT_THEME": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_FRONT_PAGE": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_LAYOUT": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_WIDE": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_COMMENT_SORT": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_POST_SORT": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_SHOW_NSFW": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_BLUR_NSFW": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_USE_HLS": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_HIDE_HLS_NOTIFICATION": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_SFW_ONLY": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_HIDE_AWARDS": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_BANNER": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_ROBOTS_DISABLE_INDEXING": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_SUBSCRIPTIONS": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION": {
|
||||
"required": false
|
||||
},
|
||||
"LIBREDDIT_PUSHSHIFT_FRONTEND": {
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
|
build.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
use std::process::{Command, ExitStatus, Output};
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
use std::os::unix::process::ExitStatusExt;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::ExitStatusExt;
|
||||
|
||||
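// Build script: record the current git commit hash and expose it to the crate
// as the GIT_HASH environment variable; falls back to "dev" when git output is unavailable.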
fn main() {
|
||||
let output = String::from_utf8(
|
||||
Command::new("git")
|
||||
.args(["rev-parse", "HEAD"])
|
||||
.output()
|
||||
.unwrap_or(Output {
|
||||
stdout: vec![],
|
||||
stderr: vec![],
|
||||
status: ExitStatus::from_raw(0),
|
||||
})
|
||||
.stdout,
|
||||
)
|
||||
.unwrap_or_default();
|
||||
let git_hash = if output == String::default() { "dev".into() } else { output };
|
||||
println!("cargo:rustc-env=GIT_HASH={git_hash}");
|
||||
}
|
contrib/libreddit.conf (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
ADDRESS=0.0.0.0
|
||||
PORT=12345
|
||||
#LIBREDDIT_DEFAULT_THEME=default
|
||||
#LIBREDDIT_DEFAULT_FRONT_PAGE=default
|
||||
#LIBREDDIT_DEFAULT_LAYOUT=card
|
||||
#LIBREDDIT_DEFAULT_WIDE=off
|
||||
#LIBREDDIT_DEFAULT_POST_SORT=hot
|
||||
#LIBREDDIT_DEFAULT_COMMENT_SORT=confidence
|
||||
#LIBREDDIT_DEFAULT_SHOW_NSFW=off
|
||||
#LIBREDDIT_DEFAULT_BLUR_NSFW=off
|
||||
#LIBREDDIT_DEFAULT_USE_HLS=off
|
||||
#LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION=off
|
||||
#LIBREDDIT_DEFAULT_AUTOPLAY_VIDEOS=off
|
||||
#LIBREDDIT_DEFAULT_SUBSCRIPTIONS=off (sub1+sub2+sub3)
|
||||
#LIBREDDIT_DEFAULT_HIDE_AWARDS=off
|
||||
#LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION=off
|
contrib/libreddit.service (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
[Unit]
|
||||
Description=libreddit daemon
|
||||
After=network.service
|
||||
|
||||
[Service]
|
||||
DynamicUser=yes
|
||||
# Default Values
|
||||
#Environment=ADDRESS=0.0.0.0
|
||||
#Environment=PORT=8080
|
||||
# Optional Override
|
||||
EnvironmentFile=-/etc/libreddit.conf
|
||||
ExecStart=/usr/bin/libreddit -a ${ADDRESS} -p ${PORT}
|
||||
|
||||
# Hardening
|
||||
DeviceAllow=
|
||||
LockPersonality=yes
|
||||
MemoryDenyWriteExecute=yes
|
||||
PrivateDevices=yes
|
||||
ProcSubset=pid
|
||||
ProtectClock=yes
|
||||
ProtectControlGroups=yes
|
||||
ProtectHome=yes
|
||||
ProtectHostname=yes
|
||||
ProtectKernelLogs=yes
|
||||
ProtectKernelModules=yes
|
||||
ProtectKernelTunables=yes
|
||||
ProtectProc=invisible
|
||||
RestrictAddressFamilies=AF_INET AF_INET6
|
||||
RestrictNamespaces=yes
|
||||
RestrictRealtime=yes
|
||||
RestrictSUIDSGID=yes
|
||||
SystemCallArchitectures=native
|
||||
SystemCallFilter=@system-service ~@privileged ~@resources
|
||||
UMask=0077
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
docker-compose.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
web:
|
||||
build: .
|
||||
restart: always
|
||||
container_name: "libreddit"
|
||||
ports:
|
||||
- 8080:8080
|
||||
user: nobody
|
||||
read_only: true
|
||||
security_opt:
|
||||
- no-new-privileges:true
|
||||
cap_drop:
|
||||
- ALL
|
||||
networks:
|
||||
- libreddit
|
||||
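# Periodically request the settings page to verify the instance is still responding.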
healthcheck:
|
||||
test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"]
|
||||
interval: 5m
|
||||
timeout: 3s
|
||||
|
||||
networks:
|
||||
libreddit:
|
heroku.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
build:
|
||||
docker:
|
||||
web: Dockerfile
|
scripts/gen-credits.sh (new executable file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# This script generates the CREDITS file in the repository root, which
|
||||
# contains a list of all contributors to the Libreddit project.
|
||||
#
|
||||
# We use git-log to surface the names and emails of all authors and committers,
|
||||
# and grep will filter out the automated commits made by GitHub.
|
||||
|
||||
set -o pipefail
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")/../" || exit 1
|
||||
git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' master \
|
||||
| sort -t'<' -u -k1,1 -k2,2 \
|
||||
| grep -Fv -- 'GitHub <noreply@github.com>' \
|
||||
> CREDITS
|
src/client.rs (new file, 307 lines)
@@ -0,0 +1,307 @@
|
||||
use cached::proc_macro::cached;
|
||||
use futures_lite::{future::Boxed, FutureExt};
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri};
|
||||
use hyper_rustls::HttpsConnector;
|
||||
use libflate::gzip;
|
||||
use once_cell::sync::Lazy;
|
||||
use percent_encoding::{percent_encode, CONTROLS};
|
||||
use serde_json::Value;
|
||||
use std::{io, result::Result};
|
||||
|
||||
use crate::dbg_msg;
|
||||
use crate::server::RequestExt;
|
||||
|
||||
const REDDIT_URL_BASE: &str = "https://www.reddit.com";
|
||||
|
||||
static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
|
||||
let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();
|
||||
client::Client::builder().build(https)
|
||||
});
|
||||
|
||||
/// Gets the canonical path for a resource on Reddit. This is accomplished by
|
||||
/// making a `HEAD` request to Reddit at the path given in `path`.
|
||||
///
|
||||
/// This function returns `Ok(Some(path))`, where `path`'s value is identical
|
||||
/// to that of the value of the argument `path`, if Reddit responds to our
|
||||
/// `HEAD` request with a 2xx-family HTTP code. It will also return an
|
||||
/// `Ok(Some(String))` if Reddit responds to our `HEAD` request with a
|
||||
/// `Location` header in the response, and the HTTP code is in the 3xx-family;
|
||||
/// the `String` will contain the path as reported in `Location`. The return
|
||||
/// value is `Ok(None)` if Reddit responded with a 3xx, but did not provide a
|
||||
/// `Location` header. An `Err(String)` is returned if Reddit responds with a
|
||||
/// 429, or if we were unable to decode the value in the `Location` header.
|
||||
#[cached(size = 1024, time = 600, result = true)]
|
||||
pub async fn canonical_path(path: String) -> Result<Option<String>, String> {
|
||||
let res = reddit_head(path.clone(), true).await?;
|
||||
|
||||
if res.status() == 429 {
|
||||
return Err("Too many requests.".to_string());
|
||||
};
|
||||
|
||||
// If Reddit responds with a 2xx, then the path is already canonical.
|
||||
if res.status().to_string().starts_with('2') {
|
||||
return Ok(Some(path));
|
||||
}
|
||||
|
||||
// If Reddit responds with anything other than 3xx (except for the 2xx as
|
||||
// above), return a None.
|
||||
if !res.status().to_string().starts_with('3') {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
Ok(
|
||||
res
|
||||
.headers()
|
||||
.get(header::LOCATION)
|
||||
.map(|val| percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string()),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
|
||||
let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());
|
||||
|
||||
// For each parameter in request
|
||||
for (name, value) in req.params().iter() {
|
||||
// Fill the parameter value in the url
|
||||
url = url.replace(&format!("{{{}}}", name), value);
|
||||
}
|
||||
|
||||
stream(&url, &req).await
|
||||
}
|
||||
|
||||
async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
|
||||
// First parameter is target URL (mandatory).
|
||||
let uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
|
||||
|
||||
// Build the hyper client from the HTTPS connector.
|
||||
let client: client::Client<_, hyper::Body> = CLIENT.clone();
|
||||
|
||||
let mut builder = Request::get(uri);
|
||||
|
||||
// Copy useful headers from original request
|
||||
for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
|
||||
if let Some(value) = req.headers().get(key) {
|
||||
builder = builder.header(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
let stream_request = builder.body(Body::empty()).map_err(|_| "Couldn't build empty body in stream".to_string())?;
|
||||
|
||||
client
|
||||
.request(stream_request)
|
||||
.await
|
||||
.map(|mut res| {
|
||||
let mut rm = |key: &str| res.headers_mut().remove(key);
|
||||
|
||||
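// Strip CDN, caching, and reporting headers that Reddit attaches to media responses
// before passing the response back to the client.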
rm("access-control-expose-headers");
|
||||
rm("server");
|
||||
rm("vary");
|
||||
rm("etag");
|
||||
rm("x-cdn");
|
||||
rm("x-cdn-client-region");
|
||||
rm("x-cdn-name");
|
||||
rm("x-cdn-server-region");
|
||||
rm("x-reddit-cdn");
|
||||
rm("x-reddit-video-features");
|
||||
rm("Nel");
|
||||
rm("Report-To");
|
||||
|
||||
res
|
||||
})
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
/// Makes a GET request to Reddit at `path`. By default, this will honor HTTP
|
||||
/// 3xx codes Reddit returns and will automatically redirect.
|
||||
fn reddit_get(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
|
||||
request(&Method::GET, path, true, quarantine)
|
||||
}
|
||||
|
||||
/// Makes a HEAD request to Reddit at `path`. This will not follow redirects.
|
||||
fn reddit_head(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
|
||||
request(&Method::HEAD, path, false, quarantine)
|
||||
}
|
||||
|
||||
/// Makes a request to Reddit. If `redirect` is `true`, this function
|
||||
/// will recurse on the URL that Reddit provides in the Location HTTP header
|
||||
/// in its response.
|
||||
fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
|
||||
// Build Reddit URL from path.
|
||||
let url = format!("{}{}", REDDIT_URL_BASE, path);
|
||||
|
||||
// Construct the hyper client from the HTTPS connector.
|
||||
let client: client::Client<_, hyper::Body> = CLIENT.clone();
|
||||
|
||||
// Build request to Reddit. When making a GET, request gzip compression.
|
||||
// (Reddit doesn't do brotli yet.)
|
||||
let builder = Request::builder()
|
||||
.method(method)
|
||||
.uri(&url)
|
||||
.header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION")))
|
||||
.header("Host", "www.reddit.com")
|
||||
.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
|
||||
.header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" })
|
||||
.header("Accept-Language", "en-US,en;q=0.5")
|
||||
.header("Connection", "keep-alive")
|
||||
.header(
|
||||
"Cookie",
|
||||
if quarantine {
|
||||
"_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D"
|
||||
} else {
|
||||
""
|
||||
},
|
||||
)
|
||||
.body(Body::empty());
|
||||
|
||||
async move {
|
||||
match builder {
|
||||
Ok(req) => match client.request(req).await {
|
||||
Ok(mut response) => {
|
||||
// Reddit may respond with a 3xx. Decide whether or not to
|
||||
// redirect based on caller params.
|
||||
if response.status().to_string().starts_with('3') {
|
||||
if !redirect {
|
||||
return Ok(response);
|
||||
};
|
||||
|
||||
return request(
|
||||
method,
|
||||
response
|
||||
.headers()
|
||||
.get(header::LOCATION)
|
||||
.map(|val| {
|
||||
// We need to make adjustments to the URI
|
||||
// we get back from Reddit. Namely, we
|
||||
// must:
|
||||
//
|
||||
// 1. Remove the authority (e.g.
|
||||
// https://www.reddit.com) that may be
|
||||
// present, so that we recurse on the
|
||||
// path (and query parameters) as
|
||||
// required.
|
||||
//
|
||||
// 2. Percent-encode the path.
|
||||
let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string();
|
||||
format!("{}{}raw_json=1", new_path, if new_path.contains('?') { "&" } else { "?" })
|
||||
})
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
true,
|
||||
quarantine,
|
||||
)
|
||||
.await;
|
||||
};
|
||||
|
||||
match response.headers().get(header::CONTENT_ENCODING) {
|
||||
// Content not compressed.
|
||||
None => Ok(response),
|
||||
|
||||
// Content encoded (hopefully with gzip).
|
||||
Some(hdr) => {
|
||||
match hdr.to_str() {
|
||||
Ok(val) => match val {
|
||||
"gzip" => {}
|
||||
"identity" => return Ok(response),
|
||||
_ => return Err("Reddit response was encoded with an unsupported compressor".to_string()),
|
||||
},
|
||||
Err(_) => return Err("Reddit response was invalid".to_string()),
|
||||
}
|
||||
|
||||
// We get here if the body is gzip-compressed.
|
||||
|
||||
// The body must be something that implements
|
||||
// std::io::Read, hence the conversion to
|
||||
// bytes::buf::Buf and then transformation into a
|
||||
// Reader.
|
||||
let mut decompressed: Vec<u8>;
|
||||
{
|
||||
let mut aggregated_body = match body::aggregate(response.body_mut()).await {
|
||||
Ok(b) => b.reader(),
|
||||
Err(e) => return Err(e.to_string()),
|
||||
};
|
||||
|
||||
let mut decoder = match gzip::Decoder::new(&mut aggregated_body) {
|
||||
Ok(decoder) => decoder,
|
||||
Err(e) => return Err(e.to_string()),
|
||||
};
|
||||
|
||||
decompressed = Vec::<u8>::new();
|
||||
if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
|
||||
return Err(e.to_string());
|
||||
};
|
||||
}
|
||||
|
||||
response.headers_mut().remove(header::CONTENT_ENCODING);
|
||||
response.headers_mut().insert(header::CONTENT_LENGTH, decompressed.len().into());
|
||||
*(response.body_mut()) = Body::from(decompressed);
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
dbg_msg!("{} {}: {}", method, path, e);
|
||||
|
||||
Err(e.to_string())
|
||||
}
|
||||
},
|
||||
Err(_) => Err("Post url contains non-ASCII characters".to_string()),
|
||||
}
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
// Make a request to a Reddit API and parse the JSON response
|
||||
#[cached(size = 100, time = 30, result = true)]
|
||||
pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
|
||||
// Closure to quickly build errors
|
||||
let err = |msg: &str, e: String| -> Result<Value, String> {
|
||||
// eprintln!("{} - {}: {}", url, msg, e);
|
||||
Err(format!("{}: {}", msg, e))
|
||||
};
|
||||
|
||||
// Fetch the url...
|
||||
match reddit_get(path.clone(), quarantine).await {
|
||||
Ok(response) => {
|
||||
let status = response.status();
|
||||
|
||||
// asynchronously aggregate the chunks of the body
|
||||
match hyper::body::aggregate(response).await {
|
||||
Ok(body) => {
|
||||
// Parse the response from Reddit as JSON
|
||||
match serde_json::from_reader(body.reader()) {
|
||||
Ok(value) => {
|
||||
let json: Value = value;
|
||||
// If Reddit returned an error
|
||||
if json["error"].is_i64() {
|
||||
Err(
|
||||
json["reason"]
|
||||
.as_str()
|
||||
.unwrap_or_else(|| {
|
||||
json["message"].as_str().unwrap_or_else(|| {
|
||||
eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path);
|
||||
"Error parsing reddit error"
|
||||
})
|
||||
})
|
||||
.to_string(),
|
||||
)
|
||||
} else {
|
||||
Ok(json)
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
if status.is_server_error() {
|
||||
Err("Reddit is having issues, check if there's an outage".to_string())
|
||||
} else {
|
||||
err("Failed to parse page JSON data", e.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => err("Failed receiving body from Reddit", e.to_string()),
|
||||
}
|
||||
}
|
||||
Err(e) => err("Couldn't send request to Reddit", e),
|
||||
}
|
||||
}
|
src/config.rs (new file, 181 lines)
@@ -0,0 +1,181 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{env::var, fs::read_to_string};
|
||||
|
||||
// Waiting for https://github.com/rust-lang/rust/issues/74465 to land, so we
|
||||
// can reduce reliance on once_cell.
|
||||
//
|
||||
// This is the local static that is initialized at runtime (technically at
|
||||
// first request) and contains the instance settings.
|
||||
pub(crate) static CONFIG: Lazy<Config> = Lazy::new(Config::load);
|
||||
|
||||
// This serves as the frontend for the Pushshift API - on removed comments, this URL will
|
||||
// be the base of a link, to display removed content (on another site).
|
||||
pub(crate) const DEFAULT_PUSHSHIFT_FRONTEND: &str = "www.unddit.com";
|
||||
|
||||
/// Stores the configuration parsed from the environment variables and the
|
||||
/// config file. `Config::Default()` contains None for each setting.
|
||||
/// When adding more config settings, add them to `Config::load`,
|
||||
/// `get_setting_from_config`, both below, as well as
|
||||
/// instance_info::InstanceInfo.to_string(), README.md and app.json.
|
||||
#[derive(Default, Serialize, Deserialize, Clone, Debug)]
|
||||
pub struct Config {
|
||||
#[serde(rename = "LIBREDDIT_SFW_ONLY")]
|
||||
pub(crate) sfw_only: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_THEME")]
|
||||
pub(crate) default_theme: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_FRONT_PAGE")]
|
||||
pub(crate) default_front_page: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_LAYOUT")]
|
||||
pub(crate) default_layout: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_WIDE")]
|
||||
pub(crate) default_wide: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_COMMENT_SORT")]
|
||||
pub(crate) default_comment_sort: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_POST_SORT")]
|
||||
pub(crate) default_post_sort: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_SHOW_NSFW")]
|
||||
pub(crate) default_show_nsfw: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_BLUR_NSFW")]
|
||||
pub(crate) default_blur_nsfw: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_USE_HLS")]
|
||||
pub(crate) default_use_hls: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION")]
|
||||
pub(crate) default_hide_hls_notification: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_HIDE_AWARDS")]
|
||||
pub(crate) default_hide_awards: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_SUBSCRIPTIONS")]
|
||||
pub(crate) default_subscriptions: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")]
|
||||
pub(crate) default_disable_visit_reddit_confirmation: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_BANNER")]
|
||||
pub(crate) banner: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_ROBOTS_DISABLE_INDEXING")]
|
||||
pub(crate) robots_disable_indexing: Option<String>,
|
||||
|
||||
#[serde(rename = "LIBREDDIT_PUSHSHIFT_FRONTEND")]
|
||||
pub(crate) pushshift: Option<String>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
/// Load the configuration from the environment variables and the config file.
|
||||
/// In the case that there are no environment variables set and there is no
|
||||
/// config file, this function returns a Config that contains all None values.
|
||||
pub fn load() -> Self {
|
||||
// Read from libreddit.toml config file. If for any reason, it fails, the
|
||||
// default `Config` is used (all None values)
|
||||
let config: Config = toml::from_str(&read_to_string("libreddit.toml").unwrap_or_default()).unwrap_or_default();
|
||||
// This function defines the order of preference - first check for
|
||||
// environment variables with "LIBREDDIT", then check the config, then if
|
||||
// both are `None`, return a `None` via the `map_or_else` function
|
||||
let parse = |key: &str| -> Option<String> { var(key).ok().map_or_else(|| get_setting_from_config(key, &config), Some) };
|
||||
|
||||
Self {
|
||||
sfw_only: parse("LIBREDDIT_SFW_ONLY"),
|
||||
default_theme: parse("LIBREDDIT_DEFAULT_THEME"),
|
||||
default_front_page: parse("LIBREDDIT_DEFAULT_FRONT_PAGE"),
|
||||
default_layout: parse("LIBREDDIT_DEFAULT_LAYOUT"),
|
||||
default_post_sort: parse("LIBREDDIT_DEFAULT_POST_SORT"),
|
||||
default_wide: parse("LIBREDDIT_DEFAULT_WIDE"),
|
||||
default_comment_sort: parse("LIBREDDIT_DEFAULT_COMMENT_SORT"),
|
||||
default_show_nsfw: parse("LIBREDDIT_DEFAULT_SHOW_NSFW"),
|
||||
default_blur_nsfw: parse("LIBREDDIT_DEFAULT_BLUR_NSFW"),
|
||||
default_use_hls: parse("LIBREDDIT_DEFAULT_USE_HLS"),
|
||||
default_hide_hls_notification: parse("LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION"),
|
||||
default_hide_awards: parse("LIBREDDIT_DEFAULT_HIDE_AWARDS"),
|
||||
default_subscriptions: parse("LIBREDDIT_DEFAULT_SUBSCRIPTIONS"),
|
||||
default_disable_visit_reddit_confirmation: parse("LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION"),
|
||||
banner: parse("LIBREDDIT_BANNER"),
|
||||
robots_disable_indexing: parse("LIBREDDIT_ROBOTS_DISABLE_INDEXING"),
|
||||
pushshift: parse("LIBREDDIT_PUSHSHIFT_FRONTEND"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_setting_from_config(name: &str, config: &Config) -> Option<String> {
|
||||
match name {
|
||||
"LIBREDDIT_SFW_ONLY" => config.sfw_only.clone(),
|
||||
"LIBREDDIT_DEFAULT_THEME" => config.default_theme.clone(),
|
||||
"LIBREDDIT_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(),
|
||||
"LIBREDDIT_DEFAULT_LAYOUT" => config.default_layout.clone(),
|
||||
"LIBREDDIT_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(),
|
||||
"LIBREDDIT_DEFAULT_POST_SORT" => config.default_post_sort.clone(),
|
||||
"LIBREDDIT_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(),
|
||||
"LIBREDDIT_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(),
|
||||
"LIBREDDIT_DEFAULT_USE_HLS" => config.default_use_hls.clone(),
|
||||
"LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(),
|
||||
"LIBREDDIT_DEFAULT_WIDE" => config.default_wide.clone(),
|
||||
"LIBREDDIT_DEFAULT_HIDE_AWARDS" => config.default_hide_awards.clone(),
|
||||
"LIBREDDIT_DEFAULT_SUBSCRIPTIONS" => config.default_subscriptions.clone(),
|
||||
"LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => config.default_disable_visit_reddit_confirmation.clone(),
|
||||
"LIBREDDIT_BANNER" => config.banner.clone(),
|
||||
"LIBREDDIT_ROBOTS_DISABLE_INDEXING" => config.robots_disable_indexing.clone(),
|
||||
"LIBREDDIT_PUSHSHIFT_FRONTEND" => config.pushshift.clone(),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves setting from environment variable or config file.
|
||||
pub(crate) fn get_setting(name: &str) -> Option<String> {
|
||||
get_setting_from_config(name, &CONFIG)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
use {sealed_test::prelude::*, std::fs::write};
|
||||
|
||||
#[test]
|
||||
fn test_deserialize() {
|
||||
// Must handle empty input
|
||||
let result = toml::from_str::<Config>("");
|
||||
assert!(result.is_ok(), "Error: {}", result.unwrap_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[sealed_test(env = [("LIBREDDIT_SFW_ONLY", "on")])]
|
||||
fn test_env_var() {
|
||||
assert!(crate::utils::sfw_only())
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[sealed_test]
|
||||
fn test_config() {
|
||||
let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#;
|
||||
write("libreddit.toml", config_to_write).unwrap();
|
||||
assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("best".into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])]
|
||||
fn test_env_config_precedence() {
|
||||
let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#;
|
||||
write("libreddit.toml", config_to_write).unwrap();
|
||||
assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])]
|
||||
fn test_alt_env_config_precedence() {
|
||||
let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#;
|
||||
write("libreddit.toml", config_to_write).unwrap();
|
||||
assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
|
||||
}
|
||||
#[test]
|
||||
#[sealed_test(env = [("LIBREDDIT_DEFAULT_SUBSCRIPTIONS", "news+bestof")])]
|
||||
fn test_default_subscriptions() {
|
||||
assert_eq!(get_setting("LIBREDDIT_DEFAULT_SUBSCRIPTIONS"), Some("news+bestof".into()));
|
||||
}
|
src/duplicates.rs (new file, 236 lines)
@@ -0,0 +1,236 @@
|
||||
// Handler for post duplicates.
|
||||
|
||||
use crate::client::json;
|
||||
use crate::server::RequestExt;
|
||||
use crate::subreddit::{can_access_quarantine, quarantine};
|
||||
use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, template, Post, Preferences};
|
||||
|
||||
use askama::Template;
|
||||
use hyper::{Body, Request, Response};
|
||||
use serde_json::Value;
|
||||
use std::borrow::ToOwned;
|
||||
use std::collections::HashSet;
|
||||
use std::vec::Vec;
|
||||
|
||||
/// DuplicatesParams contains the parameters in the URL.
|
||||
struct DuplicatesParams {
|
||||
before: String,
|
||||
after: String,
|
||||
sort: String,
|
||||
}
|
||||
|
||||
/// DuplicatesTemplate defines an Askama template for rendering duplicate
|
||||
/// posts.
|
||||
#[derive(Template)]
|
||||
#[template(path = "duplicates.html")]
|
||||
struct DuplicatesTemplate {
|
||||
/// params contains the relevant request parameters.
|
||||
params: DuplicatesParams,
|
||||
|
||||
/// post is the post whose ID is specified in the request URL. Note that
|
||||
/// this is not necessarily the "original" post.
|
||||
post: Post,
|
||||
|
||||
/// duplicates is the list of posts that, per Reddit, are duplicates of
|
||||
/// Post above.
|
||||
duplicates: Vec<Post>,
|
||||
|
||||
/// prefs are the user preferences.
|
||||
prefs: Preferences,
|
||||
|
||||
/// url is the request URL.
|
||||
url: String,
|
||||
|
||||
/// num_posts_filtered counts how many posts were filtered from the
|
||||
/// duplicates list.
|
||||
num_posts_filtered: u64,
|
||||
|
||||
/// all_posts_filtered is true if every duplicate was filtered. This is an
|
||||
/// edge case but can still happen.
|
||||
all_posts_filtered: bool,
|
||||
}
|
||||
|
||||
/// Make the GET request to Reddit. It assumes `req` is the appropriate Reddit
|
||||
/// REST endpoint for enumerating post duplicates.
|
||||
pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
|
||||
// Log the request in debugging mode
|
||||
#[cfg(debug_assertions)]
|
||||
dbg!(req.param("id").unwrap_or_default());
|
||||
|
||||
// Send the GET, and await JSON.
|
||||
match json(path, quarantined).await {
|
||||
// Process response JSON.
|
||||
Ok(response) => {
|
||||
let post = parse_post(&response[0]["data"]["children"][0]).await;
|
||||
|
||||
let req_url = req.uri().to_string();
|
||||
// Return the landing page if Reddit deems this post
|
||||
// NSFW, but we have also disabled the display of NSFW content
|
||||
// or if the instance is SFW-only
|
||||
if post.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) {
|
||||
return Ok(nsfw_landing(req, req_url).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let filters = get_filters(&req);
|
||||
let (duplicates, num_posts_filtered, all_posts_filtered) = parse_duplicates(&response[1], &filters).await;
|
||||
|
||||
// These are the values for the "before=", "after=", and "sort="
|
||||
// query params, respectively.
|
||||
let mut before: String = String::new();
|
||||
let mut after: String = String::new();
|
||||
let mut sort: String = String::new();
|
||||
|
||||
// FIXME: We have to perform a kludge to work around a Reddit API
|
||||
// bug.
|
||||
//
|
||||
// The JSON object in "data" will never contain a "before" value so
|
||||
// it is impossible to use it to determine our position in a
|
||||
// listing. We'll make do by getting the ID of the first post in
|
||||
// the listing, setting that as our "before" value, and ask Reddit
|
||||
// to give us a batch of duplicate posts up to that post.
|
||||
//
|
||||
// Likewise, if we provide a "before" request in the GET, the
|
||||
// result won't have an "after" in the JSON, in addition to missing
|
||||
// the "before." So we will have to use the final post in the list
|
||||
// of duplicates.
|
||||
//
|
||||
// That being said, we'll also need to capture the value of the
|
||||
// "sort=" parameter as well, so we will need to inspect the
|
||||
// query key-value pairs anyway.
|
||||
let l = duplicates.len();
|
||||
if l > 0 {
|
||||
// This gets set to true if "before=" is one of the GET params.
|
||||
let mut have_before: bool = false;
|
||||
|
||||
// This gets set to true if "after=" is one of the GET params.
|
||||
let mut have_after: bool = false;
|
||||
|
||||
// Inspect the query key-value pairs. We will need to record
|
||||
// the value of "sort=", along with checking to see if either
|
||||
// one of "before=" or "after=" are given.
|
||||
//
|
||||
// If we're in the middle of the batch (evidenced by the
|
||||
// presence of a "before=" or "after=" parameter in the GET),
|
||||
// then use the first post as the "before" reference.
|
||||
//
|
||||
// We'll do this iteratively. Better than with .map_or()
|
||||
// since a closure will continue to operate on remaining
|
||||
// elements even after we've determined one of "before=" or
|
||||
// "after=" (or both) are in the GET request.
|
||||
//
|
||||
// In practice, there should only ever be one of "before=" or
|
||||
// "after=" and never both.
|
||||
let query_str = req.uri().query().unwrap_or_default().to_string();
|
||||
|
||||
if !query_str.is_empty() {
|
||||
for param in query_str.split('&') {
|
||||
let kv: Vec<&str> = param.split('=').collect();
|
||||
if kv.len() < 2 {
|
||||
// Reject invalid query parameter.
|
||||
continue;
|
||||
}
|
||||
|
||||
let key: &str = kv[0];
|
||||
match key {
|
||||
"before" => have_before = true,
|
||||
"after" => have_after = true,
|
||||
"sort" => {
|
||||
let val: &str = kv[1];
|
||||
match val {
|
||||
"new" | "num_comments" => sort = val.to_string(),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if have_after {
|
||||
before = "t3_".to_owned();
|
||||
before.push_str(&duplicates[0].id);
|
||||
}
|
||||
|
||||
// Address potentially missing "after". If "before=" is in the
|
||||
// GET, then "after" will be null in the JSON (see FIXME
|
||||
// above).
|
||||
if have_before {
|
||||
// The next batch will need to start from one after the
|
||||
// last post in the current batch.
|
||||
after = "t3_".to_owned();
|
||||
after.push_str(&duplicates[l - 1].id);
|
||||
|
||||
// Here is where things get terrible. Notice that we
|
||||
// haven't set `before`. In order to do so, we will
|
||||
// need to know if there is a batch that exists before
|
||||
// this one, and doing so requires actually fetching the
|
||||
// previous batch. In other words, we have to do yet one
|
||||
// more GET to Reddit. There is no other way to determine
|
||||
// whether or not to define `before`.
|
||||
//
|
||||
// We'll mitigate that by requesting at most one duplicate.
|
||||
let new_path: String = format!(
|
||||
"{}.json?before=t3_{}&sort={}&limit=1&raw_json=1",
|
||||
req.uri().path(),
|
||||
&duplicates[0].id,
|
||||
if sort.is_empty() { "num_comments".to_string() } else { sort.clone() }
|
||||
);
|
||||
match json(new_path, true).await {
|
||||
Ok(response) => {
|
||||
if !response[1]["data"]["children"].as_array().unwrap_or(&Vec::new()).is_empty() {
|
||||
before = "t3_".to_owned();
|
||||
before.push_str(&duplicates[0].id);
|
||||
}
|
||||
}
|
||||
Err(msg) => {
|
||||
// Abort entirely if we couldn't get the previous
|
||||
// batch.
|
||||
return error(req, msg).await;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
after = response[1]["data"]["after"].as_str().unwrap_or_default().to_string();
|
||||
}
|
||||
}
|
||||
|
||||
template(DuplicatesTemplate {
|
||||
params: DuplicatesParams { before, after, sort },
|
||||
post,
|
||||
duplicates,
|
||||
prefs: Preferences::new(&req),
|
||||
url: req_url,
|
||||
num_posts_filtered,
|
||||
all_posts_filtered,
|
||||
})
|
||||
}
|
||||
|
||||
// Process error.
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" || msg == "gated" {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
quarantine(req, sub, msg)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// DUPLICATES
|
||||
async fn parse_duplicates(json: &serde_json::Value, filters: &HashSet<String>) -> (Vec<Post>, u64, bool) {
|
||||
let post_duplicates: &Vec<Value> = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned);
|
||||
let mut duplicates: Vec<Post> = Vec::new();
|
||||
|
||||
// Process each post and place them in the Vec<Post>.
|
||||
for val in post_duplicates.iter() {
|
||||
let post: Post = parse_post(val).await;
|
||||
duplicates.push(post);
|
||||
}
|
||||
|
||||
let (num_posts_filtered, all_posts_filtered) = filter_posts(&mut duplicates, filters);
|
||||
(duplicates, num_posts_filtered, all_posts_filtered)
|
||||
}
|
src/instance_info.rs (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
use crate::{
|
||||
config::{Config, CONFIG},
|
||||
server::RequestExt,
|
||||
utils::{ErrorTemplate, Preferences},
|
||||
};
|
||||
use askama::Template;
|
||||
use build_html::{Container, Html, HtmlContainer, Table};
|
||||
use hyper::{http::Error, Body, Request, Response};
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
// This is the local static that is initialized at runtime (technically at
|
||||
// the first request to the info endpoint) and contains the data
|
||||
// retrieved from the info endpoint.
|
||||
pub(crate) static INSTANCE_INFO: Lazy<InstanceInfo> = Lazy::new(InstanceInfo::new);
|
||||
|
||||
/// Handles instance info endpoint
|
||||
pub async fn instance_info(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// This will retrieve the extension given, or create a new string - which will
|
||||
// simply become the last option, an HTML page.
|
||||
let extension = req.param("extension").unwrap_or(String::new());
|
||||
let response = match extension.as_str() {
|
||||
"yaml" | "yml" => info_yaml(),
|
||||
"txt" => info_txt(),
|
||||
"json" => info_json(),
|
||||
"html" | "" => info_html(req),
|
||||
_ => {
|
||||
let error = ErrorTemplate {
|
||||
msg: "Error: Invalid info extension".into(),
|
||||
prefs: Preferences::new(&req),
|
||||
url: req.uri().to_string(),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
Response::builder().status(404).header("content-type", "text/html; charset=utf-8").body(error.into())
|
||||
}
|
||||
};
|
||||
response.map_err(|err| format!("{err}"))
|
||||
}
|
||||
|
||||
fn info_json() -> Result<Response<Body>, Error> {
|
||||
if let Ok(body) = serde_json::to_string(&*INSTANCE_INFO) {
|
||||
Response::builder().status(200).header("content-type", "application/json").body(body.into())
|
||||
} else {
|
||||
Response::builder()
|
||||
.status(500)
|
||||
.header("content-type", "text/plain")
|
||||
.body(Body::from("Error serializing JSON"))
|
||||
}
|
||||
}
|
||||
|
||||
fn info_yaml() -> Result<Response<Body>, Error> {
|
||||
if let Ok(body) = serde_yaml::to_string(&*INSTANCE_INFO) {
|
||||
// We can use `application/yaml` as media type, though there is no guarantee
|
||||
// that browsers will honor it. But we'll do it anyway. See:
|
||||
// https://github.com/ietf-wg-httpapi/mediatypes/blob/main/draft-ietf-httpapi-yaml-mediatypes.md#media-type-applicationyaml-application-yaml
|
||||
Response::builder().status(200).header("content-type", "application/yaml").body(body.into())
|
||||
} else {
|
||||
Response::builder()
|
||||
.status(500)
|
||||
.header("content-type", "text/plain")
|
||||
.body(Body::from("Error serializing YAML."))
|
||||
}
|
||||
}
|
||||
|
||||
fn info_txt() -> Result<Response<Body>, Error> {
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "text/plain")
|
||||
.body(Body::from(INSTANCE_INFO.to_string(StringType::Raw)))
|
||||
}
|
||||
fn info_html(req: Request<Body>) -> Result<Response<Body>, Error> {
|
||||
let message = MessageTemplate {
|
||||
title: String::from("Instance information"),
|
||||
body: INSTANCE_INFO.to_string(StringType::Html),
|
||||
prefs: Preferences::new(&req),
|
||||
url: req.uri().to_string(),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
Response::builder().status(200).header("content-type", "text/html; charset=utf8").body(Body::from(message))
|
||||
}
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub(crate) struct InstanceInfo {
|
||||
crate_version: String,
|
||||
git_commit: String,
|
||||
deploy_date: String,
|
||||
compile_mode: String,
|
||||
deploy_unix_ts: i64,
|
||||
config: Config,
|
||||
}
|
||||
|
||||
impl InstanceInfo {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
crate_version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
git_commit: env!("GIT_HASH").to_string(),
|
||||
deploy_date: OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()).to_string(),
|
||||
#[cfg(debug_assertions)]
|
||||
compile_mode: "Debug".into(),
|
||||
#[cfg(not(debug_assertions))]
|
||||
compile_mode: "Release".into(),
|
||||
deploy_unix_ts: OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()).unix_timestamp(),
|
||||
config: CONFIG.clone(),
|
||||
}
|
||||
}
|
||||
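/// Render the instance settings and default user preferences as HTML tables for the info page.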
fn to_table(&self) -> String {
|
||||
let mut container = Container::default();
|
||||
let convert = |o: &Option<String>| -> String { o.clone().unwrap_or("<span class=\"unset\"><i>Unset</i></span>".to_owned()) };
|
||||
if let Some(banner) = &self.config.banner {
|
||||
container.add_header(3, "Instance banner");
|
||||
container.add_raw("<br />");
|
||||
container.add_paragraph(banner);
|
||||
container.add_raw("<br />");
|
||||
}
|
||||
container.add_table(
|
||||
Table::from([
|
||||
["Crate version", &self.crate_version],
|
||||
["Git commit", &self.git_commit],
|
||||
["Deploy date", &self.deploy_date],
|
||||
["Deploy timestamp", &self.deploy_unix_ts.to_string()],
|
||||
["Compile mode", &self.compile_mode],
|
||||
["SFW only", &convert(&self.config.sfw_only)],
|
||||
["Pushshift frontend", &convert(&self.config.pushshift)],
|
||||
//TODO: fallback to crate::config::DEFAULT_PUSHSHIFT_FRONTEND
|
||||
])
|
||||
.with_header_row(["Settings"]),
|
||||
);
|
||||
container.add_raw("<br />");
|
||||
container.add_table(
|
||||
Table::from([
|
||||
["Hide awards", &convert(&self.config.default_hide_awards)],
|
||||
["Theme", &convert(&self.config.default_theme)],
|
||||
["Front page", &convert(&self.config.default_front_page)],
|
||||
["Layout", &convert(&self.config.default_layout)],
|
||||
["Wide", &convert(&self.config.default_wide)],
|
||||
["Comment sort", &convert(&self.config.default_comment_sort)],
|
||||
["Post sort", &convert(&self.config.default_post_sort)],
|
||||
["Show NSFW", &convert(&self.config.default_show_nsfw)],
|
||||
["Blur NSFW", &convert(&self.config.default_blur_nsfw)],
|
||||
["Use HLS", &convert(&self.config.default_use_hls)],
|
||||
["Hide HLS notification", &convert(&self.config.default_hide_hls_notification)],
|
||||
["Subscriptions", &convert(&self.config.default_subscriptions)],
|
||||
])
|
||||
.with_header_row(["Default preferences"]),
|
||||
);
|
||||
container.to_html_string().replace("<th>", "<th colspan=\"2\">")
|
||||
}
|
||||
fn to_string(&self, string_type: StringType) -> String {
|
||||
match string_type {
|
||||
StringType::Raw => {
|
||||
format!(
|
||||
"Crate version: {}\n
|
||||
Git commit: {}\n
|
||||
Deploy date: {}\n
|
||||
Deploy timestamp: {}\n
|
||||
Compile mode: {}\n
|
||||
SFW only: {:?}\n
|
||||
Pushshift frontend: {:?}\n
|
||||
Config:\n
|
||||
Banner: {:?}\n
|
||||
Hide awards: {:?}\n
|
||||
Default theme: {:?}\n
|
||||
Default front page: {:?}\n
|
||||
Default layout: {:?}\n
|
||||
Default wide: {:?}\n
|
||||
Default comment sort: {:?}\n
|
||||
Default post sort: {:?}\n
|
||||
Default show NSFW: {:?}\n
|
||||
Default blur NSFW: {:?}\n
|
||||
Default use HLS: {:?}\n
|
||||
Default hide HLS notification: {:?}\n
|
||||
Default subscriptions: {:?}\n",
|
||||
self.crate_version,
|
||||
self.git_commit,
|
||||
self.deploy_date,
|
||||
self.deploy_unix_ts,
|
||||
self.compile_mode,
|
||||
self.config.sfw_only,
|
||||
self.config.pushshift,
|
||||
self.config.banner,
|
||||
self.config.default_hide_awards,
|
||||
self.config.default_theme,
|
||||
self.config.default_front_page,
|
||||
self.config.default_layout,
|
||||
self.config.default_wide,
|
||||
self.config.default_comment_sort,
|
||||
self.config.default_post_sort,
|
||||
self.config.default_show_nsfw,
|
||||
self.config.default_blur_nsfw,
|
||||
self.config.default_use_hls,
|
||||
self.config.default_hide_hls_notification,
|
||||
self.config.default_subscriptions,
|
||||
)
|
||||
}
|
||||
StringType::Html => self.to_table(),
|
||||
}
|
||||
}
|
||||
}
|
||||
enum StringType {
|
||||
Raw,
|
||||
Html,
|
||||
}
|
||||
#[derive(Template)]
|
||||
#[template(path = "message.html")]
|
||||
struct MessageTemplate {
|
||||
title: String,
|
||||
body: String,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
src/main.rs (418 lines changed)
@@ -1,115 +1,345 @@
|
||||
// Import Crates
|
||||
use actix_web::{middleware, web, App, HttpResponse, HttpServer}; // dev::Service
|
||||
// Global specifiers
|
||||
#![forbid(unsafe_code)]
|
||||
#![allow(clippy::cmp_owned)]
|
||||
|
||||
// Reference local files
|
||||
mod config;
|
||||
mod duplicates;
|
||||
mod instance_info;
|
||||
mod post;
|
||||
mod proxy;
|
||||
mod search;
|
||||
mod settings;
|
||||
mod subreddit;
|
||||
mod user;
|
||||
mod utils;
|
||||
|
||||
// Import Crates
|
||||
use clap::{Arg, ArgAction, Command};
|
||||
|
||||
use futures_lite::FutureExt;
|
||||
use hyper::{header::HeaderValue, Body, Request, Response};
|
||||
|
||||
mod client;
|
||||
use client::{canonical_path, proxy};
|
||||
use once_cell::sync::Lazy;
|
||||
use server::RequestExt;
|
||||
use utils::{error, redirect, ThemeAssets};
|
||||
|
||||
mod server;
|
||||
|
||||
// Create Services
|
||||
async fn style() -> HttpResponse {
|
||||
HttpResponse::Ok().content_type("text/css").body(include_str!("../static/style.css"))
|
||||
|
||||
// Required for the manifest to be valid
|
||||
async fn pwa_logo() -> Result<Response<Body>, String> {
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "image/png")
|
||||
.body(include_bytes!("../static/logo.png").as_ref().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
async fn robots() -> HttpResponse {
|
||||
HttpResponse::Ok()
|
||||
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
|
||||
.body("User-agent: *\nAllow: /")
|
||||
// Required for iOS App Icons
|
||||
async fn iphone_logo() -> Result<Response<Body>, String> {
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "image/png")
|
||||
.body(include_bytes!("../static/apple-touch-icon.png").as_ref().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
async fn favicon() -> HttpResponse {
|
||||
HttpResponse::Ok()
|
||||
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
|
||||
.body(include_bytes!("../static/favicon.ico").as_ref())
|
||||
async fn favicon() -> Result<Response<Body>, String> {
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "image/vnd.microsoft.icon")
|
||||
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
|
||||
.body(include_bytes!("../static/favicon.ico").as_ref().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
let mut address = "0.0.0.0:8080".to_string();
|
||||
// let mut https = false;
|
||||
async fn font() -> Result<Response<Body>, String> {
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "font/woff2")
|
||||
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
|
||||
.body(include_bytes!("../static/Inter.var.woff2").as_ref().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
for arg in std::env::args().collect::<Vec<String>>() {
|
||||
match arg.split('=').collect::<Vec<&str>>()[0] {
|
||||
"--address" | "-a" => address = arg.split('=').collect::<Vec<&str>>()[1].to_string(),
|
||||
// "--redirect-https" | "-r" => https = true,
|
||||
_ => (),
|
||||
async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Response<Body>, String> {
|
||||
let mut res = Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", content_type)
|
||||
.body(body.to_string().into())
|
||||
.unwrap_or_default();
|
||||
|
||||
if cache {
|
||||
if let Ok(val) = HeaderValue::from_str("public, max-age=1209600, s-maxage=86400") {
|
||||
res.headers_mut().insert("Cache-Control", val);
|
||||
}
|
||||
}
|
||||
|
||||
// start http server
|
||||
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), &address);
|
||||
|
||||
HttpServer::new(|| {
|
||||
App::new()
|
||||
// Redirect to HTTPS
|
||||
// .wrap_fn(|req, srv| { let fut = srv.call(req); async { let mut res = fut.await?; if https {} Ok(res) } })
|
||||
// Append trailing slash and remove double slashes
|
||||
.wrap(middleware::NormalizePath::default())
|
||||
// Default service in case no routes match
|
||||
.default_service(web::get().to(|| utils::error("Nothing here".to_string())))
|
||||
// Read static files
|
||||
.route("/style.css/", web::get().to(style))
|
||||
.route("/favicon.ico/", web::get().to(favicon))
|
||||
.route("/robots.txt/", web::get().to(robots))
|
||||
// Proxy media through Libreddit
|
||||
.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
|
||||
// Browse user profile
|
||||
.service(
|
||||
web::scope("/{scope:user|u}").service(
|
||||
web::scope("/{username}").route("/", web::get().to(user::profile)).service(
|
||||
web::scope("/comments/{id}/{title}")
|
||||
.route("/", web::get().to(post::item))
|
||||
.route("/{comment_id}/", web::get().to(post::item)),
|
||||
),
|
||||
),
|
||||
)
|
||||
// Configure settings
|
||||
.service(web::resource("/settings/").route(web::get().to(settings::get)).route(web::post().to(settings::set)))
|
||||
// Subreddit services
|
||||
.service(
|
||||
web::scope("/r/{sub}")
|
||||
// See posts and info about subreddit
|
||||
.route("/", web::get().to(subreddit::page))
|
||||
.route("/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
|
||||
// View post on subreddit
|
||||
.service(
|
||||
web::scope("/comments/{id}/{title}")
|
||||
.route("/", web::get().to(post::item))
|
||||
.route("/{comment_id}/", web::get().to(post::item)),
|
||||
)
|
||||
// Search inside subreddit
|
||||
.route("/search/", web::get().to(search::find))
|
||||
// View wiki of subreddit
|
||||
.service(
|
||||
web::scope("/wiki")
|
||||
.route("/", web::get().to(subreddit::wiki))
|
||||
.route("/{page}/", web::get().to(subreddit::wiki)),
|
||||
),
|
||||
)
|
||||
// Universal services
|
||||
.service(
|
||||
web::scope("")
|
||||
// Front page
|
||||
.route("/", web::get().to(subreddit::page))
|
||||
.route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
|
||||
// View Reddit wiki
|
||||
.service(
|
||||
web::scope("/wiki")
|
||||
.route("/", web::get().to(subreddit::wiki))
|
||||
.route("/{page}/", web::get().to(subreddit::wiki)),
|
||||
)
|
||||
// Search all of Reddit
|
||||
.route("/search/", web::get().to(search::find))
|
||||
// Short link for post
|
||||
.route("/{id:.{5,6}}/", web::get().to(post::item)),
|
||||
)
|
||||
})
|
||||
.bind(&address)
|
||||
.unwrap_or_else(|e| panic!("Cannot bind to the address {}: {}", address, e))
|
||||
.run()
|
||||
.await
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
async fn style() -> Result<Response<Body>, String> {
|
||||
let mut res = include_str!("../static/style.css").to_string();
|
||||
for file in ThemeAssets::iter() {
|
||||
res.push('\n');
|
||||
let theme = ThemeAssets::get(file.as_ref()).unwrap();
|
||||
res.push_str(std::str::from_utf8(theme.data.as_ref()).unwrap());
|
||||
}
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "text/css")
|
||||
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
|
||||
.body(res.to_string().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let matches = Command::new("Libreddit")
|
||||
.version(env!("CARGO_PKG_VERSION"))
|
||||
.about("Private front-end for Reddit written in Rust ")
|
||||
.arg(
|
||||
Arg::new("redirect-https")
|
||||
.short('r')
|
||||
.long("redirect-https")
|
||||
.help("Redirect all HTTP requests to HTTPS (no longer functional)")
|
||||
.num_args(0),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("address")
|
||||
.short('a')
|
||||
.long("address")
|
||||
.value_name("ADDRESS")
|
||||
.help("Sets address to listen on")
|
||||
.default_value("0.0.0.0")
|
||||
.num_args(1),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("port")
|
||||
.short('p')
|
||||
.long("port")
|
||||
.value_name("PORT")
|
||||
.env("PORT")
|
||||
.help("Port to listen on")
|
||||
.default_value("8080")
|
||||
.action(ArgAction::Set)
|
||||
.num_args(1),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("hsts")
|
||||
.short('H')
|
||||
.long("hsts")
|
||||
.value_name("EXPIRE_TIME")
|
||||
.help("HSTS header to tell browsers that this site should only be accessed over HTTPS")
|
||||
.default_value("604800")
|
||||
.num_args(1),
|
||||
)
|
||||
.get_matches();
|
||||
|
||||
let address = matches.get_one::<String>("address").unwrap();
|
||||
let port = matches.get_one::<String>("port").unwrap();
|
||||
let hsts = matches.get_one("hsts").map(|m: &String| m.as_str());
|
||||
|
||||
let listener = [address, ":", port].concat();
|
||||
|
||||
println!("Starting Libreddit...");
|
||||
|
||||
// Begin constructing a server
|
||||
let mut app = server::Server::new();
|
||||
|
||||
// Force evaluation of statics. In instance_info case, we need to evaluate
|
||||
// the timestamp so deploy date is accurate - in config case, we need to
|
||||
// evaluate the configuration to avoid paying the penalty on the first request.
|
||||
|
||||
Lazy::force(&config::CONFIG);
|
||||
Lazy::force(&instance_info::INSTANCE_INFO);
|
||||
|
||||
// Define default headers (added to all responses)
|
||||
app.default_headers = headers! {
|
||||
"Referrer-Policy" => "no-referrer",
|
||||
"X-Content-Type-Options" => "nosniff",
|
||||
"X-Frame-Options" => "DENY",
|
||||
"Content-Security-Policy" => "default-src 'none'; font-src 'self'; script-src 'self' blob:; manifest-src 'self'; media-src 'self' data: blob: about:; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none'; connect-src 'self'; worker-src blob:;"
|
||||
};
|
||||
|
||||
if let Some(expire_time) = hsts {
|
||||
if let Ok(val) = HeaderValue::from_str(&format!("max-age={}", expire_time)) {
|
||||
app.default_headers.insert("Strict-Transport-Security", val);
|
||||
}
|
||||
}
|
||||
|
||||
// Read static files
|
||||
app.at("/style.css").get(|_| style().boxed());
|
||||
app
|
||||
.at("/manifest.json")
|
||||
.get(|_| resource(include_str!("../static/manifest.json"), "application/json", false).boxed());
|
||||
app.at("/robots.txt").get(|_| {
|
||||
resource(
|
||||
if match config::get_setting("LIBREDDIT_ROBOTS_DISABLE_INDEXING") {
|
||||
Some(val) => val == "on",
|
||||
None => false,
|
||||
} {
|
||||
"User-agent: *\nDisallow: /"
|
||||
} else {
|
||||
"User-agent: *\nDisallow: /u/\nDisallow: /user/"
|
||||
},
|
||||
"text/plain",
|
||||
true,
|
||||
)
|
||||
.boxed()
|
||||
});
|
||||
app.at("/favicon.ico").get(|_| favicon().boxed());
|
||||
app.at("/logo.png").get(|_| pwa_logo().boxed());
|
||||
app.at("/Inter.var.woff2").get(|_| font().boxed());
|
||||
app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed());
|
||||
app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed());
|
||||
app
|
||||
.at("/playHLSVideo.js")
|
||||
.get(|_| resource(include_str!("../static/playHLSVideo.js"), "text/javascript", false).boxed());
|
||||
app
|
||||
.at("/hls.min.js")
|
||||
.get(|_| resource(include_str!("../static/hls.min.js"), "text/javascript", false).boxed());
|
||||
|
||||
// Proxy media through Libreddit
|
||||
app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
|
||||
app.at("/hls/:id/*path").get(|r| proxy(r, "https://v.redd.it/{id}/{path}").boxed());
|
||||
app.at("/img/*path").get(|r| proxy(r, "https://i.redd.it/{path}").boxed());
|
||||
app.at("/thumb/:point/:id").get(|r| proxy(r, "https://{point}.thumbs.redditmedia.com/{id}").boxed());
|
||||
app.at("/emoji/:id/:name").get(|r| proxy(r, "https://emoji.redditmedia.com/{id}/{name}").boxed());
|
||||
app
|
||||
.at("/preview/:loc/award_images/:fullname/:id")
|
||||
.get(|r| proxy(r, "https://{loc}view.redd.it/award_images/{fullname}/{id}").boxed());
|
||||
app.at("/preview/:loc/:id").get(|r| proxy(r, "https://{loc}view.redd.it/{id}").boxed());
|
||||
app.at("/style/*path").get(|r| proxy(r, "https://styles.redditmedia.com/{path}").boxed());
|
||||
app.at("/static/*path").get(|r| proxy(r, "https://www.redditstatic.com/{path}").boxed());
|
||||
|
||||
// Browse user profile
|
||||
app
|
||||
.at("/u/:name")
|
||||
.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
|
||||
app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed());
|
||||
app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
|
||||
|
||||
app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account".to_string()).boxed());
|
||||
app.at("/user/:name").get(|r| user::profile(r).boxed());
|
||||
app.at("/user/:name/:listing").get(|r| user::profile(r).boxed());
|
||||
app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed());
|
||||
app.at("/user/:name/comments/:id/:title").get(|r| post::item(r).boxed());
|
||||
app.at("/user/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
|
||||
|
||||
// Configure settings
|
||||
app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed());
|
||||
app.at("/settings/restore").get(|r| settings::restore(r).boxed());
|
||||
app.at("/settings/update").get(|r| settings::update(r).boxed());
|
||||
|
||||
// Subreddit services
|
||||
app
|
||||
.at("/r/:sub")
|
||||
.get(|r| subreddit::community(r).boxed())
|
||||
.post(|r| subreddit::add_quarantine_exception(r).boxed());
|
||||
|
||||
app
|
||||
.at("/r/u_:name")
|
||||
.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
|
||||
|
||||
app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
|
||||
app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
|
||||
app.at("/r/:sub/filter").post(|r| subreddit::subscriptions_filters(r).boxed());
|
||||
app.at("/r/:sub/unfilter").post(|r| subreddit::subscriptions_filters(r).boxed());
|
||||
|
||||
app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
|
||||
app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
|
||||
app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
|
||||
app.at("/comments/:id").get(|r| post::item(r).boxed());
|
||||
app.at("/comments/:id/comments").get(|r| post::item(r).boxed());
|
||||
app.at("/comments/:id/comments/:comment_id").get(|r| post::item(r).boxed());
|
||||
app.at("/comments/:id/:title").get(|r| post::item(r).boxed());
|
||||
app.at("/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
|
||||
|
||||
app.at("/r/:sub/duplicates/:id").get(|r| duplicates::item(r).boxed());
|
||||
app.at("/r/:sub/duplicates/:id/:title").get(|r| duplicates::item(r).boxed());
|
||||
app.at("/duplicates/:id").get(|r| duplicates::item(r).boxed());
|
||||
app.at("/duplicates/:id/:title").get(|r| duplicates::item(r).boxed());
|
||||
|
||||
app.at("/r/:sub/search").get(|r| search::find(r).boxed());
|
||||
|
||||
app
|
||||
.at("/r/:sub/w")
|
||||
.get(|r| async move { Ok(redirect(format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed());
|
||||
app
|
||||
.at("/r/:sub/w/*page")
|
||||
.get(|r| async move { Ok(redirect(format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed());
|
||||
app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed());
|
||||
app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed());
|
||||
|
||||
app.at("/r/:sub/about/sidebar").get(|r| subreddit::sidebar(r).boxed());
|
||||
|
||||
app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed());
|
||||
|
||||
// Front page
|
||||
app.at("/").get(|r| subreddit::community(r).boxed());
|
||||
|
||||
// View Reddit wiki
|
||||
app.at("/w").get(|_| async { Ok(redirect("/wiki".to_string())) }.boxed());
|
||||
app
|
||||
.at("/w/*page")
|
||||
.get(|r| async move { Ok(redirect(format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed());
|
||||
app.at("/wiki").get(|r| subreddit::wiki(r).boxed());
|
||||
app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed());
|
||||
|
||||
// Search all of Reddit
|
||||
app.at("/search").get(|r| search::find(r).boxed());
|
||||
|
||||
// Handle about pages
|
||||
app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());
|
||||
|
||||
// Instance info page
|
||||
app.at("/info").get(|r| instance_info::instance_info(r).boxed());
|
||||
app.at("/info.:extension").get(|r| instance_info::instance_info(r).boxed());
|
||||
|
||||
app.at("/:id").get(|req: Request<Body>| {
|
||||
Box::pin(async move {
|
||||
match req.param("id").as_deref() {
|
||||
// Sort front page
|
||||
Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,
|
||||
|
||||
// Short link for post
|
||||
Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{}", id)).await {
|
||||
Ok(path_opt) => match path_opt {
|
||||
Some(path) => Ok(redirect(path)),
|
||||
None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
|
||||
},
|
||||
Err(e) => error(req, e).await,
|
||||
},
|
||||
|
||||
// Error message for unknown pages
|
||||
_ => error(req, "Nothing here".to_string()).await,
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
// Default service in case no routes match
|
||||
app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed());
|
||||
|
||||
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), listener);
|
||||
|
||||
let server = app.listen(listener);
|
||||
|
||||
// Run this server for... forever!
|
||||
if let Err(e) = server.await {
|
||||
eprintln!("Server error: {}", e);
|
||||
}
|
||||
}
|
||||
|
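Aside: the routing calls above key every route on the HTTP method plus the path (see Route::method and Server::listen in the new src/server.rs further down). A minimal sketch of that lookup using route_recognizer directly; the handler strings and the "/r/rust" request are illustrative placeholders, not the real handler functions:

use route_recognizer::Router;

fn main() {
	// Routes are registered under "/{METHOD}{path}", so GET and POST handlers
	// for the same path never collide.
	let mut router = Router::new();
	router.add("/GET/r/:sub", "subreddit::community");
	router.add("/POST/r/:sub", "subreddit::add_quarantine_exception");

	// An incoming "GET /r/rust" request is looked up as "/GET/r/rust".
	let found = router.recognize("/GET/r/rust").expect("route should match");
	assert_eq!(**found.handler(), "subreddit::community");
	assert_eq!(found.params().find("sub"), Some("rust"));
}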
348 src/post.rs
@@ -1,177 +1,253 @@
|
||||
// CRATES
|
||||
use crate::utils::*;
|
||||
use actix_web::{HttpRequest, HttpResponse};
|
||||
|
||||
use async_recursion::async_recursion;
|
||||
use crate::client::json;
|
||||
use crate::config::get_setting;
|
||||
use crate::server::RequestExt;
|
||||
use crate::subreddit::{can_access_quarantine, quarantine};
|
||||
use crate::utils::{
|
||||
error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences,
|
||||
};
|
||||
use hyper::{Body, Request, Response};
|
||||
|
||||
use askama::Template;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use std::collections::HashSet;
|
||||
|
||||
// STRUCTS
|
||||
#[derive(Template)]
|
||||
#[template(path = "post.html", escape = "none")]
|
||||
#[template(path = "post.html")]
|
||||
struct PostTemplate {
|
||||
comments: Vec<Comment>,
|
||||
post: Post,
|
||||
sort: String,
|
||||
prefs: Preferences,
|
||||
single_thread: bool,
|
||||
url: String,
|
||||
url_without_query: String,
|
||||
comment_query: String,
|
||||
}
|
||||
|
||||
pub async fn item(req: HttpRequest) -> HttpResponse {
|
||||
static COMMENT_SEARCH_CAPTURE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\?q=(.*)&type=comment"#).unwrap());
|
||||
|
||||
pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Build Reddit API path
|
||||
let mut path: String = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
|
||||
let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
let url = req.uri().to_string();
|
||||
|
||||
// Set sort to sort query parameter
|
||||
let mut sort: String = param(&path, "sort");
|
||||
let sort = param(&path, "sort").unwrap_or_else(|| {
|
||||
// Grab default comment sort method from Cookies
|
||||
let default_sort = setting(&req, "comment_sort");
|
||||
|
||||
// Grab default comment sort method from Cookies
|
||||
let default_sort = cookie(&req, "comment_sort");
|
||||
|
||||
// If there's no sort query but there's a default sort, set sort to default_sort
|
||||
if sort.is_empty() && !default_sort.is_empty() {
|
||||
sort = default_sort;
|
||||
path = format!("{}.json?{}&sort={}&raw_json=1", req.path(), req.query_string(), sort);
|
||||
}
|
||||
// If there's no sort query but there's a default sort, set sort to default_sort
|
||||
if default_sort.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), default_sort);
|
||||
default_sort
|
||||
}
|
||||
});
|
||||
|
||||
// Log the post ID being fetched in debug mode
|
||||
#[cfg(debug_assertions)]
|
||||
dbg!(req.match_info().get("id").unwrap_or(""));
|
||||
dbg!(req.param("id").unwrap_or_default());
|
||||
|
||||
let single_thread = req.param("comment_id").is_some();
|
||||
let highlighted_comment = &req.param("comment_id").unwrap_or_default();
|
||||
|
||||
// Send a request to the url, receive JSON in response
|
||||
match request(&path).await {
|
||||
match json(path, quarantined).await {
|
||||
// Otherwise, grab the JSON output from the request
|
||||
Ok(res) => {
|
||||
Ok(response) => {
|
||||
// Parse the JSON into Post and Comment structs
|
||||
let post = parse_post(&res[0]).await;
|
||||
let comments = parse_comments(&res[1]).await;
|
||||
let post = parse_post(&response[0]["data"]["children"][0]).await;
|
||||
|
||||
let req_url = req.uri().to_string();
|
||||
// Return the landing page if Reddit deems this post
|
||||
// NSFW, but we have also disabled the display of NSFW content
|
||||
// or if the instance is SFW-only.
|
||||
if post.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) {
|
||||
return Ok(nsfw_landing(req, req_url).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let query = match COMMENT_SEARCH_CAPTURE.captures(&url) {
|
||||
Some(captures) => captures.get(1).unwrap().as_str().replace("%20", " ").replace('+', " "),
|
||||
None => String::new(),
|
||||
};
|
||||
|
||||
let comments = match query.as_str() {
|
||||
"" => parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &req),
|
||||
_ => query_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &query, &req),
|
||||
};
|
||||
|
||||
// Use the Post and Comment structs to generate a website to show users
|
||||
let s = PostTemplate {
|
||||
template(PostTemplate {
|
||||
comments,
|
||||
post,
|
||||
url_without_query: url.clone().trim_end_matches(&format!("?q={query}&type=comment")).to_string(),
|
||||
sort,
|
||||
prefs: prefs(req),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
HttpResponse::Ok().content_type("text/html").body(s)
|
||||
prefs: Preferences::new(&req),
|
||||
single_thread,
|
||||
url: req_url,
|
||||
comment_query: query,
|
||||
})
|
||||
}
|
||||
// If the Reddit API returns an error, exit and send error page to user
|
||||
Err(msg) => error(msg).await,
|
||||
}
|
||||
}
|
||||
|
||||
// POSTS
|
||||
async fn parse_post(json: &serde_json::Value) -> Post {
|
||||
// Retrieve post (as opposed to comments) from JSON
|
||||
let post: &serde_json::Value = &json["data"]["children"][0];
|
||||
|
||||
// Grab UTC time as unix timestamp
|
||||
let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
|
||||
// Parse post score and upvote ratio
|
||||
let score = post["data"]["score"].as_i64().unwrap_or_default();
|
||||
let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
|
||||
|
||||
// Determine the type of media along with the media URL
|
||||
let (post_type, media) = media(&post["data"]).await;
|
||||
|
||||
// Build a post using data parsed from Reddit post API
|
||||
Post {
|
||||
id: val(post, "id"),
|
||||
title: val(post, "title"),
|
||||
community: val(post, "subreddit"),
|
||||
body: rewrite_url(&val(post, "selftext_html")),
|
||||
author: Author {
|
||||
name: val(post, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: parse_rich_flair(
|
||||
val(post, "author_flair_type"),
|
||||
post["data"]["author_flair_richtext"].as_array(),
|
||||
post["data"]["author_flair_text"].as_str(),
|
||||
),
|
||||
background_color: val(post, "author_flair_background_color"),
|
||||
foreground_color: val(post, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(post, "distinguished"),
|
||||
},
|
||||
permalink: val(post, "permalink"),
|
||||
score: format_num(score),
|
||||
upvote_ratio: ratio as i64,
|
||||
post_type,
|
||||
thumbnail: format_url(val(post, "thumbnail").as_str()),
|
||||
flair: Flair {
|
||||
flair_parts: parse_rich_flair(
|
||||
val(post, "link_flair_type"),
|
||||
post["data"]["link_flair_richtext"].as_array(),
|
||||
post["data"]["link_flair_text"].as_str(),
|
||||
),
|
||||
background_color: val(post, "link_flair_background_color"),
|
||||
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
||||
"black".to_string()
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" || msg == "gated" {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
quarantine(req, sub, msg)
|
||||
} else {
|
||||
"white".to_string()
|
||||
},
|
||||
},
|
||||
flags: Flags {
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
|
||||
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
|
||||
},
|
||||
media,
|
||||
domain: val(post, "domain"),
|
||||
rel_time,
|
||||
created,
|
||||
error(req, msg).await
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// COMMENTS
|
||||
#[async_recursion]
|
||||
async fn parse_comments(json: &serde_json::Value) -> Vec<Comment> {
|
||||
// Separate the comment JSON into a Vector of comments
|
||||
let comment_data = match json["data"]["children"].as_array() {
|
||||
Some(f) => f.to_owned(),
|
||||
None => Vec::new(),
|
||||
};
|
||||
|
||||
let mut comments: Vec<Comment> = Vec::new();
|
||||
fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>, req: &Request<Body>) -> Vec<Comment> {
|
||||
// Parse the comment JSON into a Vector of Comments
|
||||
let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
|
||||
|
||||
// For each comment, retrieve the values to build a Comment object
|
||||
for comment in comment_data {
|
||||
let unix_time = comment["data"]["created_utc"].as_f64().unwrap_or_default();
|
||||
if unix_time == 0.0 {
|
||||
continue;
|
||||
}
|
||||
let (rel_time, created) = time(unix_time);
|
||||
|
||||
let score = comment["data"]["score"].as_i64().unwrap_or(0);
|
||||
let body = rewrite_url(&val(&comment, "body_html"));
|
||||
|
||||
let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
|
||||
parse_comments(&comment["data"]["replies"]).await
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
comments.push(Comment {
|
||||
id: val(&comment, "id"),
|
||||
body,
|
||||
author: Author {
|
||||
name: val(&comment, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: parse_rich_flair(
|
||||
val(&comment, "author_flair_type"),
|
||||
comment["data"]["author_flair_richtext"].as_array(),
|
||||
comment["data"]["author_flair_text"].as_str(),
|
||||
),
|
||||
background_color: val(&comment, "author_flair_background_color"),
|
||||
foreground_color: val(&comment, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(&comment, "distinguished"),
|
||||
},
|
||||
score: format_num(score),
|
||||
rel_time,
|
||||
created,
|
||||
replies,
|
||||
});
|
||||
}
|
||||
|
||||
comments
|
||||
.into_iter()
|
||||
.map(|comment| {
|
||||
let data = &comment["data"];
|
||||
let replies: Vec<Comment> = if data["replies"].is_object() {
|
||||
parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, req)
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
build_comment(&comment, data, replies, post_link, post_author, highlighted_comment, filters, req)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn query_comments(
|
||||
json: &serde_json::Value,
|
||||
post_link: &str,
|
||||
post_author: &str,
|
||||
highlighted_comment: &str,
|
||||
filters: &HashSet<String>,
|
||||
query: &str,
|
||||
req: &Request<Body>,
|
||||
) -> Vec<Comment> {
|
||||
let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
|
||||
let mut results = Vec::new();
|
||||
|
||||
comments.into_iter().for_each(|comment| {
|
||||
let data = &comment["data"];
|
||||
|
||||
// If this comment contains replies, handle those too
|
||||
if data["replies"].is_object() {
|
||||
results.append(&mut query_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, query, req))
|
||||
}
|
||||
|
||||
let c = build_comment(&comment, data, Vec::new(), post_link, post_author, highlighted_comment, filters, req);
|
||||
if c.body.to_lowercase().contains(&query.to_lowercase()) {
|
||||
results.push(c);
|
||||
}
|
||||
});
|
||||
|
||||
results
|
||||
}
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn build_comment(
|
||||
comment: &serde_json::Value,
|
||||
data: &serde_json::Value,
|
||||
replies: Vec<Comment>,
|
||||
post_link: &str,
|
||||
post_author: &str,
|
||||
highlighted_comment: &str,
|
||||
filters: &HashSet<String>,
|
||||
req: &Request<Body>,
|
||||
) -> Comment {
|
||||
let id = val(comment, "id");
|
||||
|
||||
let body = if (val(comment, "author") == "[deleted]" && val(comment, "body") == "[removed]") || val(comment, "body") == "[ Removed by Reddit ]" {
|
||||
format!(
|
||||
"<div class=\"md\"><p>[removed] — <a href=\"https://{}{}{}\">view removed comment</a></p></div>",
|
||||
get_setting("LIBREDDIT_PUSHSHIFT_FRONTEND").unwrap_or(String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
|
||||
post_link,
|
||||
id
|
||||
)
|
||||
} else {
|
||||
rewrite_urls(&val(comment, "body_html"))
|
||||
};
|
||||
let kind = comment["kind"].as_str().unwrap_or_default().to_string();
|
||||
|
||||
let unix_time = data["created_utc"].as_f64().unwrap_or_default();
|
||||
let (rel_time, created) = time(unix_time);
|
||||
|
||||
let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time);
|
||||
|
||||
let score = data["score"].as_i64().unwrap_or(0);
|
||||
|
||||
// The JSON API only provides comments up to some threshold.
|
||||
// Further comments have to be loaded by subsequent requests.
|
||||
// The "kind" value will be "more" and the "count"
|
||||
// shows how many more (sub-)comments exist in the respective nesting level.
|
||||
// Note that in certain (seemingly random) cases, the count is simply wrong.
|
||||
let more_count = data["count"].as_i64().unwrap_or_default();
|
||||
|
||||
let awards: Awards = Awards::parse(&data["all_awardings"]);
|
||||
|
||||
let parent_kind_and_id = val(comment, "parent_id");
|
||||
let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
|
||||
|
||||
let highlighted = id == highlighted_comment;
|
||||
|
||||
let author = Author {
|
||||
name: val(comment, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
data["author_flair_type"].as_str().unwrap_or_default(),
|
||||
data["author_flair_richtext"].as_array(),
|
||||
data["author_flair_text"].as_str(),
|
||||
),
|
||||
text: val(comment, "link_flair_text"),
|
||||
background_color: val(comment, "author_flair_background_color"),
|
||||
foreground_color: val(comment, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(comment, "distinguished"),
|
||||
};
|
||||
let is_filtered = filters.contains(&["u_", author.name.as_str()].concat());
|
||||
|
||||
// Many subreddits have a default comment posted about the sub's rules etc.
|
||||
// Many libreddit users do not wish to see this kind of comment by default.
|
||||
// Reddit does not tell us which users are "bots", so a good heuristic is to
|
||||
// collapse stickied moderator comments.
|
||||
let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator";
|
||||
let is_stickied = data["stickied"].as_bool().unwrap_or_default();
|
||||
let collapsed = (is_moderator_comment && is_stickied) || is_filtered;
|
||||
|
||||
Comment {
|
||||
id,
|
||||
kind,
|
||||
parent_id: parent_info[1].to_string(),
|
||||
parent_kind: parent_info[0].to_string(),
|
||||
post_link: post_link.to_string(),
|
||||
post_author: post_author.to_string(),
|
||||
body,
|
||||
author,
|
||||
score: if data["score_hidden"].as_bool().unwrap_or_default() {
|
||||
("\u{2022}".to_string(), "Hidden".to_string())
|
||||
} else {
|
||||
format_num(score)
|
||||
},
|
||||
rel_time,
|
||||
created,
|
||||
edited,
|
||||
replies,
|
||||
highlighted,
|
||||
awards,
|
||||
collapsed,
|
||||
is_filtered,
|
||||
more_count,
|
||||
prefs: Preferences::new(req),
|
||||
}
|
||||
}
|
||||
|
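Aside: the COMMENT_SEARCH_CAPTURE regex above drives the comment-search branch in item(). A small self-contained sketch of that extraction; the URL and the "abc123" post ID are hypothetical examples:

use once_cell::sync::Lazy;
use regex::Regex;

// Same pattern as COMMENT_SEARCH_CAPTURE above.
static SEARCH_CAPTURE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\?q=(.*)&type=comment"#).unwrap());

fn main() {
	// Hypothetical post URL with a comment search query attached.
	let url = "/r/rust/comments/abc123/some_post/?q=borrow%20checker&type=comment";
	let query = match SEARCH_CAPTURE.captures(url) {
		Some(captures) => captures.get(1).unwrap().as_str().replace("%20", " ").replace('+', " "),
		None => String::new(),
	};
	assert_eq!(query, "borrow checker");
}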
49 src/proxy.rs
@@ -1,49 +0,0 @@
|
||||
use actix_web::{client::Client, error, web, Error, HttpResponse, Result};
|
||||
use url::Url;
|
||||
|
||||
use base64::decode;
|
||||
|
||||
pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> {
|
||||
let domains = vec![
|
||||
// THUMBNAILS
|
||||
"a.thumbs.redditmedia.com",
|
||||
"b.thumbs.redditmedia.com",
|
||||
// EMOJI
|
||||
"emoji.redditmedia.com",
|
||||
// ICONS
|
||||
"styles.redditmedia.com",
|
||||
"www.redditstatic.com",
|
||||
// PREVIEWS
|
||||
"preview.redd.it",
|
||||
"external-preview.redd.it",
|
||||
// MEDIA
|
||||
"i.redd.it",
|
||||
"v.redd.it",
|
||||
];
|
||||
|
||||
match decode(b64) {
|
||||
Ok(bytes) => {
|
||||
let media = String::from_utf8(bytes).unwrap_or_default();
|
||||
|
||||
match Url::parse(media.as_str()) {
|
||||
Ok(url) => {
|
||||
let domain = url.domain().unwrap_or_default();
|
||||
|
||||
if domains.contains(&domain) {
|
||||
Client::default().get(media.replace("&amp;", "&")).send().await.map_err(Error::from).map(|res| {
|
||||
HttpResponse::build(res.status())
|
||||
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
|
||||
.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
|
||||
.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
|
||||
.streaming(res)
|
||||
})
|
||||
} else {
|
||||
Err(error::ErrorForbidden("Resource must be from Reddit"))
|
||||
}
|
||||
}
|
||||
_ => Err(error::ErrorBadRequest("Can't parse base64 into URL")),
|
||||
}
|
||||
}
|
||||
_ => Err(error::ErrorBadRequest("Can't decode base64")),
|
||||
}
|
||||
}
|
197 src/search.rs
@@ -1,7 +1,14 @@
|
||||
// CRATES
|
||||
use crate::utils::{error, fetch_posts, param, prefs, request, val, Post, Preferences};
|
||||
use actix_web::{HttpRequest, HttpResponse};
|
||||
use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
|
||||
use crate::{
|
||||
client::json,
|
||||
subreddit::{can_access_quarantine, quarantine},
|
||||
RequestExt,
|
||||
};
|
||||
use askama::Template;
|
||||
use hyper::{Body, Request, Response};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
// STRUCTS
|
||||
struct SearchParams {
|
||||
@@ -11,88 +18,166 @@ struct SearchParams {
|
||||
before: String,
|
||||
after: String,
|
||||
restrict_sr: String,
|
||||
typed: String,
|
||||
}
|
||||
|
||||
// STRUCTS
|
||||
struct Subreddit {
|
||||
name: String,
|
||||
url: String,
|
||||
icon: String,
|
||||
description: String,
|
||||
subscribers: i64,
|
||||
subscribers: (String, String),
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "search.html", escape = "none")]
|
||||
#[template(path = "search.html")]
|
||||
struct SearchTemplate {
|
||||
posts: Vec<Post>,
|
||||
subreddits: Vec<Subreddit>,
|
||||
sub: String,
|
||||
params: SearchParams,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
/// Whether the subreddit itself is filtered.
|
||||
is_filtered: bool,
|
||||
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||
/// and all fetched posts being filtered).
|
||||
all_posts_filtered: bool,
|
||||
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||
all_posts_hidden_nsfw: bool,
|
||||
no_posts: bool,
|
||||
}
|
||||
|
||||
// Regex matched against search queries to determine if they are reddit urls.
|
||||
static REDDIT_URL_MATCH: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap());
|
||||
|
||||
// SERVICES
|
||||
pub async fn find(req: HttpRequest) -> HttpResponse {
|
||||
let path = format!("{}.json?{}", req.path(), req.query_string());
|
||||
let sub = req.match_info().get("sub").unwrap_or("").to_string();
|
||||
|
||||
let sort = if param(&path, "sort").is_empty() {
|
||||
"relevance".to_string()
|
||||
pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// This ensures that during a search, no NSFW posts are fetched at all
|
||||
let nsfw_results = if setting(&req, "show_nsfw") == "on" && !utils::sfw_only() {
|
||||
"&include_over_18=on"
|
||||
} else {
|
||||
param(&path, "sort")
|
||||
""
|
||||
};
|
||||
let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
|
||||
let mut query = param(&path, "q").unwrap_or_default();
|
||||
query = REDDIT_URL_MATCH.replace(&query, "").to_string();
|
||||
|
||||
let subreddits = if param(&path, "restrict_sr").is_empty() {
|
||||
search_subreddits(param(&path, "q")).await
|
||||
if query.is_empty() {
|
||||
return Ok(redirect("/".to_string()));
|
||||
}
|
||||
|
||||
if query.starts_with("r/") {
|
||||
return Ok(redirect(format!("/{}", query)));
|
||||
}
|
||||
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "/find").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
let typed = param(&path, "type").unwrap_or_default();
|
||||
|
||||
let sort = param(&path, "sort").unwrap_or_else(|| "relevance".to_string());
|
||||
let filters = get_filters(&req);
|
||||
|
||||
// If search is not restricted to this subreddit, show other subreddits in search results
|
||||
let subreddits = if param(&path, "restrict_sr").is_none() {
|
||||
let mut subreddits = search_subreddits(&query, &typed).await;
|
||||
subreddits.retain(|s| !filters.contains(s.name.as_str()));
|
||||
subreddits
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
match fetch_posts(&path, String::new()).await {
|
||||
Ok((posts, after)) => HttpResponse::Ok().content_type("text/html").body(
|
||||
SearchTemplate {
|
||||
posts,
|
||||
subreddits,
|
||||
sub,
|
||||
params: SearchParams {
|
||||
q: param(&path, "q"),
|
||||
sort,
|
||||
t: param(&path, "t"),
|
||||
before: param(&path, "after"),
|
||||
after,
|
||||
restrict_sr: param(&path, "restrict_sr"),
|
||||
},
|
||||
prefs: prefs(req),
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
|
||||
// If all requested subs are filtered, we don't need to fetch posts.
|
||||
if sub.split('+').all(|s| filters.contains(s)) {
|
||||
template(SearchTemplate {
|
||||
posts: Vec::new(),
|
||||
subreddits,
|
||||
sub,
|
||||
params: SearchParams {
|
||||
q: query.replace('"', "&quot;"),
|
||||
sort,
|
||||
t: param(&path, "t").unwrap_or_default(),
|
||||
before: param(&path, "after").unwrap_or_default(),
|
||||
after: "".to_string(),
|
||||
restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
|
||||
typed,
|
||||
},
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
is_filtered: true,
|
||||
all_posts_filtered: false,
|
||||
all_posts_hidden_nsfw: false,
|
||||
no_posts: false,
|
||||
})
|
||||
} else {
|
||||
match Post::fetch(&path, quarantined).await {
|
||||
Ok((mut posts, after)) => {
|
||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||
let no_posts = posts.is_empty();
|
||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||
template(SearchTemplate {
|
||||
posts,
|
||||
subreddits,
|
||||
sub,
|
||||
params: SearchParams {
|
||||
q: query.replace('"', "&quot;"),
|
||||
sort,
|
||||
t: param(&path, "t").unwrap_or_default(),
|
||||
before: param(&path, "after").unwrap_or_default(),
|
||||
after,
|
||||
restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
|
||||
typed,
|
||||
},
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
is_filtered: false,
|
||||
all_posts_filtered,
|
||||
all_posts_hidden_nsfw,
|
||||
no_posts,
|
||||
})
|
||||
}
|
||||
.render()
|
||||
.unwrap(),
|
||||
),
|
||||
Err(msg) => error(msg).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn search_subreddits(q: String) -> Vec<Subreddit> {
|
||||
let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
|
||||
|
||||
// Send a request to the url
|
||||
match request(&subreddit_search_path).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(response) => {
|
||||
match response["data"]["children"].as_array() {
|
||||
// For each subreddit from subreddit list
|
||||
Some(list) => list
|
||||
.iter()
|
||||
.map(|subreddit| Subreddit {
|
||||
name: val(subreddit, "display_name_prefixed"),
|
||||
url: val(subreddit, "url"),
|
||||
description: val(subreddit, "public_description"),
|
||||
subscribers: subreddit["data"]["subscribers"].as_u64().unwrap_or_default() as i64,
|
||||
})
|
||||
.collect::<Vec<Subreddit>>(),
|
||||
_ => Vec::new(),
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" || msg == "gated" {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
quarantine(req, sub, msg)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
}
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
_ => Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search_subreddits(q: &str, typed: &str) -> Vec<Subreddit> {
|
||||
let limit = if typed == "sr_user" { "50" } else { "3" };
|
||||
let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={}", q.replace(' ', "+"), limit);
|
||||
|
||||
// Send a request to the url
|
||||
json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"]
|
||||
.as_array()
|
||||
.map(ToOwned::to_owned)
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(|subreddit| {
|
||||
// For each subreddit from subreddit list
|
||||
// Fetch subreddit icon either from the community_icon or icon_img value
|
||||
let icon = subreddit["data"]["community_icon"].as_str().map_or_else(|| val(subreddit, "icon_img"), ToString::to_string);
|
||||
|
||||
Subreddit {
|
||||
name: val(subreddit, "display_name"),
|
||||
url: val(subreddit, "url"),
|
||||
icon: format_url(&icon),
|
||||
description: val(subreddit, "public_description"),
|
||||
subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<Subreddit>>()
|
||||
}
|
||||
|
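Aside: REDDIT_URL_MATCH above strips a pasted Reddit URL down to its path before the redirect checks in find(). A quick sketch; the example URL is illustrative:

use once_cell::sync::Lazy;
use regex::Regex;

// Same pattern as REDDIT_URL_MATCH above.
static REDDIT_URL: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap());

fn main() {
	// Pasting a full Reddit URL into the search box leaves only the path,
	// which then hits the "r/..." redirect branch in find().
	let query = REDDIT_URL.replace("https://www.reddit.com/r/rust", "").to_string();
	assert_eq!(query, "r/rust");
	assert!(query.starts_with("r/"));
}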
742 src/server.rs (new file)
@@ -0,0 +1,742 @@
|
||||
use brotli::enc::{BrotliCompress, BrotliEncoderParams};
|
||||
use cached::proc_macro::cached;
|
||||
use cookie::Cookie;
|
||||
use core::f64;
|
||||
use futures_lite::{future::Boxed, Future, FutureExt};
|
||||
use hyper::{
|
||||
body,
|
||||
body::HttpBody,
|
||||
header,
|
||||
service::{make_service_fn, service_fn},
|
||||
HeaderMap,
|
||||
};
|
||||
use hyper::{Body, Method, Request, Response, Server as HyperServer};
|
||||
use libflate::gzip;
|
||||
use route_recognizer::{Params, Router};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
io,
|
||||
pin::Pin,
|
||||
result::Result,
|
||||
str::{from_utf8, Split},
|
||||
string::ToString,
|
||||
};
|
||||
use time::Duration;
|
||||
|
||||
use crate::dbg_msg;
|
||||
|
||||
type BoxResponse = Pin<Box<dyn Future<Output = Result<Response<Body>, String>> + Send>>;
|
||||
|
||||
/// Compressors for the response Body, in ascending order of preference.
|
||||
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
|
||||
enum CompressionType {
|
||||
Passthrough,
|
||||
Gzip,
|
||||
Brotli,
|
||||
}
|
||||
|
||||
/// All browsers support gzip, so if we are given `Accept-Encoding: *`, deliver
|
||||
/// gzipped content.
|
||||
///
|
||||
/// Brotli would be nice universally, but Safari (iOS, iPhone, macOS) reportedly
|
||||
/// doesn't support it yet.
|
||||
const DEFAULT_COMPRESSOR: CompressionType = CompressionType::Gzip;
|
||||
|
||||
impl CompressionType {
|
||||
/// Returns a `CompressionType` given a content coding
|
||||
/// in [RFC 7231](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.4)
|
||||
/// format.
|
||||
fn parse(s: &str) -> Option<CompressionType> {
|
||||
let c = match s {
|
||||
// Compressors we support.
|
||||
"gzip" => CompressionType::Gzip,
|
||||
"br" => CompressionType::Brotli,
|
||||
|
||||
// The wildcard means that we can choose whatever
|
||||
// compression we prefer. In this case, use the
|
||||
// default.
|
||||
"*" => DEFAULT_COMPRESSOR,
|
||||
|
||||
// Compressor not supported.
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(c)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for CompressionType {
|
||||
fn to_string(&self) -> String {
|
||||
match self {
|
||||
CompressionType::Gzip => "gzip".to_string(),
|
||||
CompressionType::Brotli => "br".to_string(),
|
||||
_ => String::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Route<'a> {
|
||||
router: &'a mut Router<fn(Request<Body>) -> BoxResponse>,
|
||||
path: String,
|
||||
}
|
||||
|
||||
pub struct Server {
|
||||
pub default_headers: HeaderMap,
|
||||
router: Router<fn(Request<Body>) -> BoxResponse>,
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! headers(
|
||||
{ $($key:expr => $value:expr),+ } => {
|
||||
{
|
||||
let mut m = hyper::HeaderMap::new();
|
||||
$(
|
||||
if let Ok(val) = hyper::header::HeaderValue::from_str($value) {
|
||||
m.insert($key, val);
|
||||
}
|
||||
)+
|
||||
m
|
||||
}
|
||||
};
|
||||
);
|
||||
|
||||
pub trait RequestExt {
|
||||
fn params(&self) -> Params;
|
||||
fn param(&self, name: &str) -> Option<String>;
|
||||
fn set_params(&mut self, params: Params) -> Option<Params>;
|
||||
fn cookies(&self) -> Vec<Cookie>;
|
||||
fn cookie(&self, name: &str) -> Option<Cookie>;
|
||||
}
|
||||
|
||||
pub trait ResponseExt {
|
||||
fn cookies(&self) -> Vec<Cookie>;
|
||||
fn insert_cookie(&mut self, cookie: Cookie);
|
||||
fn remove_cookie(&mut self, name: String);
|
||||
}
|
||||
|
||||
impl RequestExt for Request<Body> {
|
||||
fn params(&self) -> Params {
|
||||
self.extensions().get::<Params>().unwrap_or(&Params::new()).clone()
|
||||
// self.extensions()
|
||||
// .get::<RequestMeta>()
|
||||
// .and_then(|meta| meta.route_params())
|
||||
// .expect("Routerify: No RouteParams added while processing request")
|
||||
}
|
||||
|
||||
fn param(&self, name: &str) -> Option<String> {
|
||||
self.params().find(name).map(std::borrow::ToOwned::to_owned)
|
||||
}
|
||||
|
||||
fn set_params(&mut self, params: Params) -> Option<Params> {
|
||||
self.extensions_mut().insert(params)
|
||||
}
|
||||
|
||||
fn cookies(&self) -> Vec<Cookie> {
|
||||
self.headers().get("Cookie").map_or(Vec::new(), |header| {
|
||||
header
|
||||
.to_str()
|
||||
.unwrap_or_default()
|
||||
.split("; ")
|
||||
.map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
fn cookie(&self, name: &str) -> Option<Cookie> {
|
||||
self.cookies().into_iter().find(|c| c.name() == name)
|
||||
}
|
||||
}
|
||||
|
||||
impl ResponseExt for Response<Body> {
|
||||
fn cookies(&self) -> Vec<Cookie> {
|
||||
self.headers().get("Cookie").map_or(Vec::new(), |header| {
|
||||
header
|
||||
.to_str()
|
||||
.unwrap_or_default()
|
||||
.split("; ")
|
||||
.map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
fn insert_cookie(&mut self, cookie: Cookie) {
|
||||
if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
|
||||
self.headers_mut().append("Set-Cookie", val);
|
||||
}
|
||||
}
|
||||
|
||||
fn remove_cookie(&mut self, name: String) {
|
||||
let mut cookie = Cookie::named(name);
|
||||
cookie.set_path("/");
|
||||
cookie.set_max_age(Duration::seconds(1));
|
||||
if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
|
||||
self.headers_mut().append("Set-Cookie", val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Route<'_> {
|
||||
fn method(&mut self, method: Method, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
|
||||
self.router.add(&format!("/{}{}", method.as_str(), self.path), dest);
|
||||
self
|
||||
}
|
||||
|
||||
/// Add an endpoint for `GET` requests
|
||||
pub fn get(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
|
||||
self.method(Method::GET, dest)
|
||||
}
|
||||
|
||||
/// Add an endpoint for `POST` requests
|
||||
pub fn post(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
|
||||
self.method(Method::POST, dest)
|
||||
}
|
||||
}
|
||||
|
||||
impl Server {
|
||||
pub fn new() -> Self {
|
||||
Server {
|
||||
default_headers: HeaderMap::new(),
|
||||
router: Router::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn at(&mut self, path: &str) -> Route {
|
||||
Route {
|
||||
path: path.to_owned(),
|
||||
router: &mut self.router,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
|
||||
let make_svc = make_service_fn(move |_conn| {
|
||||
// Clone these values so the service closure can take ownership of them
|
||||
let router = self.router.clone();
|
||||
let default_headers = self.default_headers.clone();
|
||||
|
||||
// This is the `Service` that will handle the connection.
|
||||
// `service_fn` is a helper to convert a function that
|
||||
// returns a Response into a `Service`.
|
||||
// let shared_router = router.clone();
|
||||
async move {
|
||||
Ok::<_, String>(service_fn(move |req: Request<Body>| {
|
||||
let req_headers = req.headers().clone();
|
||||
let def_headers = default_headers.clone();
|
||||
|
||||
// Remove double slashes and decode encoded slashes
|
||||
let mut path = req.uri().path().replace("//", "/").replace("%2F", "/");
|
||||
|
||||
// Remove trailing slashes
|
||||
if path != "/" && path.ends_with('/') {
|
||||
path.pop();
|
||||
}
|
||||
|
||||
// Match the visited path with an added route
|
||||
match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
|
||||
// If a route was configured for this path
|
||||
Ok(found) => {
|
||||
let mut parammed = req;
|
||||
parammed.set_params(found.params().clone());
|
||||
|
||||
// Run the route's function
|
||||
let func = (found.handler().to_owned().to_owned())(parammed);
|
||||
async move {
|
||||
match func.await {
|
||||
Ok(mut res) => {
|
||||
res.headers_mut().extend(def_headers);
|
||||
let _ = compress_response(&req_headers, &mut res).await;
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
|
||||
}
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
// If there was a routing error
|
||||
Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(),
|
||||
}
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
// Build SocketAddr from provided address
|
||||
let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));
|
||||
|
||||
// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
|
||||
let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
|
||||
// Wait for the CTRL+C signal
|
||||
tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
|
||||
});
|
||||
|
||||
server.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a boilerplate Response for error conditions. This response will be
|
||||
/// compressed if requested by client.
|
||||
async fn new_boilerplate(
|
||||
default_headers: HeaderMap<header::HeaderValue>,
|
||||
req_headers: HeaderMap<header::HeaderValue>,
|
||||
status: u16,
|
||||
body: Body,
|
||||
) -> Result<Response<Body>, String> {
|
||||
match Response::builder().status(status).body(body) {
|
||||
Ok(mut res) => {
|
||||
let _ = compress_response(&req_headers, &mut res).await;
|
||||
|
||||
res.headers_mut().extend(default_headers.clone());
|
||||
Ok(res)
|
||||
}
|
||||
Err(msg) => Err(msg.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Determines the desired compressor based on the Accept-Encoding header.
|
||||
///
|
||||
/// This function will honor the [q-value](https://developer.mozilla.org/en-US/docs/Glossary/Quality_values)
|
||||
/// for each compressor. The q-value is an optional parameter, a decimal value
|
||||
/// on \[0..1\], to order the compressors by preference. An Accept-Encoding value
|
||||
/// with no q-values is also accepted.
|
||||
///
|
||||
/// Here are [examples](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#examples)
|
||||
/// of valid Accept-Encoding headers.
|
||||
///
|
||||
/// ```http
|
||||
/// Accept-Encoding: gzip
|
||||
/// Accept-Encoding: gzip, compress, br
|
||||
/// Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1
|
||||
/// ```
|
||||
#[cached]
|
||||
fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
|
||||
if accept_encoding.is_empty() {
|
||||
return None;
|
||||
};
|
||||
|
||||
// Keep track of the compressor candidate based on both the client's
|
||||
// preference and our own. Concrete examples:
|
||||
//
|
||||
// 1. "Accept-Encoding: gzip, br" => assuming we like brotli more than
|
||||
// gzip, and the browser supports brotli, we choose brotli
|
||||
//
|
||||
// 2. "Accept-Encoding: gzip;q=0.8, br;q=0.3" => the client has stated a
|
||||
// preference for gzip over brotli, so we choose gzip
|
||||
//
|
||||
// To do this, we need to define a struct which contains the requested
|
||||
// compressor (abstracted as a CompressionType enum) and the
|
||||
// q-value. If no q-value is defined for the compressor, we assume one of
|
||||
// 1.0. We first compare compressor candidates by comparing q-values, and
|
||||
// then CompressionTypes. We keep track of whatever is the greatest per our
|
||||
// ordering.
|
||||
|
||||
struct CompressorCandidate {
|
||||
alg: CompressionType,
|
||||
q: f64,
|
||||
}
|
||||
|
||||
impl Ord for CompressorCandidate {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
// Compare q-values. Break ties with the
|
||||
// CompressionType values.
|
||||
|
||||
match self.q.total_cmp(&other.q) {
|
||||
Ordering::Equal => self.alg.cmp(&other.alg),
|
||||
ord => ord,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for CompressorCandidate {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
// Guard against NAN, both on our end and on the other.
|
||||
if self.q.is_nan() || other.q.is_nan() {
|
||||
return None;
|
||||
};
|
||||
|
||||
// f64 and CompressionType are ordered, except in the case
|
||||
// where the f64 is NAN (which we checked against), so we
|
||||
// can safely return a Some here.
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for CompressorCandidate {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
(self.q == other.q) && (self.alg == other.alg)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for CompressorCandidate {}
|
||||
|
||||
// This is the current candidate.
|
||||
//
|
||||
// Assume no candidate so far. We do this by assigning the sentinel value
|
||||
// of negative infinity to the q-value. If this value is negative infinity,
|
||||
// that means there was no viable compressor candidate.
|
||||
let mut cur_candidate = CompressorCandidate {
|
||||
alg: CompressionType::Passthrough,
|
||||
q: f64::NEG_INFINITY,
|
||||
};
|
||||
|
||||
// This loop reads the requested compressors and keeps track of whichever
|
||||
// one has the highest priority per our heuristic.
|
||||
for val in accept_encoding.split(',') {
|
||||
let mut q: f64 = 1.0;
|
||||
|
||||
// The compressor and q-value (if the latter is defined)
|
||||
// will be delimited by semicolons.
|
||||
let mut spl: Split<char> = val.split(';');
|
||||
|
||||
// Get the compressor. For example, in
|
||||
// gzip;q=0.8
|
||||
// this grabs "gzip" in the string. It
|
||||
// will further validate the compressor against the
|
||||
// list of those we support. If it is not supported,
|
||||
// we move onto the next one.
|
||||
let compressor: CompressionType = match spl.next() {
|
||||
// CompressionType::parse will return the appropriate enum given
|
||||
// a string. For example, it will return CompressionType::Gzip
|
||||
// when given "gzip".
|
||||
Some(s) => match CompressionType::parse(s.trim()) {
|
||||
Some(candidate) => candidate,
|
||||
|
||||
// We don't support the requested compression algorithm.
|
||||
None => continue,
|
||||
},
|
||||
|
||||
// We should never get here, but I'm paranoid.
|
||||
None => continue,
|
||||
};
|
||||
|
||||
// Get the q-value. This might not be defined, in which case assume
|
||||
// 1.0.
|
||||
if let Some(s) = spl.next() {
|
||||
if !(s.len() > 2 && s.starts_with("q=")) {
|
||||
// If the q-value is malformed, the header is malformed, so
|
||||
// abort.
|
||||
return None;
|
||||
}
|
||||
|
||||
match s[2..].parse::<f64>() {
|
||||
Ok(val) => {
|
||||
if (0.0..=1.0).contains(&val) {
|
||||
q = val;
|
||||
} else {
|
||||
// If the value is outside [0..1], header is malformed.
|
||||
// Abort.
|
||||
return None;
|
||||
};
|
||||
}
|
||||
Err(_) => {
|
||||
// If this isn't a f64, then assume a malformed header
|
||||
// value and abort.
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// If new_candidate > cur_candidate, make new_candidate the new
|
||||
// cur_candidate. But do this safely! It is very possible that
|
||||
// someone gave us the string "NAN", which (&str).parse::<f64>
|
||||
// will happily translate to f64::NAN.
|
||||
let new_candidate = CompressorCandidate { alg: compressor, q };
|
||||
if let Some(ord) = new_candidate.partial_cmp(&cur_candidate) {
|
||||
if ord == Ordering::Greater {
|
||||
cur_candidate = new_candidate;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if cur_candidate.q != f64::NEG_INFINITY {
|
||||
Some(cur_candidate.alg)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Compress the response body, if possible or desirable. The Body will be
|
||||
/// compressed in place, and a new header Content-Encoding will be set
|
||||
/// indicating the compression algorithm.
|
||||
///
|
||||
/// This function deems Body eligible compression if and only if the following
|
||||
/// conditions are met:
|
||||
///
|
||||
/// 1. the HTTP client requests a compression encoding in the Accept-Encoding
|
||||
/// header (hence the need for the req_headers);
|
||||
///
|
||||
/// 2. the content encoding corresponds to a compression algorithm we support;
|
||||
///
|
||||
/// 3. the Media type in the Content-Type response header is text with any
|
||||
/// subtype (e.g. text/plain) or application/json.
|
||||
///
|
||||
/// compress_response returns Ok on successful compression, or if not all three
|
||||
/// conditions above are met. It returns Err if there was a problem decoding
|
||||
/// any header in either req_headers or res, but res will remain intact.
|
||||
///
|
||||
/// This function logs errors to stderr, but only in debug mode. No information
|
||||
/// is logged in release builds.
|
||||
async fn compress_response(req_headers: &HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
|
||||
// Check if the data is eligible for compression.
|
||||
if let Some(hdr) = res.headers().get(header::CONTENT_TYPE) {
|
||||
match from_utf8(hdr.as_bytes()) {
|
||||
Ok(val) => {
|
||||
let s = val.to_string();
|
||||
|
||||
// TODO: better determination of what is eligible for compression
|
||||
if !(s.starts_with("text/") || s.starts_with("application/json")) {
|
||||
return Ok(());
|
||||
};
|
||||
}
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
};
|
||||
} else {
|
||||
// Response declares no Content-Type. Assume for simplicity that it
|
||||
// cannot be compressed.
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Don't bother if the size of the response body will fit
|
||||
// within an IP frame (less the bytes that make up the TCP/IP and HTTP
|
||||
// headers).
|
||||
if res.body().size_hint().lower() < 1452 {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Check to see which compressor is requested, and if we can use it.
|
||||
let accept_encoding: String = match req_headers.get(header::ACCEPT_ENCODING) {
|
||||
None => return Ok(()), // Client requested no compression.
|
||||
|
||||
Some(hdr) => match String::from_utf8(hdr.as_bytes().into()) {
|
||||
Ok(val) => val,
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
Err(_) => return Ok(()),
|
||||
},
|
||||
};
|
||||
|
||||
let compressor: CompressionType = match determine_compressor(accept_encoding) {
|
||||
Some(c) => c,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
// Get the body from the response.
|
||||
let body_bytes: Vec<u8> = match body::to_bytes(res.body_mut()).await {
|
||||
Ok(b) => b.to_vec(),
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
};
|
||||
|
||||
// Compress!
|
||||
match compress_body(compressor, body_bytes) {
|
||||
Ok(compressed) => {
|
||||
// We get here iff the compression was successful. Replace the body
|
||||
// with the compressed payload, and add the appropriate
|
||||
// Content-Encoding header in the response.
|
||||
res.headers_mut().insert(header::CONTENT_ENCODING, compressor.to_string().parse().unwrap());
|
||||
*(res.body_mut()) = Body::from(compressed);
|
||||
}
|
||||
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Compresses a `Vec<u8>` given a [`CompressionType`].
|
||||
///
|
||||
/// This is a helper function for [`compress_response`] and should not be
|
||||
/// called directly.
|
||||
|
||||
// I've chosen a TTL of 600 (== 10 minutes) since compression is
|
||||
// computationally expensive and we don't want to be doing it often. This is
|
||||
// larger than client::json's TTL, but that's okay, because if client::json
|
||||
// returns a new serde_json::Value, body_bytes changes, so this function will
|
||||
// execute again.
|
||||
#[cached(size = 100, time = 600, result = true)]
|
||||
fn compress_body(compressor: CompressionType, body_bytes: Vec<u8>) -> Result<Vec<u8>, String> {
|
||||
// io::Cursor implements io::Read, required for our encoders.
|
||||
let mut reader = io::Cursor::new(body_bytes);
|
||||
|
||||
let compressed: Vec<u8> = match compressor {
|
||||
CompressionType::Gzip => {
|
||||
let mut gz: gzip::Encoder<Vec<u8>> = match gzip::Encoder::new(Vec::new()) {
|
||||
Ok(gz) => gz,
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
};
|
||||
|
||||
match io::copy(&mut reader, &mut gz) {
|
||||
Ok(_) => match gz.finish().into_result() {
|
||||
Ok(compressed) => compressed,
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
CompressionType::Brotli => {
|
||||
// We may want to make the compression parameters configurable
|
||||
// in the future. For now, the defaults are sufficient.
|
||||
let brotli_params = BrotliEncoderParams::default();
|
||||
|
||||
let mut compressed = Vec::<u8>::new();
|
||||
match BrotliCompress(&mut reader, &mut compressed, &brotli_params) {
|
||||
Ok(_) => compressed,
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This arm is for any requested compressor for which we don't yet
|
||||
// have an implementation.
|
||||
_ => {
|
||||
let msg = "unsupported compressor".to_string();
|
||||
return Err(msg);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(compressed)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use brotli::Decompressor as BrotliDecompressor;
|
||||
use futures_lite::future::block_on;
|
||||
use lipsum::lipsum;
|
||||
use std::{boxed::Box, io};
|
||||
|
||||
#[test]
|
||||
fn test_determine_compressor() {
|
||||
// Single compressor given.
|
||||
assert_eq!(determine_compressor("unsupported".to_string()), None);
|
||||
assert_eq!(determine_compressor("gzip".to_string()), Some(CompressionType::Gzip));
|
||||
assert_eq!(determine_compressor("*".to_string()), Some(DEFAULT_COMPRESSOR));
|
||||
|
||||
// Multiple compressors.
|
||||
assert_eq!(determine_compressor("gzip, br".to_string()), Some(CompressionType::Brotli));
|
||||
assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3".to_string()), Some(CompressionType::Gzip));
|
||||
assert_eq!(determine_compressor("br, gzip".to_string()), Some(CompressionType::Brotli));
|
||||
assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4".to_string()), Some(CompressionType::Gzip));
|
||||
|
||||
// Invalid q-values.
|
||||
assert_eq!(determine_compressor("gzip;q=NAN".to_string()), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_compress_response() {
|
||||
// This macro generates an Accept-Encoding header value given any number of
|
||||
// compressors.
|
||||
macro_rules! ae_gen {
|
||||
($x:expr) => {
|
||||
$x.to_string().as_str()
|
||||
};
|
||||
|
||||
($x:expr, $($y:expr),+) => {
|
||||
format!("{}, {}", $x.to_string(), ae_gen!($($y),+)).as_str()
|
||||
};
|
||||
}
|
||||
|
||||
for accept_encoding in [
|
||||
"*",
|
||||
ae_gen!(CompressionType::Gzip),
|
||||
ae_gen!(CompressionType::Brotli, CompressionType::Gzip),
|
||||
ae_gen!(CompressionType::Brotli),
|
||||
] {
|
||||
// Determine what the expected encoding should be based on the
|
||||
// specific encodings we accept.
|
||||
let expected_encoding: CompressionType = match determine_compressor(accept_encoding.to_string()) {
|
||||
Some(s) => s,
|
||||
None => panic!("determine_compressor(accept_encoding.to_string()) => None"),
|
||||
};
|
||||
|
||||
// Build headers with our Accept-Encoding.
|
||||
let mut req_headers = HeaderMap::new();
|
||||
req_headers.insert(header::ACCEPT_ENCODING, header::HeaderValue::from_str(accept_encoding).unwrap());
|
||||
|
||||
// Build test response.
|
||||
let lorem_ipsum: String = lipsum(10000);
|
||||
let expected_lorem_ipsum = Vec::<u8>::from(lorem_ipsum.as_str());
|
||||
let mut res = Response::builder()
|
||||
.status(200)
|
||||
.header(header::CONTENT_TYPE, "text/plain")
|
||||
.body(Body::from(lorem_ipsum))
|
||||
.unwrap();
|
||||
|
||||
// Perform the compression.
|
||||
if let Err(e) = block_on(compress_response(&req_headers, &mut res)) {
|
||||
panic!("compress_response(&req_headers, &mut res) => Err(\"{}\")", e);
|
||||
};
|
||||
|
||||
// If the content was compressed, we expect the Content-Encoding
|
||||
// header to be modified.
|
||||
assert_eq!(
|
||||
res
|
||||
.headers()
|
||||
.get(header::CONTENT_ENCODING)
|
||||
.unwrap_or_else(|| panic!("missing content-encoding header"))
|
||||
.to_str()
|
||||
.unwrap_or_else(|_| panic!("failed to convert Content-Encoding header::HeaderValue to String")),
|
||||
expected_encoding.to_string()
|
||||
);
|
||||
|
||||
// Decompress body and make sure it's equal to what we started
|
||||
// with.
|
||||
//
|
||||
// In the case of no compression, just make sure the "new" body in
|
||||
// the Response is the same as what we started with.
|
||||
let body_vec = match block_on(body::to_bytes(res.body_mut())) {
|
||||
Ok(b) => b.to_vec(),
|
||||
Err(e) => panic!("{}", e),
|
||||
};
|
||||
|
||||
if expected_encoding == CompressionType::Passthrough {
|
||||
assert!(body_vec.eq(&expected_lorem_ipsum));
|
||||
continue;
|
||||
}
|
||||
|
||||
// This provides an io::Read for the underlying body.
|
||||
let mut body_cursor: io::Cursor<Vec<u8>> = io::Cursor::new(body_vec);
|
||||
|
||||
// Match the appropriate decompressor for the given
|
||||
// expected_encoding.
|
||||
let mut decoder: Box<dyn io::Read> = match expected_encoding {
|
||||
CompressionType::Gzip => match gzip::Decoder::new(&mut body_cursor) {
|
||||
Ok(dgz) => Box::new(dgz),
|
||||
Err(e) => panic!("{}", e),
|
||||
},
|
||||
|
||||
CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),
|
||||
|
||||
_ => panic!("no decompressor for {}", expected_encoding.to_string()),
|
||||
};
|
||||
|
||||
let mut decompressed = Vec::<u8>::new();
|
||||
if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
|
||||
panic!("{}", e);
|
||||
};
|
||||
|
||||
assert!(decompressed.eq(&expected_lorem_ipsum));
|
||||
}
|
||||
}
|
||||
}
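
The tests above exercise two behaviours worth keeping in mind: Accept-Encoding entries are compared by their q-values, and a malformed q-value disqualifies the header. The following standalone sketch is not the crate's determine_compressor (wildcard handling is omitted, and it returns the encoding name as a String rather than a CompressionType); it only illustrates the same selection logic with the standard library, using the same header strings as the tests.

// Standalone sketch of q-value selection; not the crate's implementation.
fn pick_encoding(accept_encoding: &str) -> Option<String> {
    let mut best: Option<(String, f64)> = None;
    for entry in accept_encoding.split(',') {
        let mut parts = entry.trim().splitn(2, ";q=");
        let name = parts.next().unwrap_or_default().to_string();
        // Entries without an explicit q-value default to 1.0; a q-value that
        // does not parse to a finite number disqualifies the entry.
        let q: f64 = match parts.next() {
            Some(q) => q.trim().parse().unwrap_or(f64::NAN),
            None => 1.0,
        };
        if !q.is_finite() {
            continue;
        }
        if best.as_ref().map_or(true, |(_, best_q)| q > *best_q) {
            best = Some((name, q));
        }
    }
    best.map(|(name, _)| name)
}

fn main() {
    // Mirrors the assertions in the tests above.
    assert_eq!(pick_encoding("br;q=0.3, gzip;q=0.4"), Some("gzip".to_string()));
    assert_eq!(pick_encoding("gzip;q=NAN"), None);
}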
|
src/settings.rs (143 changed lines)
@@ -1,7 +1,12 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
// CRATES
|
||||
use crate::utils::{prefs, Preferences};
|
||||
use actix_web::{cookie::Cookie, web::Form, HttpMessage, HttpRequest, HttpResponse};
|
||||
use crate::server::ResponseExt;
|
||||
use crate::utils::{redirect, template, Preferences};
|
||||
use askama::Template;
|
||||
use cookie::Cookie;
|
||||
use futures_lite::StreamExt;
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
// STRUCTS
|
||||
@@ -9,51 +14,129 @@ use time::{Duration, OffsetDateTime};
|
||||
#[template(path = "settings.html")]
|
||||
struct SettingsTemplate {
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
pub struct SettingsForm {
|
||||
theme: Option<String>,
|
||||
front_page: Option<String>,
|
||||
layout: Option<String>,
|
||||
wide: Option<String>,
|
||||
comment_sort: Option<String>,
|
||||
hide_nsfw: Option<String>,
|
||||
}
|
||||
// CONSTANTS
|
||||
|
||||
const PREFS: [&str; 13] = [
|
||||
"theme",
|
||||
"front_page",
|
||||
"layout",
|
||||
"wide",
|
||||
"comment_sort",
|
||||
"post_sort",
|
||||
"show_nsfw",
|
||||
"blur_nsfw",
|
||||
"use_hls",
|
||||
"hide_hls_notification",
|
||||
"autoplay_videos",
|
||||
"hide_awards",
|
||||
"disable_visit_reddit_confirmation",
|
||||
];
|
||||
|
||||
// FUNCTIONS
|
||||
|
||||
// Retrieve cookies from request "Cookie" header
|
||||
pub async fn get(req: HttpRequest) -> HttpResponse {
|
||||
let s = SettingsTemplate { prefs: prefs(req) }.render().unwrap();
|
||||
HttpResponse::Ok().content_type("text/html").body(s)
|
||||
pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let url = req.uri().to_string();
|
||||
template(SettingsTemplate {
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
||||
// Set cookies using response "Set-Cookie" header
|
||||
pub async fn set(req: HttpRequest, form: Form<SettingsForm>) -> HttpResponse {
|
||||
let mut res = HttpResponse::Found();
|
||||
pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Split the body into parts
|
||||
let (parts, mut body) = req.into_parts();
|
||||
|
||||
let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "hide_nsfw"];
|
||||
let values = vec![&form.theme, &form.front_page, &form.layout, &form.wide, &form.comment_sort, &form.hide_nsfw];
|
||||
// Grab existing cookies
|
||||
let _cookies: Vec<Cookie> = parts
|
||||
.headers
|
||||
.get_all("Cookie")
|
||||
.iter()
|
||||
.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
|
||||
.collect();
|
||||
|
||||
for (i, name) in names.iter().enumerate() {
|
||||
match values[i] {
|
||||
Some(value) => res.cookie(
|
||||
Cookie::build(name.to_owned(), value)
|
||||
// Aggregate the body...
|
||||
// let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?;
|
||||
let body_bytes = body
|
||||
.try_fold(Vec::new(), |mut data, chunk| {
|
||||
data.extend_from_slice(&chunk);
|
||||
Ok(data)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
|
||||
|
||||
let mut response = redirect("/settings".to_string());
|
||||
|
||||
for &name in &PREFS {
|
||||
match form.get(name) {
|
||||
Some(value) => response.insert_cookie(
|
||||
Cookie::build(name.to_owned(), value.clone())
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
),
|
||||
None => match HttpMessage::cookie(&req, name.to_owned()) {
|
||||
Some(cookie) => res.del_cookie(&cookie),
|
||||
None => &mut res,
|
||||
},
|
||||
None => response.remove_cookie(name.to_string()),
|
||||
};
|
||||
}
|
||||
|
||||
res
|
||||
.content_type("text/html")
|
||||
.set_header("Location", "/settings")
|
||||
.body(r#"Redirecting to <a href="/settings">settings</a>..."#)
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
|
||||
// Split the body into parts
|
||||
let (parts, _) = req.into_parts();
|
||||
|
||||
// Grab existing cookies
|
||||
let _cookies: Vec<Cookie> = parts
|
||||
.headers
|
||||
.get_all("Cookie")
|
||||
.iter()
|
||||
.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
|
||||
.collect();
|
||||
|
||||
let query = parts.uri.query().unwrap_or_default().as_bytes();
|
||||
|
||||
let form = url::form_urlencoded::parse(query).collect::<HashMap<_, _>>();
|
||||
|
||||
let path = match form.get("redirect") {
|
||||
Some(value) => format!("/{}", value.replace("%26", "&").replace("%23", "#")),
|
||||
None => "/".to_string(),
|
||||
};
|
||||
|
||||
let mut response = redirect(path);
|
||||
|
||||
for name in [PREFS.to_vec(), vec!["subscriptions", "filters"]].concat() {
|
||||
match form.get(name) {
|
||||
Some(value) => response.insert_cookie(
|
||||
Cookie::build(name.to_owned(), value.clone())
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
),
|
||||
None => {
|
||||
if remove_cookies {
|
||||
response.remove_cookie(name.to_string());
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
response
|
||||
}
|
||||
|
||||
// Set cookies using response "Set-Cookie" header
|
||||
pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
Ok(set_cookies_method(req, true))
|
||||
}
|
||||
|
||||
pub async fn update(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
Ok(set_cookies_method(req, false))
|
||||
}
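
All three handlers above persist preferences the same way: one Set-Cookie header per preference name, scoped to the whole site and expiring in roughly a year. As a hedged, self-contained illustration of that builder chain (using the cookie and time crates that settings.rs already imports; the "theme"/"dark" pair is just an example value, not a required preference):

use cookie::Cookie;
use time::{Duration, OffsetDateTime};

fn main() {
    // Same builder chain as in set() and set_cookies_method() above.
    let pref = Cookie::build("theme", "dark")
        .path("/")
        .http_only(true)
        .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
        .finish();

    // Displays roughly as: theme=dark; HttpOnly; Path=/; Expires=<one year from now>
    println!("{}", pref);
}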
|
||||
|
src/subreddit.rs (470 changed lines)
@@ -1,120 +1,436 @@
|
||||
// CRATES
|
||||
use crate::utils::*;
|
||||
use actix_web::{HttpRequest, HttpResponse, Result};
|
||||
use crate::utils::{
|
||||
catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
|
||||
};
|
||||
use crate::{client::json, server::ResponseExt, RequestExt};
|
||||
use askama::Template;
|
||||
use cookie::Cookie;
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
// STRUCTS
|
||||
#[derive(Template)]
|
||||
#[template(path = "subreddit.html", escape = "none")]
|
||||
#[template(path = "subreddit.html")]
|
||||
struct SubredditTemplate {
|
||||
sub: Subreddit,
|
||||
posts: Vec<Post>,
|
||||
sort: (String, String),
|
||||
ends: (String, String),
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
redirect_url: String,
|
||||
/// Whether the subreddit itself is filtered.
|
||||
is_filtered: bool,
|
||||
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||
/// and all fetched posts being filtered).
|
||||
all_posts_filtered: bool,
|
||||
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||
all_posts_hidden_nsfw: bool,
|
||||
no_posts: bool,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "wiki.html", escape = "none")]
|
||||
#[template(path = "wiki.html")]
|
||||
struct WikiTemplate {
|
||||
sub: String,
|
||||
wiki: String,
|
||||
page: String,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "wall.html")]
|
||||
struct WallTemplate {
|
||||
title: String,
|
||||
sub: String,
|
||||
msg: String,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
// SERVICES
|
||||
pub async fn page(req: HttpRequest) -> HttpResponse {
|
||||
let path = format!("{}.json?{}", req.path(), req.query_string());
|
||||
let default = cookie(&req, "front_page");
|
||||
let sub_name = req
|
||||
.match_info()
|
||||
.get("sub")
|
||||
.unwrap_or(if default.is_empty() { "popular" } else { default.as_str() })
|
||||
.to_string();
|
||||
let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
|
||||
pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Build Reddit API path
|
||||
let root = req.uri().path() == "/";
|
||||
let subscribed = setting(&req, "subscriptions");
|
||||
let front_page = setting(&req, "front_page");
|
||||
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
|
||||
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
|
||||
|
||||
match fetch_posts(&path, String::new()).await {
|
||||
Ok((posts, after)) => {
|
||||
// If you can get subreddit posts, also request subreddit metadata
|
||||
let sub = if !sub_name.contains('+') && sub_name != "popular" && sub_name != "all" {
|
||||
subreddit(&sub_name).await.unwrap_or_default()
|
||||
} else if sub_name.contains('+') {
|
||||
Subreddit {
|
||||
name: sub_name,
|
||||
..Subreddit::default()
|
||||
}
|
||||
} else {
|
||||
Subreddit::default()
|
||||
};
|
||||
|
||||
let s = SubredditTemplate {
|
||||
sub,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t")),
|
||||
ends: (param(&path, "after"), after),
|
||||
prefs: prefs(req),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
HttpResponse::Ok().content_type("text/html").body(s)
|
||||
let sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
|
||||
if subscribed.is_empty() {
|
||||
"popular".to_string()
|
||||
} else {
|
||||
subscribed.clone()
|
||||
}
|
||||
} else {
|
||||
front_page.clone()
|
||||
});
|
||||
let quarantined = can_access_quarantine(&req, &sub_name) || root;
|
||||
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub_name, "").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
if req.param("sub").is_some() && sub_name.starts_with("u_") {
|
||||
return Ok(redirect(["/user/", &sub_name[2..]].concat()));
|
||||
}
|
||||
|
||||
// Request subreddit metadata
|
||||
let sub = if !sub_name.contains('+') && sub_name != subscribed && sub_name != "popular" && sub_name != "all" {
|
||||
// Regular subreddit
|
||||
subreddit(&sub_name, quarantined).await.unwrap_or_default()
|
||||
} else if sub_name == subscribed {
|
||||
// Subscription feed
|
||||
if req.uri().path().starts_with("/r/") {
|
||||
subreddit(&sub_name, quarantined).await.unwrap_or_default()
|
||||
} else {
|
||||
Subreddit::default()
|
||||
}
|
||||
} else {
|
||||
// Multireddit, all, popular
|
||||
Subreddit {
|
||||
name: sub_name.clone(),
|
||||
..Subreddit::default()
|
||||
}
|
||||
};
|
||||
|
||||
let req_url = req.uri().to_string();
|
||||
// Return the landing page if this is an NSFW community but the user
|
||||
// has disabled the display of NSFW content or if the instance is SFW-only.
|
||||
if sub.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) {
|
||||
return Ok(nsfw_landing(req, req_url).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, req.uri().query().unwrap_or_default());
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
|
||||
let filters = get_filters(&req);
|
||||
|
||||
// If all requested subs are filtered, we don't need to fetch posts.
|
||||
if sub_name.split('+').all(|s| filters.contains(s)) {
|
||||
template(SubredditTemplate {
|
||||
sub,
|
||||
posts: Vec::new(),
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: true,
|
||||
all_posts_filtered: false,
|
||||
all_posts_hidden_nsfw: false,
|
||||
no_posts: false,
|
||||
})
|
||||
} else {
|
||||
match Post::fetch(&path, quarantined).await {
|
||||
Ok((mut posts, after)) => {
|
||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||
let no_posts = posts.is_empty();
|
||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||
template(SubredditTemplate {
|
||||
sub,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: false,
|
||||
all_posts_filtered,
|
||||
all_posts_hidden_nsfw,
|
||||
no_posts,
|
||||
})
|
||||
}
|
||||
Err(msg) => match msg.as_str() {
|
||||
"quarantined" | "gated" => quarantine(req, sub_name, msg),
|
||||
"private" => error(req, format!("r/{} is a private community", sub_name)).await,
|
||||
"banned" => error(req, format!("r/{} has been banned from Reddit", sub_name)).await,
|
||||
_ => error(req, msg).await,
|
||||
},
|
||||
}
|
||||
Err(msg) => error(msg).await,
|
||||
}
|
||||
}
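
A small note on the redirect_url computed in community() above: the leading slash is dropped and '?', '&' and '+' are percent-encoded so the current listing URL can be embedded elsewhere (for example as a redirect query parameter). A minimal check of that escaping, with a made-up path:

fn main() {
    // Arbitrary example listing URL; only the escaping is being demonstrated.
    let url = "/r/rust+programming/top?t=week&after=t3_abc";
    let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
    assert_eq!(redirect_url, "r/rust%2Bprogramming/top%3Ft=week%26after=t3_abc");
}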
|
||||
|
||||
pub async fn wiki(req: HttpRequest) -> HttpResponse {
|
||||
let sub = req.match_info().get("sub").unwrap_or("reddit.com").to_string();
|
||||
let page = req.match_info().get("page").unwrap_or("index").to_string();
|
||||
pub fn quarantine(req: Request<Body>, sub: String, restriction: String) -> Result<Response<Body>, String> {
|
||||
let wall = WallTemplate {
|
||||
title: format!("r/{} is {}", sub, restriction),
|
||||
msg: "Please click the button below to continue to this subreddit.".to_string(),
|
||||
url: req.uri().to_string(),
|
||||
sub,
|
||||
prefs: Preferences::new(&req),
|
||||
};
|
||||
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(403)
|
||||
.header("content-type", "text/html")
|
||||
.body(wall.render().unwrap_or_default().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn add_quarantine_exception(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let subreddit = req.param("sub").ok_or("Invalid URL")?;
|
||||
let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?;
|
||||
let mut response = redirect(redir);
|
||||
response.insert_cookie(
|
||||
Cookie::build(&format!("allow_quaran_{}", subreddit.to_lowercase()), "true")
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(cookie::Expiration::Session)
|
||||
.finish(),
|
||||
);
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
|
||||
// Determine if the subreddit can be accessed
|
||||
setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
|
||||
}
|
||||
|
||||
// Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
|
||||
pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
|
||||
|
||||
// Handle random subreddits
|
||||
if sub == "random" || sub == "randnsfw" {
|
||||
if action.contains(&"filter".to_string()) || action.contains(&"unfilter".to_string()) {
|
||||
return Err("Can't filter random subreddit!".to_string());
|
||||
} else {
|
||||
return Err("Can't subscribe to random subreddit!".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
let query = req.uri().query().unwrap_or_default().to_string();
|
||||
|
||||
let preferences = Preferences::new(&req);
|
||||
let mut sub_list = preferences.subscriptions;
|
||||
let mut filters = preferences.filters;
|
||||
|
||||
// Retrieve list of posts for these subreddits to extract display names
|
||||
let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await?;
|
||||
let display_lookup: Vec<(String, &str)> = posts["data"]["children"]
|
||||
.as_array()
|
||||
.map(|list| {
|
||||
list
|
||||
.iter()
|
||||
.map(|post| {
|
||||
let display_name = post["data"]["subreddit"].as_str().unwrap_or_default();
|
||||
(display_name.to_lowercase(), display_name)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Find each subreddit name (separated by '+') in sub parameter
|
||||
for part in sub.split('+').filter(|x| x != &"") {
|
||||
// Retrieve display name for the subreddit
|
||||
let display;
|
||||
let part = if part.starts_with("u_") {
|
||||
part
|
||||
} else if let Some(&(_, display)) = display_lookup.iter().find(|x| x.0 == part.to_lowercase()) {
|
||||
// This is already known, doesn't require separate request
|
||||
display
|
||||
} else {
|
||||
// This subreddit display name isn't known, retrieve it
|
||||
let path: String = format!("/r/{}/about.json?raw_json=1", part);
|
||||
display = json(path, true).await?;
|
||||
display["data"]["display_name"].as_str().ok_or_else(|| "Failed to query subreddit name".to_string())?
|
||||
};
|
||||
|
||||
// Modify sub list based on action
|
||||
if action.contains(&"subscribe".to_string()) && !sub_list.contains(&part.to_owned()) {
|
||||
// Add each sub name to the subscribed list
|
||||
sub_list.push(part.to_owned());
|
||||
filters.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||
// Reorder sub names alphabetically
|
||||
sub_list.sort_by_key(|a| a.to_lowercase());
|
||||
filters.sort_by_key(|a| a.to_lowercase());
|
||||
} else if action.contains(&"unsubscribe".to_string()) {
|
||||
// Remove sub name from subscribed list
|
||||
sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||
} else if action.contains(&"filter".to_string()) && !filters.contains(&part.to_owned()) {
|
||||
// Add each sub name to the filtered list
|
||||
filters.push(part.to_owned());
|
||||
sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||
// Reorder sub names alphabetically
|
||||
filters.sort_by_key(|a| a.to_lowercase());
|
||||
sub_list.sort_by_key(|a| a.to_lowercase());
|
||||
} else if action.contains(&"unfilter".to_string()) {
|
||||
// Remove sub name from filtered list
|
||||
filters.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||
}
|
||||
}
|
||||
|
||||
// Redirect back to subreddit
|
||||
// check for redirect parameter if unsubscribing/unfiltering from outside sidebar
|
||||
let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") {
|
||||
format!("/{}", redirect_path)
|
||||
} else {
|
||||
format!("/r/{}", sub)
|
||||
};
|
||||
|
||||
let mut response = redirect(path);
|
||||
|
||||
// Delete cookie if empty, else set
|
||||
if sub_list.is_empty() {
|
||||
response.remove_cookie("subscriptions".to_string());
|
||||
} else {
|
||||
response.insert_cookie(
|
||||
Cookie::build("subscriptions", sub_list.join("+"))
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
);
|
||||
}
|
||||
if filters.is_empty() {
|
||||
response.remove_cookie("filters".to_string());
|
||||
} else {
|
||||
response.insert_cookie(
|
||||
Cookie::build("filters", filters.join("+"))
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(response)
|
||||
}
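
The subscriptions and filters cookies written above are plain '+'-separated lists of subreddit names, kept sorted case-insensitively. A trivial round-trip of that format (the subreddit names here are arbitrary examples):

fn main() {
    let sub_list = vec!["AskRust".to_string(), "rust".to_string()];
    let cookie_value = sub_list.join("+");
    assert_eq!(cookie_value, "AskRust+rust");

    // Reading the cookie back is a split on '+', skipping empty segments.
    let parsed: Vec<&str> = cookie_value.split('+').filter(|s| !s.is_empty()).collect();
    assert_eq!(parsed, vec!["AskRust", "rust"]);
}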
|
||||
|
||||
pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "/wiki").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
let page = req.param("page").unwrap_or_else(|| "index".to_string());
|
||||
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
|
||||
let url = req.uri().to_string();
|
||||
|
||||
match request(&path).await {
|
||||
Ok(res) => {
|
||||
let s = WikiTemplate {
|
||||
sub,
|
||||
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
|
||||
page,
|
||||
prefs: prefs(req),
|
||||
match json(path, quarantined).await {
|
||||
Ok(response) => template(WikiTemplate {
|
||||
sub,
|
||||
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("<h3>Wiki not found</h3>")),
|
||||
page,
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
}),
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" || msg == "gated" {
|
||||
quarantine(req, sub, msg)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
HttpResponse::Ok().content_type("text/html").body(s)
|
||||
}
|
||||
Err(msg) => error(msg).await,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "/about/sidebar").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
// Build the Reddit JSON API url
|
||||
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
|
||||
let url = req.uri().to_string();
|
||||
|
||||
// Send a request to the url
|
||||
match json(path, quarantined).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(response) => template(WikiTemplate {
|
||||
wiki: rewrite_urls(&val(&response, "description_html")),
|
||||
// wiki: format!(
|
||||
// "{}<hr><h1>Moderators</h1><br><ul>{}</ul>",
|
||||
// rewrite_urls(&val(&response, "description_html"),
|
||||
// moderators(&sub, quarantined).await.unwrap_or(vec!["Could not fetch moderators".to_string()]).join(""),
|
||||
// ),
|
||||
sub,
|
||||
page: "Sidebar".to_string(),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
}),
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" || msg == "gated" {
|
||||
quarantine(req, sub, msg)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// pub async fn moderators(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
|
||||
// // Retrieve and format the html for the moderators list
|
||||
// Ok(
|
||||
// moderators_list(sub, quarantined)
|
||||
// .await?
|
||||
// .iter()
|
||||
// .map(|m| format!("<li><a style=\"color: var(--accent)\" href=\"/u/{name}\">{name}</a></li>", name = m))
|
||||
// .collect(),
|
||||
// )
|
||||
// }
|
||||
|
||||
// async fn moderators_list(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
|
||||
// // Build the moderator list URL
|
||||
// let path: String = format!("/r/{}/about/moderators.json?raw_json=1", sub);
|
||||
|
||||
// // Retrieve response
|
||||
// json(path, quarantined).await.map(|response| {
|
||||
// // Traverse json tree and format into list of strings
|
||||
// response["data"]["children"]
|
||||
// .as_array()
|
||||
// .unwrap_or(&Vec::new())
|
||||
// .iter()
|
||||
// .filter_map(|moderator| {
|
||||
// let name = moderator["name"].as_str().unwrap_or_default();
|
||||
// if name.is_empty() {
|
||||
// None
|
||||
// } else {
|
||||
// Some(name.to_string())
|
||||
// }
|
||||
// })
|
||||
// .collect::<Vec<_>>()
|
||||
// })
|
||||
// }
|
||||
|
||||
// SUBREDDIT
|
||||
async fn subreddit(sub: &str) -> Result<Subreddit, String> {
|
||||
async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
|
||||
// Build the Reddit JSON API url
|
||||
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
|
||||
|
||||
// Send a request to the url
|
||||
match request(&path).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(res) => {
|
||||
// Metadata regarding the subreddit
|
||||
let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
|
||||
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
|
||||
let res = json(path, quarantined).await?;
|
||||
|
||||
// Fetch subreddit icon either from the community_icon or icon_img value
|
||||
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or("").split('?').collect::<Vec<&str>>()[0];
|
||||
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
|
||||
// Metadata regarding the subreddit
|
||||
let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
|
||||
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
|
||||
|
||||
let sub = Subreddit {
|
||||
name: val(&res, "display_name"),
|
||||
title: val(&res, "title"),
|
||||
description: val(&res, "public_description"),
|
||||
info: rewrite_url(&val(&res, "description_html").replace("\\", "")),
|
||||
icon: format_url(icon.as_str()),
|
||||
members: format_num(members),
|
||||
active: format_num(active),
|
||||
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
|
||||
};
|
||||
// Fetch subreddit icon either from the community_icon or icon_img value
|
||||
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or_default();
|
||||
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
|
||||
|
||||
Ok(sub)
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
Err(msg) => return Err(msg),
|
||||
}
|
||||
Ok(Subreddit {
|
||||
name: val(&res, "display_name"),
|
||||
title: val(&res, "title"),
|
||||
description: val(&res, "public_description"),
|
||||
info: rewrite_urls(&val(&res, "description_html")),
|
||||
// moderators: moderators_list(sub, quarantined).await.unwrap_or_default(),
|
||||
icon: format_url(&icon),
|
||||
members: format_num(members),
|
||||
active: format_num(active),
|
||||
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
|
||||
nsfw: res["data"]["over18"].as_bool().unwrap_or_default(),
|
||||
})
|
||||
}
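
The icon fallback in subreddit() above prefers data.community_icon and falls back to icon_img when it is empty. A hedged sketch of just that branch, assuming serde_json (whose Value indexing matches the res[...] access pattern shown here) and approximating the crate's val() helper with direct indexing; the URL is a placeholder:

use serde_json::json;

fn main() {
    let res = json!({
        "data": { "community_icon": "", "icon_img": "https://example.com/icon.png" }
    });

    let community_icon = res["data"]["community_icon"].as_str().unwrap_or_default();
    let icon = if community_icon.is_empty() {
        res["data"]["icon_img"].as_str().unwrap_or_default().to_string()
    } else {
        community_icon.to_string()
    };

    assert_eq!(icon, "https://example.com/icon.png");
}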
|
||||
|
src/user.rs (143 changed lines)
@@ -1,80 +1,131 @@
|
||||
// CRATES
|
||||
use crate::utils::{error, fetch_posts, format_url, param, prefs, request, Post, Preferences, User};
|
||||
use actix_web::{HttpRequest, HttpResponse, Result};
|
||||
use crate::client::json;
|
||||
use crate::server::RequestExt;
|
||||
use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User};
|
||||
use askama::Template;
|
||||
use time::OffsetDateTime;
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::{macros::format_description, OffsetDateTime};
|
||||
|
||||
// STRUCTS
|
||||
#[derive(Template)]
|
||||
#[template(path = "user.html", escape = "none")]
|
||||
#[template(path = "user.html")]
|
||||
struct UserTemplate {
|
||||
user: User,
|
||||
posts: Vec<Post>,
|
||||
sort: (String, String),
|
||||
ends: (String, String),
|
||||
/// "overview", "comments", or "submitted"
|
||||
listing: String,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
redirect_url: String,
|
||||
/// Whether the user themself is filtered.
|
||||
is_filtered: bool,
|
||||
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||
/// and all fetched posts being filtered).
|
||||
all_posts_filtered: bool,
|
||||
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||
all_posts_hidden_nsfw: bool,
|
||||
no_posts: bool,
|
||||
}
|
||||
|
||||
// FUNCTIONS
|
||||
pub async fn profile(req: HttpRequest) -> HttpResponse {
|
||||
pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let listing = req.param("listing").unwrap_or_else(|| "overview".to_string());
|
||||
|
||||
// Build the Reddit JSON API path
|
||||
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
|
||||
let path = format!(
|
||||
"/user/{}/{}.json?{}&raw_json=1",
|
||||
req.param("name").unwrap_or_else(|| "reddit".to_string()),
|
||||
listing,
|
||||
req.uri().query().unwrap_or_default(),
|
||||
);
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26");
|
||||
|
||||
// Retrieve other variables from Libreddit request
|
||||
let sort = param(&path, "sort");
|
||||
let username = req.match_info().get("username").unwrap_or("").to_string();
|
||||
let sort = param(&path, "sort").unwrap_or_default();
|
||||
let username = req.param("name").unwrap_or_default();
|
||||
|
||||
// Request user posts/comments from Reddit
|
||||
let posts = fetch_posts(&path, "Comment".to_string()).await;
|
||||
// Retrieve info from the user's about page.
|
||||
let user = user(&username).await.unwrap_or_default();
|
||||
|
||||
match posts {
|
||||
Ok((posts, after)) => {
|
||||
// If you can get user posts, also request user data
|
||||
let user = user(&username).await.unwrap_or_default();
|
||||
let req_url = req.uri().to_string();
|
||||
// Return the landing page if Reddit deems this user NSFW, but the user
|
||||
// has disabled the display of NSFW content or if the instance
|
||||
// is SFW-only.
|
||||
if user.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) {
|
||||
return Ok(nsfw_landing(req, req_url).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let s = UserTemplate {
|
||||
user,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t")),
|
||||
ends: (param(&path, "after"), after),
|
||||
prefs: prefs(req),
|
||||
let filters = get_filters(&req);
|
||||
if filters.contains(&["u_", &username].concat()) {
|
||||
template(UserTemplate {
|
||||
user,
|
||||
posts: Vec::new(),
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
|
||||
listing,
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: true,
|
||||
all_posts_filtered: false,
|
||||
all_posts_hidden_nsfw: false,
|
||||
no_posts: false,
|
||||
})
|
||||
} else {
|
||||
// Request user posts/comments from Reddit
|
||||
match Post::fetch(&path, false).await {
|
||||
Ok((mut posts, after)) => {
|
||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||
let no_posts = posts.is_empty();
|
||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||
template(UserTemplate {
|
||||
user,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||
listing,
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: false,
|
||||
all_posts_filtered,
|
||||
all_posts_hidden_nsfw,
|
||||
no_posts,
|
||||
})
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
HttpResponse::Ok().content_type("text/html").body(s)
|
||||
// If there is an error show error page
|
||||
Err(msg) => error(req, msg).await,
|
||||
}
|
||||
// If there is an error show error page
|
||||
Err(msg) => error(msg).await,
|
||||
}
|
||||
}
|
||||
|
||||
// USER
|
||||
async fn user(name: &str) -> Result<User, String> {
|
||||
// Build the Reddit JSON API path
|
||||
let path: String = format!("/user/{}/about.json", name);
|
||||
let path: String = format!("/user/{}/about.json?raw_json=1", name);
|
||||
|
||||
// Send a request to the url
|
||||
match request(&path).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(res) => {
|
||||
// Grab creation date as unix timestamp
|
||||
let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
|
||||
json(path, false).await.map(|res| {
|
||||
// Grab creation date as unix timestamp
|
||||
let created_unix = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
|
||||
let created = OffsetDateTime::from_unix_timestamp(created_unix).unwrap_or(OffsetDateTime::UNIX_EPOCH);
|
||||
|
||||
// nested_val function used to parse JSON from Reddit APIs
|
||||
let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
|
||||
// Closure used to parse JSON from Reddit APIs
|
||||
let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
|
||||
|
||||
// Parse the JSON output into a User struct
|
||||
Ok(User {
|
||||
name: name.to_string(),
|
||||
title: about("title"),
|
||||
icon: format_url(about("icon_img").as_str()),
|
||||
karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
|
||||
created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
|
||||
banner: about("banner_img"),
|
||||
description: about("public_description"),
|
||||
})
|
||||
// Parse the JSON output into a User struct
|
||||
User {
|
||||
name: res["data"]["name"].as_str().unwrap_or(name).to_owned(),
|
||||
title: about("title"),
|
||||
icon: format_url(&about("icon_img")),
|
||||
karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
|
||||
created: created.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default(),
|
||||
banner: about("banner_img"),
|
||||
description: about("public_description"),
|
||||
nsfw: res["data"]["subreddit"]["over_18"].as_bool().unwrap_or_default(),
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
Err(msg) => return Err(msg),
|
||||
}
|
||||
})
|
||||
}
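
The account creation date above is formatted with time's format_description! macro rather than the old strftime-style string. A standalone check of that formatting (it assumes the time crate with the macros and formatting features enabled; the timestamp is an arbitrary example value):

use time::{macros::format_description, OffsetDateTime};

fn main() {
    let created = OffsetDateTime::from_unix_timestamp(1_119_552_000).unwrap_or(OffsetDateTime::UNIX_EPOCH);
    let formatted = created
        .format(format_description!("[month repr:short] [day] '[year repr:last_two]"))
        .unwrap_or_default();

    // Prints something like: Jun 23 '05
    println!("{}", formatted);
}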
|
||||
|
src/utils.rs (1232 changed lines): file diff suppressed because it is too large
static/Inter.var.woff2: new binary file (not shown)
static/apple-touch-icon.png: new binary file (8.0 KiB)
(another binary image changed: 789 B before, 4.2 KiB after)
static/favicon.png: new binary file (969 B)
static/hls.min.js: new vendored file (5 lines), diff suppressed because one or more lines are too long
static/logo.png: new binary file (7.9 KiB)
static/manifest.json: new file (23 lines)
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "Libreddit",
|
||||
"short_name": "Libreddit",
|
||||
"display": "standalone",
|
||||
"background_color": "#1f1f1f",
|
||||
"description": "An alternative private front-end to Reddit",
|
||||
"theme_color": "#1f1f1f",
|
||||
"icons": [
|
||||
{
|
||||
"src": "logo.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "apple-touch-icon.png",
|
||||
"sizes": "180x180"
|
||||
},
|
||||
{
|
||||
"src": "favicon.ico",
|
||||
"sizes": "32x32"
|
||||
}
|
||||
]
|
||||
}
|
static/playHLSVideo.js: new file (77 lines)
@@ -0,0 +1,77 @@
|
||||
// @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0
|
||||
(function () {
|
||||
if (Hls.isSupported()) {
|
||||
var videoSources = document.querySelectorAll("video source[type='application/vnd.apple.mpegurl']");
|
||||
videoSources.forEach(function (source) {
|
||||
var playlist = source.src;
|
||||
|
||||
var oldVideo = source.parentNode;
|
||||
var autoplay = oldVideo.classList.contains("hls_autoplay");
|
||||
|
||||
// If HLS is supported natively then don't use hls.js
|
||||
if (oldVideo.canPlayType(source.type)) {
|
||||
if (autoplay) {
|
||||
oldVideo.play();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Replace video with copy that will have all "source" elements removed
|
||||
var newVideo = oldVideo.cloneNode(true);
|
||||
var allSources = newVideo.querySelectorAll("source");
|
||||
allSources.forEach(function (source) {
|
||||
source.remove();
|
||||
});
|
||||
|
||||
// Empty source to enable play event
|
||||
newVideo.src = "about:blank";
|
||||
|
||||
oldVideo.parentNode.replaceChild(newVideo, oldVideo);
|
||||
|
||||
function initializeHls() {
|
||||
newVideo.removeEventListener('play', initializeHls);
|
||||
|
||||
var hls = new Hls({ autoStartLoad: false });
|
||||
hls.loadSource(playlist);
|
||||
hls.attachMedia(newVideo);
|
||||
hls.on(Hls.Events.MANIFEST_PARSED, function () {
|
||||
hls.loadLevel = hls.levels.length - 1;
|
||||
hls.startLoad();
|
||||
newVideo.play();
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.ERROR, function (event, data) {
|
||||
var errorType = data.type;
|
||||
var errorFatal = data.fatal;
|
||||
if (errorFatal) {
|
||||
switch (errorType) {
|
||||
case Hls.ErrorType.NETWORK_ERROR:
|
||||
hls.startLoad();
|
||||
break;
|
||||
case Hls.ErrorType.MEDIA_ERROR:
|
||||
hls.recoverMediaError();
|
||||
break;
|
||||
default:
|
||||
hls.destroy();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
console.error("HLS error", data);
|
||||
});
|
||||
}
|
||||
|
||||
newVideo.addEventListener('play', initializeHls);
|
||||
|
||||
if (autoplay) {
|
||||
newVideo.play();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var videos = document.querySelectorAll("video.hls_autoplay");
|
||||
videos.forEach(function (video) {
|
||||
video.setAttribute("autoplay", "");
|
||||
});
|
||||
}
|
||||
})();
|
||||
// @license-end
|
@@ -1,2 +0,0 @@
|
||||
User-agent: *
|
||||
Allow: /
|
static/style.css (1282 changed lines): file diff suppressed because it is too large
static/themes/black.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Black theme setting */
|
||||
.black {
|
||||
--accent: #009a9a;
|
||||
--green: #00a229;
|
||||
--text: white;
|
||||
--foreground: #0f0f0f;
|
||||
--background: black;
|
||||
--outside: black;
|
||||
--post: black;
|
||||
--panel-border: 2px solid #0f0f0f;
|
||||
--highlighted: #0f0f0f;
|
||||
--visited: #aaa;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
static/themes/dark.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Dark theme setting */
|
||||
.dark{
|
||||
--accent: aqua;
|
||||
--green: #5cff85;
|
||||
--text: white;
|
||||
--foreground: #222;
|
||||
--background: #0f0f0f;
|
||||
--outside: #1f1f1f;
|
||||
--post: #161616;
|
||||
--panel-border: 1px solid #333;
|
||||
--highlighted: #333;
|
||||
--visited: #aaa;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
|
||||
}
|
static/themes/doomone.css: new file (13 lines)
@@ -0,0 +1,13 @@
|
||||
.doomone {
|
||||
--accent: #51afef;
|
||||
--green: #00a229;
|
||||
--text: #bbc2cf;
|
||||
--foreground: #3d4148;
|
||||
--background: #282c34;
|
||||
--outside: #52565c;
|
||||
--post: #24272e;
|
||||
--panel-border: 2px solid #52565c;
|
||||
--highlighted: #686b70;
|
||||
--visited: #969692;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
static/themes/dracula.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Dracula theme setting */
|
||||
.dracula {
|
||||
--accent: #bd93f9;
|
||||
--green: #50fa7b;
|
||||
--text: #f8f8f2;
|
||||
--foreground: #3d4051;
|
||||
--background: #282a36;
|
||||
--outside: #393c4d;
|
||||
--post: #333544;
|
||||
--panel-border: 2px solid #44475a;
|
||||
--highlighted: #4e5267;
|
||||
--visited: #969692;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
static/themes/gold.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Gold theme setting */
|
||||
.gold {
|
||||
--accent: #f2aa4c;
|
||||
--green: #5cff85;
|
||||
--text: white;
|
||||
--foreground: #234;
|
||||
--background: #101820;
|
||||
--outside: #1b2936;
|
||||
--post: #1b2936;
|
||||
--panel-border: 0px solid black;
|
||||
--highlighted: #234;
|
||||
--visited: #aaa;
|
||||
--shadow: 0 2px 5px rgba(0, 0, 0, 0.5);
|
||||
}
|
static/themes/gruvboxdark.css: new file (13 lines)
@@ -0,0 +1,13 @@
|
||||
/* Gruvbox-Dark theme setting */
|
||||
.gruvboxdark {
|
||||
--accent: #8ec07c;
|
||||
--green: #b8bb26;
|
||||
--text: #ebdbb2;
|
||||
--foreground: #3c3836;
|
||||
--background: #282828;
|
||||
--outside: #3c3836;
|
||||
--post: #3c3836;
|
||||
--panel-border: 1px solid #504945;
|
||||
--highlighted: #282828;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
|
||||
}
|
static/themes/gruvboxlight.css: new file (18 lines)
@@ -0,0 +1,18 @@
|
||||
/* Gruvbox-Light theme setting */
|
||||
.gruvboxlight {
|
||||
--accent: #427b58;
|
||||
--green: #79740e;
|
||||
--text: #3c3836;
|
||||
--foreground: #ebdbb2;
|
||||
--background: #fbf1c7;
|
||||
--outside: #ebdbb2;
|
||||
--post: #ebdbb2;
|
||||
--panel-border: 1px solid #d5c4a1;
|
||||
--highlighted: #fbf1c7;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.25);
|
||||
}
|
||||
|
||||
html:has(> .gruvboxlight) {
|
||||
/* Hint color theme to browser for scrollbar */
|
||||
color-scheme: light;
|
||||
}
|
static/themes/laserwave.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Laserwave theme setting */
|
||||
.laserwave {
|
||||
--accent: #eb64b9;
|
||||
--green: #74dfc4;
|
||||
--text: #e0dfe1;
|
||||
--foreground: #302a36;
|
||||
--background: #27212e;
|
||||
--outside: #3e3647;
|
||||
--post: #3e3647;
|
||||
--panel-border: 2px solid #2f2738;
|
||||
--highlighted: #302a36;
|
||||
--visited: #91889b;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
static/themes/light.css: new file (19 lines)
@@ -0,0 +1,19 @@
|
||||
/* Light theme setting */
|
||||
.light {
|
||||
--accent: #009a9a;
|
||||
--green: #00a229;
|
||||
--text: black;
|
||||
--foreground: #f5f5f5;
|
||||
--background: #ddd;
|
||||
--outside: #ececec;
|
||||
--post: #eee;
|
||||
--panel-border: 1px solid #ccc;
|
||||
--highlighted: white;
|
||||
--visited: #555;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
html:has(> .light) {
|
||||
/* Hint color theme to browser for scrollbar */
|
||||
color-scheme: light;
|
||||
}
|
static/themes/nord.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Nord theme setting */
|
||||
.nord {
|
||||
--accent: #8fbcbb;
|
||||
--green: #a3be8c;
|
||||
--text: #eceff4;
|
||||
--foreground: #3b4252;
|
||||
--background: #2e3440;
|
||||
--outside: #434c5e;
|
||||
--post: #434c5e;
|
||||
--panel-border: 2px solid #4c566a;
|
||||
--highlighted: #3b4252;
|
||||
--visited: #a3a5aa;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
static/themes/rosebox.css: new file (13 lines)
@@ -0,0 +1,13 @@
|
||||
/* Rosebox theme setting */
|
||||
.rosebox {
|
||||
--accent: #a57562;
|
||||
--green: #a3be8c;
|
||||
--text: white;
|
||||
--foreground: #222;
|
||||
--background: #262626;
|
||||
--outside: #222;
|
||||
--post: #222;
|
||||
--panel-border: 1px solid #222;
|
||||
--highlighted: #262626;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
|
||||
}
|
static/themes/violet.css: new file (14 lines)
@@ -0,0 +1,14 @@
|
||||
/* Violet theme setting */
|
||||
.violet {
|
||||
--accent: #7c71dd;
|
||||
--green: #5cff85;
|
||||
--text: white;
|
||||
--foreground: #1F2347;
|
||||
--background: #12152b;
|
||||
--outside: #181c3a;
|
||||
--post: #181c3a;
|
||||
--panel-border: 1px solid #1F2347;
|
||||
--highlighted: #1F2347;
|
||||
--visited: #aaa;
|
||||
--shadow: 0 2px 5px rgba(0, 0, 0, 0.5);
|
||||
}
|
templates/base.html
@@ -1,32 +1,59 @@
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
{% block head %}
|
||||
<title>{% block title %}Libreddit{% endblock %}</title>
|
||||
<meta http-equiv="Referrer-Policy" content="no-referrer">
|
||||
<meta http-equiv="Content-Security-Policy" content="default-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; form-action 'self';">
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||
<meta name="description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<!-- General PWA -->
|
||||
<meta name="theme-color" content="#1F1F1F">
|
||||
<!-- iOS Application -->
|
||||
<meta name="apple-mobile-web-app-title" content="Libreddit">
|
||||
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||
<meta name="apple-mobile-web-app-status-bar-style" content="default">
|
||||
<!-- Android -->
|
||||
<meta name="mobile-web-app-capable" content="yes">
|
||||
<!-- iOS Logo -->
|
||||
<link href="/touch-icon-iphone.png" rel="apple-touch-icon">
|
||||
<!-- PWA Manifest -->
|
||||
<link rel="manifest" type="application/json" href="/manifest.json">
|
||||
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
|
||||
<link rel="stylesheet" href="/style.css">
|
||||
<link rel="stylesheet" type="text/css" href="/style.css?v={{ env!("CARGO_PKG_VERSION") }}">
|
||||
{% endblock %}
|
||||
</head>
|
||||
<body class="
|
||||
{% if prefs.layout != "" %}{{ prefs.layout }}{% endif %}
|
||||
{% if prefs.wide == "on" %} wide{% endif %}
|
||||
{% if prefs.theme == "light" %} light{% endif %}">
|
||||
{% if prefs.theme != "system" %} {{ prefs.theme }}{% endif %}">
|
||||
<!-- NAVIGATION BAR -->
|
||||
<nav>
|
||||
<p id="logo">
|
||||
<a id="libreddit" href="/">
|
||||
<span id="lib">lib</span><span id="reddit">reddit.</span>
|
||||
</a>
|
||||
<span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span>
|
||||
<a id="settings_link" href="/settings">settings</a>
|
||||
</p>
|
||||
<div id="logo">
|
||||
<a id="libreddit" href="/"><span id="lib">lib</span><span id="reddit">reddit.</span></a>
|
||||
{% block subscriptions %}{% endblock %}
|
||||
</div>
|
||||
{% block search %}{% endblock %}
|
||||
<a id="github" href="https://github.com/spikecodes/libreddit">GITHUB</a>
|
||||
<div id="links">
|
||||
<a id="reddit_link" {% if prefs.disable_visit_reddit_confirmation != "on" %}href="#popup"{% else %}href="https://www.reddit.com{{ url }}" rel="nofollow"{% endif %}>
|
||||
<span>reddit</span>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M22 2L12 22"/>
|
||||
<path d="M2 6.70587C3.33333 8.07884 3.33333 11.5971 3.33333 11.5971M3.33333 19.647V11.5971M3.33333 11.5971C3.33333 11.5971 5.125 7.47817 8 7.47817C10.875 7.47817 12 8.85114 12 8.85114"/>
|
||||
</svg>
|
||||
</a>
|
||||
{% if prefs.disable_visit_reddit_confirmation != "on" %}
|
||||
{% call utils::visit_reddit_confirmation(url) %}
|
||||
{% endif %}
|
||||
<a id="settings_link" href="/settings">
|
||||
<span>settings</span>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title>settings</title>
|
||||
<circle cx="12" cy="12" r="3"/><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<!-- MAIN CONTENT -->
|
||||
@@ -36,5 +63,18 @@
|
||||
{% endblock %}
|
||||
</main>
|
||||
{% endblock %}
|
||||
|
||||
<!-- FOOTER -->
|
||||
{% block footer %}
|
||||
<footer>
|
||||
<p id="version">v{{ env!("CARGO_PKG_VERSION") }}</p>
|
||||
<div class="footer-button">
|
||||
<a href="/info" title="View instance information">ⓘ View instance info</a>
|
||||
</div>
|
||||
<div class="footer-button">
|
||||
<a href="https://github.com/libreddit/libreddit" title="View code on GitHub"><> Code</a>
|
||||
</div>
|
||||
</footer>
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
templates/comment.html: new file (41 lines)
@@ -0,0 +1,41 @@
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
{% if kind == "more" && parent_kind == "t1" %}
|
||||
<a class="deeper_replies" href="{{ post_link }}{{ parent_id }}">→ More replies ({{ more_count }})</a>
|
||||
{% else if kind == "t1" %}
|
||||
<div id="{{ id }}" class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score" title="{{ score.1 }}">{{ score.0 }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" {% if !collapsed || highlighted %}open{% endif %}>
|
||||
<summary class="comment_data">
|
||||
{% if author.name != "[deleted]" %}
|
||||
<a class="comment_author {{ author.distinguished }} {% if author.name == post_author %}op{% endif %}" href="/user/{{ author.name }}">u/{{ author.name }}</a>
|
||||
{% else %}
|
||||
<span class="comment_author {{ author.distinguished }}">u/[deleted]</span>
|
||||
{% endif %}
|
||||
{% if author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post_link }}{{ id }}/?context=3" class="created" title="{{ created }}">{{ rel_time }}</a>
|
||||
{% if edited.0 != "".to_string() %}<span class="edited" title="{{ edited.1 }}">edited {{ edited.0 }}</span>{% endif %}
|
||||
{% if !awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
<span class="dot">•</span>
|
||||
{% for award in awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
</span>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</summary>
|
||||
{% if is_filtered %}
|
||||
<div class="comment_body_filtered {% if highlighted %}highlighted{% endif %}">(Filtered content)</div>
|
||||
{% else %}
|
||||
<div class="comment_body {% if highlighted %}highlighted{% endif %}">{{ body|safe }}</div>
|
||||
{% endif %}
|
||||
<blockquote class="replies">{% for c in replies -%}{{ c.render().unwrap()|safe }}{%- endfor %}
|
||||
</blockquote>
|
||||
</details>
|
||||
</div>
|
||||
{% endif %}
|
templates/duplicates.html: new file (107 lines)
@@ -0,0 +1,107 @@
|
||||
{% extends "base.html" %}
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
{% block title %}{{ post.title }} - r/{{ post.community }}{% endblock %}
|
||||
|
||||
{% block search %}
|
||||
{% call utils::search(["/r/", post.community.as_str()].concat(), "") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %}
|
||||
{% block head %}
|
||||
{% call super() %}
|
||||
{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list(post.community.as_str()) %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
{% call utils::post(post) %}
|
||||
|
||||
<!-- DUPLICATES -->
|
||||
{% if post.num_duplicates == 0 %}
|
||||
<span class="listing_warn">(No duplicates found)</span>
|
||||
{% else if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
<span class="listing_warn">(Enable "Show NSFW posts" in <a href="/settings">settings</a> to show duplicates)</span>
|
||||
{% else %}
|
||||
<div id="duplicates_msg"><h3>Duplicates</h3></div>
|
||||
{% if num_posts_filtered > 0 %}
|
||||
<span class="listing_warn">
|
||||
{% if all_posts_filtered %}
|
||||
(All posts have been filtered)
|
||||
{% else %}
|
||||
(Some posts have been filtered)
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
<div id="sort">
|
||||
<div id="sort_options">
|
||||
<a {% if params.sort.is_empty() || params.sort.eq("num_comments") %}class="selected"{% endif %} href="?sort=num_comments">
|
||||
Number of comments
|
||||
</a>
|
||||
<a {% if params.sort.eq("new") %}class="selected"{% endif %} href="?sort=new">
|
||||
New
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="posts">
|
||||
{% for post in duplicates -%}
|
||||
{# TODO: utils::post should be reworked to permit a truncated display of a post as below #}
|
||||
{% if !(post.flags.nsfw) || prefs.show_nsfw == "on" %}
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
|
||||
<p class="post_header">
|
||||
{% let community -%}
|
||||
{% if post.community.starts_with("u_") -%}
|
||||
{% let community = format!("u/{}", &post.community[2..]) -%}
|
||||
{% else -%}
|
||||
{% let community = format!("r/{}", post.community) -%}
|
||||
{% endif -%}
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
</span>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</p>
|
||||
<h2 class="post_title">
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};"
|
||||
dir="ltr">{% call utils::render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</h2>
|
||||
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
<div class="post_footer">
|
||||
<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} comments">{{ post.comments.0 }} comments</a>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
{% endif %}
|
||||
{%- endfor %}
|
||||
</div>
|
||||
|
||||
<footer>
|
||||
{% if params.before != "" %}
|
||||
<a href="?before={{ params.before }}{% if !params.sort.is_empty() %}&sort={{ params.sort }}{% endif %}" accesskey="P">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if params.after != "" %}
|
||||
<a href="?after={{ params.after }}{% if !params.sort.is_empty() %}&sort={{ params.sort }}{% endif %}" accesskey="N">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
templates/error.html
@@ -2,5 +2,8 @@
|
||||
{% block title %}Error: {{ msg }}{% endblock %}
|
||||
{% block sortstyle %}{% endblock %}
|
||||
{% block content %}
|
||||
<h1 style="text-align: center; font-size: 50px;">{{ msg }}</h1>
|
||||
<div id="error">
|
||||
<h1>{{ msg }}</h1>
|
||||
<h3>Head back <a href="/">home</a>?</h3>
|
||||
</div>
|
||||
{% endblock %}
|
templates/message.html: new file (10 lines)
@@ -0,0 +1,10 @@
|
||||
{% extends "base.html" %}
|
||||
{% block title %}{{ title }}{% endblock %}
|
||||
{% block sortstyle %}{% endblock %}
|
||||
{% block content %}
|
||||
<div id="message">
|
||||
<h1>{{ title }}</h1>
|
||||
<br>
|
||||
{{ body|safe }}
|
||||
</div>
|
||||
{% endblock %}
|
templates/nsfwlanding.html: new file (30 lines)
@@ -0,0 +1,30 @@
|
||||
{% extends "base.html" %}
|
||||
{% block title %}NSFW content gated{% endblock %}
|
||||
{% block sortstyle %}{% endblock %}
|
||||
{% block content %}
|
||||
<div id="nsfw_landing">
|
||||
<h1>
|
||||
😱
|
||||
{% if res_type == crate::utils::ResourceType::Subreddit %}
|
||||
r/{{ res }} is a NSFW community!
|
||||
{% else if res_type == crate::utils::ResourceType::User %}
|
||||
u/{{ res }}'s content is NSFW!
|
||||
{% else if res_type == crate::utils::ResourceType::Post %}
|
||||
This post is NSFW!
|
||||
{% endif %}
|
||||
</h1>
|
||||
<br />
|
||||
|
||||
<p>
|
||||
{% if crate::utils::sfw_only() %}
|
||||
This instance of Libreddit is SFW-only.
|
||||
{% else %}
|
||||
Enable "Show NSFW posts" in <a href="/settings">settings</a> to view this {% if res_type == crate::utils::ResourceType::Subreddit %}subreddit{% else if res_type == crate::utils::ResourceType::User %}user's posts or comments{% else if res_type == crate::utils::ResourceType::Post %}post{% endif %}. <br>
|
||||
{% if res_type == crate::utils::ResourceType::Post %} You can also temporarily bypass this gate and view the post by clicking on this <a href="{{url}}&bypass_nsfw_landing">link</a>.{% endif %}
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% block footer %}
|
||||
{% endblock %}
|
||||
|
@ -10,112 +10,77 @@
|
||||
{% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %}
|
||||
{% block head %}
|
||||
{% call super() %}
|
||||
<!-- Meta Tags -->
|
||||
<meta name="author" content="u/{{ post.author.name }}">
|
||||
<meta name="title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="og:title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="og:description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
<meta property="og:url" content="{{ post.permalink }}">
|
||||
<meta property="twitter:url" content="{{ post.permalink }}">
|
||||
<meta property="twitter:title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="twitter:description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
{% if post.post_type == "image" %}
|
||||
<meta property="og:type" content="image">
|
||||
<meta property="og:image" content="{{ post.thumbnail.url }}">
|
||||
<meta property="twitter:card" content="summary_large_image">
|
||||
<meta property="twitter:image" content="{{ post.thumbnail.url }}">
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
<meta property="twitter:card" content="video">
|
||||
<meta property="og:type" content="video">
|
||||
<meta property="og:video" content="{{ post.media.url }}">
|
||||
<meta property="og:video:type" content="video/mp4">
|
||||
{% else %}
|
||||
<meta property="og:type" content="website">
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
<!-- OPEN COMMENT MACRO -->
|
||||
{% macro comment(item) -%}
|
||||
<div id="{{ item.id }}" class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score">{{ item.score }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data">
|
||||
<a class="comment_author {{ item.author.distinguished }} {% if item.author.name == post.author.name %}op{% endif %}" href="/u/{{ item.author.name }}">u/{{ item.author.name }}</a>
|
||||
{% if item.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(item.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="created" title="{{ post.created }}">{{ item.rel_time }}</span>
|
||||
</summary>
|
||||
<div class="comment_body">{{ item.body }}</div>
|
||||
{%- endmacro %}
|
||||
|
||||
<!-- CLOSE COMMENT MACRO -->
|
||||
{% macro close() %}
|
||||
</details></div>
|
||||
{% endmacro %}
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list(post.community.as_str()) %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
|
||||
<!-- POST CONTENT -->
|
||||
<div class="post highlighted">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<div class="post_text">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</p>
|
||||
<a href="{{ post.permalink }}" class="post_title">
|
||||
{{ post.title }}
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<small class="post_flair" style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }}">{% call utils::render_flair(post.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
</a>
|
||||
|
||||
<!-- POST MEDIA -->
|
||||
{% if post.post_type == "image" %}
|
||||
<img class="post_media" src="{{ post.media }}"/>
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
<video class="post_media" src="{{ post.media }}" controls autoplay loop></video>
|
||||
{% else if post.post_type == "link" %}
|
||||
<a id="post_url" href="{{ post.media }}">{{ post.media }}</a>
|
||||
{% endif %}
|
||||
|
||||
<!-- POST BODY -->
|
||||
<div class="post_body">{{ post.body }}</div>
|
||||
<div id="post_footer">
|
||||
<ul id="post_links">
|
||||
<li><a href="/{{ post.id }}">permalink</a></li>
|
||||
<li><a href="https://reddit.com/{{ post.id }}">reddit</a></li>
|
||||
</ul>
|
||||
<p>{{ post.upvote_ratio }}% Upvoted</p>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% call utils::post(post) %}
|
||||
|
||||
<!-- SORT FORM -->
|
||||
<div id="commentQueryForms">
|
||||
<form id="sort">
|
||||
<select name="sort">
|
||||
<p id="comment_count">{{post.comments.0}} {% if post.comments.0 == "1" %}comment{% else %}comments{% endif %} <span id="sorted_by">sorted by </span></p>
|
||||
<select name="sort" title="Sort comments by" id="commentSortSelect">
|
||||
{% call utils::options(sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select><input id="sort_submit" type="submit" value="→">
|
||||
</form>
|
||||
</select>
|
||||
<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
<!-- SEARCH FORM -->
|
||||
<form id="sort">
|
||||
<input id="search" class="commentQuery" type="search" name="q" value="{{ comment_query }}" placeholder="Search comments">
|
||||
<input type="hidden" name="type" value="comment">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
{% if comment_query != "" %}
|
||||
Comments containing "{{ comment_query }}" | <a id="allCommentsLink" href="{{ url_without_query }}">All comments</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<!-- COMMENTS -->
|
||||
{% for c in comments -%}
|
||||
<div class="thread">
|
||||
<!-- EACH COMMENT -->
|
||||
{% call comment(c) %}
|
||||
<blockquote class="replies">{% for reply1 in c.replies %}{% call comment(reply1) %}
|
||||
<!-- FIRST-LEVEL REPLIES -->
|
||||
<blockquote class="replies">{% for reply2 in reply1.replies %}{% call comment(reply2) %}
|
||||
<!-- SECOND-LEVEL REPLIES -->
|
||||
<blockquote class="replies">{% for reply3 in reply2.replies %}{% call comment(reply3) %}
|
||||
<!-- THIRD-LEVEL REPLIES -->
|
||||
{% if reply3.replies.len() > 0 %}
|
||||
<!-- LINK TO CONTINUE REPLIES -->
|
||||
<a class="deeper_replies" href="{{ post.permalink }}{{ reply3.id }}">→ More replies</a>
|
||||
{% endif %}
|
||||
{% call close() %}
|
||||
{% endfor %}
|
||||
</blockquote>{% call close() %}
|
||||
{% endfor %}
|
||||
</blockquote>{% call close() %}
|
||||
{% endfor %}
|
||||
</blockquote>{% call close() %}
|
||||
{% if single_thread %}
|
||||
<p class="thread_nav"><a href="{{ post.permalink }}">View all comments</a></p>
|
||||
{% if c.parent_kind == "t1" %}
|
||||
<p class="thread_nav"><a href="?context=9999">Show parent comments</a></p>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{{ c.render().unwrap()|safe }}
|
||||
</div>
|
||||
{%- endfor %}
|
||||
|
||||
|
@ -3,112 +3,113 @@
|
||||
|
||||
{% block title %}Libreddit: search results - {{ params.q }}{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list("") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
<form id="search_sort">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q }}">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q|safe }}" title="Search libreddit">
|
||||
{% if sub != "" %}
|
||||
<div id="inside">
|
||||
<input type="checkbox" name="restrict_sr" id="restrict_sr" {% if params.restrict_sr != "" %}checked{% endif %}>
|
||||
<label for="restrict_sr" class="search_label">in r/{{ sub }}</label>
|
||||
</div>
|
||||
{% endif %}
|
||||
<select id="sort_options" name="sort">
|
||||
{% if params.typed == "sr_user" %}<input type="hidden" name="type" value="sr_user">{% endif %}
|
||||
<select id="sort_options" name="sort" title="Sort results by">
|
||||
{% call utils::options(params.sort, ["relevance", "hot", "top", "new", "comments"], "") %}
|
||||
</select>{% if params.sort != "new" %}<select id="timeframe" name="t">
|
||||
</select>{% if params.sort != "new" %}<select id="timeframe" name="t" title="Timeframe">
|
||||
{% call utils::options(params.t, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
||||
</select>{% endif %}<input id="sort_submit" type="submit" value="→">
|
||||
</form>
|
||||
</select>{% endif %}<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{% if subreddits.len() > 0 %}
|
||||
{% if !is_filtered %}
|
||||
{% if subreddits.len() > 0 || params.typed == "sr_user" %}
|
||||
<div id="search_subreddits">
|
||||
{% if params.typed == "sr_user" %}
|
||||
<a href="?q={{ params.q }}&sort={{ params.sort }}&t={{ params.t }}" class="search_subreddit" id="more_subreddits">← Back to post/comment results</a>
|
||||
{% endif %}
|
||||
{% for subreddit in subreddits %}
|
||||
<a href="{{ subreddit.url }}" class="search_subreddit">
|
||||
<p class="search_subreddit_header">
|
||||
<span class="search_subreddit_name">{{ subreddit.name }}</span>
|
||||
<span class="dot">•</span>
|
||||
<span class="search_subreddit_members">{{ subreddit.subscribers }} Members</span>
|
||||
</p>
|
||||
<p class="search_subreddit_description">{{ subreddit.description }}</p>
|
||||
<div class="search_subreddit_left">{% if subreddit.icon != "" %}<img loading="lazy" src="{{ subreddit.icon|safe }}" alt="r/{{ subreddit.name }} icon">{% endif %}</div>
|
||||
<div class="search_subreddit_right">
|
||||
<p class="search_subreddit_header">
|
||||
<span class="search_subreddit_name">r/{{ subreddit.name }}</span>
|
||||
<span class="dot">•</span>
|
||||
<span class="search_subreddit_members" title="{{ subreddit.subscribers.1 }} Members">{{ subreddit.subscribers.0 }} Members</span>
|
||||
</p>
|
||||
<p class="search_subreddit_description">{{ subreddit.description }}</p>
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
{% if params.typed != "sr_user" %}
|
||||
<a href="?q={{ params.q }}&sort={{ params.sort }}&t={{ params.t }}&type=sr_user" class="search_subreddit" id="more_subreddits">More subreddit results →</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% for post in posts %}
|
||||
|
||||
{% if post.flags.nsfw && prefs.hide_nsfw == "on" %}
|
||||
{% else if post.title != "Comment" %}
|
||||
<div class="post {% if prefs.layout == "card" && post.post_type == "image" %}card_post{% endif %}">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<div class="post_text">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<small class="post_flair" style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }}">{% call utils::render_flair(post.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- POST MEDIA/THUMBNAIL -->
|
||||
{% if prefs.layout == "card" && post.post_type == "image" %}
|
||||
<img class="post_media" src="{{ post.media }}"/>
|
||||
{% else if post.post_type != "self" %}
|
||||
<a class="post_thumbnail {% if post.thumbnail == "" %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media }}{% else %}{{ post.permalink }}{% endif %}">
|
||||
{% if post.thumbnail == "" %}
|
||||
<svg viewBox="0 0 100 106" width="50" height="53" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
{% else %}
|
||||
<img src="{{ post.thumbnail }}">
|
||||
{% endif %}
|
||||
<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score">{{ post.score }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data">
|
||||
<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</summary>
|
||||
<p class="comment_body">{{ post.body }}</p>
|
||||
</details>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_hidden_nsfw %}
|
||||
<span class="listing_warn">All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</span>
|
||||
{% endif %}
|
||||
|
||||
{% if no_posts %}
|
||||
<center>No posts were found.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_filtered %}
|
||||
<span class="listing_warn">(All content on this page has been filtered)</span>
|
||||
{% else if is_filtered %}
|
||||
<span class="listing_warn">(Content from r/{{ sub }} has been filtered)</span>
|
||||
{% else if params.typed != "sr_user" %}
|
||||
{% for post in posts %}
|
||||
{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
{% else if !post.title.is_empty() %}
|
||||
{% call utils::post_in_list(post) %}
|
||||
{% else %}
|
||||
<div class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score" title="{{ post.score.1 }}">{{ post.score.0 }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data">
|
||||
<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</summary>
|
||||
<p class="comment_body">{{ post.body }}</p>
|
||||
</details>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if prefs.use_hls == "on" %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% endif %}
|
||||
|
||||
{% if params.typed != "sr_user" %}
|
||||
<footer>
|
||||
{% if params.before != "" %}
|
||||
<a href="?q={{ params.q }}&restrict_sr={{ params.restrict_sr }}
|
||||
<a href="?q={{ params.q|safe }}&restrict_sr={{ params.restrict_sr }}
|
||||
&sort={{ params.sort }}&t={{ params.t }}
|
||||
&before={{ params.before }}">PREV</a>
|
||||
&before={{ params.before }}" accesskey="P">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if params.after != "" %}
|
||||
<a href="?q={{ params.q }}&restrict_sr={{ params.restrict_sr }}
|
||||
<a href="?q={{ params.q|safe }}&restrict_sr={{ params.restrict_sr }}
|
||||
&sort={{ params.sort }}&t={{ params.t }}
|
||||
&after={{ params.after }}">NEXT</a>
|
||||
&after={{ params.after }}" accesskey="N">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
@ -8,45 +8,132 @@
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<form id="settings" action="/settings" method="POST">
|
||||
<div id="prefs">
|
||||
<p>Appearance</p>
|
||||
<div id="theme">
|
||||
<label for="theme">Theme:</label>
|
||||
<select name="theme">
|
||||
{% call utils::options(prefs.theme, ["dark", "light"], "dark") %}
|
||||
</select>
|
||||
<div id="settings">
|
||||
<form action="/settings" method="POST">
|
||||
<div class="prefs">
|
||||
<fieldset>
|
||||
<legend>Appearance</legend>
|
||||
<div class="prefs-group">
|
||||
<label for="theme">Theme:</label>
|
||||
<select name="theme" id="theme">
|
||||
{% call utils::options(prefs.theme, prefs.available_themes, "system") %}
|
||||
</select>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<legend>Interface</legend>
|
||||
<div class="prefs-group">
|
||||
<label for="front_page">Front page:</label>
|
||||
<select name="front_page" id="front_page">
|
||||
{% call utils::options(prefs.front_page, ["default", "popular", "all"], "default") %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="layout">Layout:</label>
|
||||
<select name="layout" id="layout">
|
||||
{% call utils::options(prefs.layout, ["card", "clean", "compact"], "card") %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="wide">Wide UI:</label>
|
||||
<input type="hidden" value="off" name="wide">
|
||||
<input type="checkbox" name="wide" id="wide" {% if prefs.wide == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<legend>Content</legend>
|
||||
<div class="prefs-group">
|
||||
<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
|
||||
<select name="post_sort">
|
||||
{% call utils::options(prefs.post_sort, ["hot", "new", "top", "rising", "controversial"], "hot") %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="comment_sort">Default comment sort:</label>
|
||||
<select name="comment_sort" id="comment_sort">
|
||||
{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select>
|
||||
</div>
|
||||
{% if !crate::utils::sfw_only() %}
|
||||
<div class="prefs-group">
|
||||
<label for="show_nsfw">Show NSFW posts:</label>
|
||||
<input type="hidden" value="off" name="show_nsfw">
|
||||
<input type="checkbox" name="show_nsfw" id="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="blur_nsfw">Blur NSFW previews:</label>
|
||||
<input type="hidden" value="off" name="blur_nsfw">
|
||||
<input type="checkbox" name="blur_nsfw" id="blur_nsfw" {% if prefs.blur_nsfw == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="prefs-group">
|
||||
<label for="autoplay_videos">Autoplay videos</label>
|
||||
<input type="hidden" value="off" name="autoplay_videos">
|
||||
<input type="checkbox" name="autoplay_videos" id="autoplay_videos" {% if prefs.autoplay_videos == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="use_hls">Use HLS for videos</label>
|
||||
<details id="feeds">
|
||||
<summary>Why?</summary>
|
||||
<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled to be played with audio. Therefore, this toggle lets you either use Libreddit JS-free or utilize this feature.</div>
|
||||
</details>
|
||||
<input type="hidden" value="off" name="use_hls">
|
||||
<input type="checkbox" name="use_hls" id="use_hls" {% if prefs.use_hls == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="hide_hls_notification">Hide notification about possible HLS usage</label>
|
||||
<input type="hidden" value="off" name="hide_hls_notification">
|
||||
<input type="checkbox" name="hide_hls_notification" id="hide_hls_notification" {% if prefs.hide_hls_notification == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="hide_awards">Hide awards</label>
|
||||
<input type="hidden" value="off" name="hide_awards">
|
||||
<input type="checkbox" name="hide_awards" id="hide_awards" {% if prefs.hide_awards == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="disable_visit_reddit_confirmation">Do not confirm before visiting content on Reddit</label>
|
||||
<input type="hidden" value="off" name="disable_visit_reddit_confirmation">
|
||||
<input type="checkbox" name="disable_visit_reddit_confirmation" {% if prefs.disable_visit_reddit_confirmation == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
</fieldset>
|
||||
<input id="save" type="submit" value="Save">
|
||||
</div>
|
||||
<p>Interface</p>
|
||||
<div id="front_page">
|
||||
<label for="front_page">Front page:</label>
|
||||
<select name="front_page">
|
||||
{% call utils::options(prefs.front_page, ["popular", "all"], "popular") %}
|
||||
</select>
|
||||
</form>
|
||||
{% if prefs.subscriptions.len() > 0 %}
|
||||
<div class="prefs" id="settings_subs">
|
||||
<legend>Subscribed Feeds</legend>
|
||||
{% for sub in prefs.subscriptions %}
|
||||
<div>
|
||||
{% let feed -%}
|
||||
{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = format!("r/{}", sub) -%}{% endif -%}
|
||||
<a href="/{{ feed }}">{{ feed }}</a>
|
||||
<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
|
||||
<button class="unsubscribe">Unsubscribe</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<div id="layout">
|
||||
<label for="layout">Layout:</label>
|
||||
<select name="layout">
|
||||
{% call utils::options(prefs.layout, ["card", "clean", "compact"], "clean") %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="wide">
|
||||
<label for="wide">Wide UI:</label>
|
||||
<input type="checkbox" name="wide" {% if prefs.wide == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<p>Content</p>
|
||||
<div id="comment_sort">
|
||||
<label for="comment_sort">Default comment sort:</label>
|
||||
<select name="comment_sort">
|
||||
{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="hide_nsfw">
|
||||
<label for="hide_nsfw">Hide NSFW posts:</label>
|
||||
<input type="checkbox" name="hide_nsfw" {% if prefs.hide_nsfw == "on" %}checked{% endif %}>
|
||||
{% endif %}
|
||||
{% if !prefs.filters.is_empty() %}
|
||||
<div class="prefs" id="settings_filters">
|
||||
<legend>Filtered Feeds</legend>
|
||||
{% for sub in prefs.filters %}
|
||||
<div>
|
||||
{% let feed -%}
|
||||
{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = format!("r/{}", sub) -%}{% endif -%}
|
||||
<a href="/{{ feed }}">{{ feed }}</a>
|
||||
<form action="/r/{{ sub }}/unfilter/?redirect=settings" method="POST">
|
||||
<button class="unfilter">Unfilter</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div id="settings_note">
|
||||
<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
|
||||
<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&blur_nsfw={{ prefs.blur_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&hide_awards={{ prefs.hide_awards }}&disable_visit_reddit_confirmation={{ prefs.disable_visit_reddit_confirmation }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&autoplay_videos={{ prefs.autoplay_videos }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
|
||||
</div>
|
||||
<p id="settings_note"><b>Note:</b> settings are saved in browser cookies. Clearing your cookie data will reset them.</p>
|
||||
<input id="save" type="submit" value="Save">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
@ -11,8 +11,13 @@
|
||||
{% call utils::search(["/r/", sub.name.as_str()].concat(), "") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list(sub.name.as_str(), "wide") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<main>
|
||||
{% if !is_filtered %}
|
||||
<div id="column_one">
|
||||
<form id="sort">
|
||||
<div id="sort_options">
|
||||
@ -22,72 +27,69 @@
|
||||
{% call utils::sort(["/r/", sub.name.as_str()].concat(), ["hot", "new", "top", "rising", "controversial"], sort.0) %}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if sort.0 == "top" || sort.0 == "controversial" %}<select id="timeframe" name="t">
|
||||
{% if sort.0 == "top" || sort.0 == "controversial" %}<select id="timeframe" name="t" title="Timeframe">
|
||||
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "day") %}
|
||||
<input id="sort_submit" type="submit" value="→">
|
||||
</select>{% endif %}
|
||||
</select>
|
||||
<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
{% endif %}
|
||||
</form>
|
||||
|
||||
{% if sub.name.contains("+") %}
|
||||
<form action="/r/{{ sub.name }}/subscribe?redirect={{ redirect_url }}" method="POST">
|
||||
<button id="multisub" class="subscribe" title="Subscribe to each sub in this multireddit">Subscribe to Multireddit</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_hidden_nsfw %}
|
||||
<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if no_posts %}
|
||||
<center>No posts were found.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_filtered %}
|
||||
<center>(All content on this page has been filtered)</center>
|
||||
{% else %}
|
||||
<div id="posts">
|
||||
{% for post in posts %}
|
||||
{% if !(post.flags.nsfw && prefs.hide_nsfw == "on") %}
|
||||
{% if !(post.flags.nsfw && prefs.show_nsfw != "on") %}
|
||||
<hr class="sep" />
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %} {% if prefs.layout == "card" && post.post_type == "image" %}card_post{% endif %}">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<div class="post_text">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<small class="post_flair" style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }}">{% call utils::render_flair(post.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- POST MEDIA/THUMBNAIL -->
|
||||
{% if prefs.layout == "card" && post.post_type == "image" %}
|
||||
<img class="post_media" src="{{ post.media }}"/>
|
||||
{% else if post.post_type != "self" %}
|
||||
<a class="post_thumbnail {% if post.thumbnail == "" %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media }}{% else %}{{ post.permalink }}{% endif %}">
|
||||
{% if post.thumbnail == "" %}
|
||||
<svg viewBox="0 0 100 106" width="50" height="53" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
{% else %}
|
||||
<img src="{{ post.thumbnail }}">
|
||||
{% endif %}
|
||||
<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% call utils::post_in_list(post) %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% if prefs.use_hls == "on" %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<footer>
|
||||
{% if ends.0 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
|
||||
{% if !ends.0.is_empty() %}
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}" accesskey="P">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if ends.1 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
|
||||
{% if !ends.1.is_empty() %}
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}" accesskey="N">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
</div>
|
||||
{% if sub.name != "" && !sub.name.contains("+") %}
|
||||
{% endif %}
|
||||
{% if is_filtered || (!sub.name.is_empty() && sub.name != "all" && sub.name != "popular" && !sub.name.contains("+")) %}
|
||||
<aside>
|
||||
<div class="panel" id="subreddit">
|
||||
{% if is_filtered %}
|
||||
<center>(Content from r/{{ sub.name }} has been filtered)</center>
|
||||
{% endif %}
|
||||
{% if !sub.name.is_empty() && sub.name != "all" && sub.name != "popular" && !sub.name.contains("+") %}
|
||||
<details class="panel" id="subreddit" open>
|
||||
<summary id="subreddit_label">Subreddit</summary>
|
||||
{% if sub.wiki %}
|
||||
<div id="top">
|
||||
<div>Posts</div>
|
||||
@ -95,22 +97,57 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
<div id="sub_meta">
|
||||
<img id="sub_icon" src="{{ sub.icon }}">
|
||||
<p id="sub_title">{{ sub.title }}</p>
|
||||
<img loading="lazy" id="sub_icon" src="{{ sub.icon }}" alt="Icon for r/{{ sub.name }}">
|
||||
<h1 id="sub_title">{{ sub.title }}</h1>
|
||||
<p id="sub_name">r/{{ sub.name }}</p>
|
||||
<p id="sub_description">{{ sub.description }}</p>
|
||||
<div id="sub_details">
|
||||
<label>Members</label>
|
||||
<label>Active</label>
|
||||
<div>{{ sub.members }}</div>
|
||||
<div>{{ sub.active }}</div>
|
||||
<div title="{{ sub.members.1 }}">{{ sub.members.0 }}</div>
|
||||
<div title="{{ sub.active.1 }}">{{ sub.active.0 }}</div>
|
||||
</div>
|
||||
<div id="sub_actions">
|
||||
<div id="sub_subscription">
|
||||
{% if prefs.subscriptions.contains(sub.name) %}
|
||||
<form action="/r/{{ sub.name }}/unsubscribe?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="unsubscribe">Unsubscribe</button>
|
||||
</form>
|
||||
{% else %}
|
||||
<form action="/r/{{ sub.name }}/subscribe?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="subscribe">Subscribe</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div id="sub_filter">
|
||||
{% if prefs.filters.contains(sub.name) %}
|
||||
<form action="/r/{{ sub.name }}/unfilter?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="unfilter">Unfilter</button>
|
||||
</form>
|
||||
{% else %}
|
||||
<form action="/r/{{ sub.name }}/filter?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="filter">Filter</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</details>
|
||||
<details class="panel" id="sidebar">
|
||||
<summary id="sidebar_label">Sidebar</summary>
|
||||
<div id="sidebar_contents">{{ sub.info }}</div>
|
||||
<div id="sidebar_contents">
|
||||
{{ sub.info|safe }}
|
||||
{# <hr>
|
||||
<h2>Moderators</h2>
|
||||
<br>
|
||||
<ul>
|
||||
{% for moderator in sub.moderators %}
|
||||
<li><a style="color: var(--accent)" href="/u/{{ moderator }}">{{ moderator }}</a></li>
|
||||
{% endfor %}
|
||||
</ul> #}
|
||||
</div>
|
||||
</details>
|
||||
{% endif %}
|
||||
</aside>
|
||||
{% endif %}
|
||||
</main>
|
||||
|
@ -7,68 +7,52 @@
|
||||
|
||||
{% block title %}{{ user.name.replace("u/", "") }} (u/{{ user.name }}) - Libreddit{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list("") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<main>
|
||||
{% if !is_filtered %}
|
||||
<div id="column_one">
|
||||
<form id="sort">
|
||||
<select name="sort">
|
||||
{% call utils::options(sort.0, ["hot", "new", "top"], "") %}
|
||||
</select>{% if sort.0 == "top" %}<select id="timeframe" name="t">
|
||||
<div id="listing_options">
|
||||
{% call utils::sort(["/user/", user.name.as_str()].concat(), ["overview", "comments", "submitted"], listing) %}
|
||||
</div>
|
||||
<select id="sort_select" name="sort">
|
||||
{% call utils::options(sort.0, ["hot", "new", "top", "controversial"], "") %}
|
||||
</select>{% if sort.0 == "top" || sort.0 == "controversial" %}<select id="timeframe" name="t">
|
||||
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
||||
</select>{% endif %}<input id="sort_submit" type="submit" value="→">
|
||||
</select>{% endif %}<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{% if all_posts_hidden_nsfw %}
|
||||
<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if no_posts %}
|
||||
<center>No posts were found.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_filtered %}
|
||||
<center>(All content on this page has been filtered)</center>
|
||||
{% else %}
|
||||
<div id="posts">
|
||||
{% for post in posts %}
|
||||
|
||||
{% if post.flags.nsfw && prefs.hide_nsfw == "on" %}
|
||||
{% else if post.title != "Comment" %}
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %} {% if prefs.layout == "card" && post.post_type == "image" %}card_post{% endif %}">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<div class="post_text">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
{% if post.flair.background_color == "Comment" %}
|
||||
{% else if post.flair.background_color == "" %}
|
||||
{% else %}
|
||||
<small class="post_flair" style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }}">{% call utils::render_flair(post.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- POST MEDIA/THUMBNAIL -->
|
||||
{% if prefs.layout == "card" && post.post_type == "image" %}
|
||||
<img class="post_media" src="{{ post.media }}"/>
|
||||
{% else if post.post_type != "self" %}
|
||||
<a class="post_thumbnail {% if post.thumbnail == "" %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media }}{% else %}{{ post.permalink }}{% endif %}">
|
||||
{% if post.thumbnail == "" %}
|
||||
<svg viewBox="0 0 100 106" width="50" height="53" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
{% else %}
|
||||
<img src="{{ post.thumbnail }}">
|
||||
{% endif %}
|
||||
<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
{% else if !post.title.is_empty() %}
|
||||
{% call utils::post_in_list(post) %}
|
||||
{% else %}
|
||||
<div class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score">{{ post.score }}</p>
|
||||
<p class="comment_score" title="{{ post.score.1 }}">{{ post.score.0 }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
@ -76,28 +60,36 @@
|
||||
<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</summary>
|
||||
<p class="comment_body">{{ post.body }}</p>
|
||||
<p class="comment_body">{{ post.body|safe }}</p>
|
||||
</details>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% endfor %}
|
||||
{% if prefs.use_hls == "on" %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<footer>
|
||||
{% if ends.0 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}" accesskey="P">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if ends.1 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}" accesskey="N">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
</div>
|
||||
{% endif %}
|
||||
<aside>
|
||||
{% if is_filtered %}
|
||||
<center>(Content from u/{{ user.name }} has been filtered)</center>
|
||||
{% endif %}
|
||||
<div class="panel" id="user">
|
||||
<img id="user_icon" src="{{ user.icon }}">
|
||||
<p id="user_title">{{ user.title }}</p>
|
||||
<img loading="lazy" id="user_icon" src="{{ user.icon }}" alt="User icon">
|
||||
<h1 id="user_title">{{ user.title }}</h1>
|
||||
<p id="user_name">u/{{ user.name }}</p>
|
||||
<div id="user_description">{{ user.description }}</div>
|
||||
<div id="user_details">
|
||||
@ -106,6 +98,31 @@
|
||||
<div>{{ user.karma }}</div>
|
||||
<div>{{ user.created }}</div>
|
||||
</div>
|
||||
<div id="user_actions">
|
||||
{% let name = ["u_", user.name.as_str()].join("") %}
|
||||
<div id="user_subscription">
|
||||
{% if prefs.subscriptions.contains(name) %}
|
||||
<form action="/r/{{ name }}/unsubscribe?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="unsubscribe">Unfollow</button>
|
||||
</form>
|
||||
{% else %}
|
||||
<form action="/r/{{ name }}/subscribe?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="subscribe">Follow</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div id="user_filter">
|
||||
{% if prefs.filters.contains(name) %}
|
||||
<form action="/r/{{ name }}/unfilter?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="unfilter">Unfilter</button>
|
||||
</form>
|
||||
{% else %}
|
||||
<form action="/r/{{ name }}/filter?redirect={{ redirect_url }}" method="POST">
|
||||
<button class="filter">Filter</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</aside>
|
||||
</main>
|
||||
|
@ -1,35 +1,338 @@
|
||||
{% macro options(current, values, default) -%}
|
||||
{% for value in values %}
|
||||
<option value="{{ value }}" {% if current == value || (current == "" && value == default) %}selected{% endif %}>
|
||||
{{ format!("{}{}", value.get(0..1).unwrap().to_uppercase(), value.get(1..).unwrap()) }}
|
||||
<option value="{{ value }}" {% if current == value.to_string() || (current == "" && value.to_string() == default.to_string()) %}selected{% endif %}>
|
||||
{{ format!("{}{}", value.get(0..1).unwrap_or_default().to_uppercase(), value.get(1..).unwrap_or_default()) }}
|
||||
</option>
|
||||
{% endfor %}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro sort(root, methods, selected) -%}
|
||||
{% for method in methods %}
|
||||
<a {% if method == selected %}class="selected"{% endif %} href="{{ root }}/{{ method }}">
|
||||
{{ format!("{}{}", method.get(0..1).unwrap().to_uppercase(), method.get(1..).unwrap()) }}
|
||||
<a {% if method.to_string() == selected.to_string() %}class="selected"{% endif %} href="{{ root }}/{{ method }}">
|
||||
{{ format!("{}{}", method.get(0..1).unwrap_or_default().to_uppercase(), method.get(1..).unwrap_or_default()) }}
|
||||
</a>
|
||||
{% endfor %}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro search(root, search) -%}
|
||||
<form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search/" id="searchbox">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ search }}">
|
||||
<form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search" id="searchbox">
|
||||
<input id="search" type="text" name="q" placeholder="Search" title="Search libreddit" value="{{ search }}">
|
||||
{% if root != "/r/" && !root.is_empty() %}
|
||||
<div id="inside">
|
||||
<input type="checkbox" name="restrict_sr" id="restrict_sr">
|
||||
<label for="restrict_sr" class="search_label">in {{ root }}</label>
|
||||
<input type="checkbox" name="restrict_sr" id="restrict_sr" checked>
|
||||
<label for="restrict_sr" class="search_label" title="Restrict search to this subreddit">in {{ root }}</label>
|
||||
</div>
|
||||
{% endif %}
|
||||
<input type="submit" value="→">
|
||||
<button class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro render_flair(flair) -%}
|
||||
{% for flair_part in flair %}
|
||||
{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}')"></span>
|
||||
{% else if flair_part.flair_part_type == "text" %}<span>{{ flair_part.value }}</span>{% endif %}
|
||||
{% endfor %}
|
||||
{% macro render_flair(flair_parts) -%}
|
||||
{% for flair_part in flair_parts.clone() %}{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}');"></span>{% else if flair_part.flair_part_type == "text" && !flair_part.value.is_empty() %}<span>{{ flair_part.value }}</span>{% endif %}{% endfor %}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro sub_list(current) -%}
|
||||
<details id="feeds">
|
||||
<summary>Feeds</summary>
|
||||
<div id="feed_list">
|
||||
<p>MAIN FEEDS</p>
|
||||
<a href="/">Home</a>
|
||||
<a href="/r/popular">Popular</a>
|
||||
<a href="/r/all">All</a>
|
||||
{% if prefs.subscriptions.len() > 0 %}
|
||||
<p>REDDIT FEEDS</p>
|
||||
{% for sub in prefs.subscriptions %}
|
||||
<a href="/r/{{ sub }}" {% if sub == current %}class="selected"{% endif %}>{{ sub }}</a>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</details>
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro render_hls_notification(redirect_url) -%}
|
||||
{% if post.post_type == "video" && !post.media.alt_url.is_empty() && prefs.hide_hls_notification != "on" %}
|
||||
<div class="post_notification"><p><a href="/settings/update/?use_hls=on&redirect={{ redirect_url }}">Enable HLS</a> to view with audio, or <a href="/settings/update/?hide_hls_notification=on&redirect={{ redirect_url }}">disable this notification</a></p></div>
|
||||
{% endif %}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro post(post) -%}
|
||||
<!-- POST CONTENT -->
|
||||
<div class="post highlighted">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
<span class="dot">•</span>
|
||||
<span class="awards">
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
{{ award.count }}
|
||||
</span>
|
||||
{% endfor %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</p>
|
||||
<h1 class="post_title">
|
||||
{{ post.title }}
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</h1>
|
||||
|
||||
<!-- POST MEDIA -->
|
||||
<!-- post_type: {{ post.post_type }} -->
|
||||
{% if post.post_type == "image" %}
|
||||
<div class="post_media_content">
|
||||
<a href="{{ post.media.url }}" class="post_media_image" >
|
||||
<svg
|
||||
width="{{ post.media.width }}px"
|
||||
height="{{ post.media.height }}px"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.media.url }}"/>
|
||||
<desc>
|
||||
<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls>
|
||||
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
|
||||
<source src="{{ post.media.url }}" type="video/mp4" />
|
||||
</video>
|
||||
</div>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% else %}
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video" src="{{ post.media.url }}" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %} loop><a href={{ post.media.url }}>Video</a></video>
|
||||
</div>
|
||||
{% call render_hls_notification(post.permalink[1..]) %}
|
||||
{% endif %}
|
||||
{% else if post.post_type == "gallery" %}
|
||||
<div class="gallery">
|
||||
{% for image in post.gallery -%}
|
||||
<figure>
|
||||
<a href="{{ image.url }}" ><img loading="lazy" alt="Gallery image" src="{{ image.url }}"/></a>
|
||||
<figcaption>
|
||||
<p>{{ image.caption }}</p>
|
||||
{% if image.outbound_url.len() > 0 %}
|
||||
<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
|
||||
{% endif %}
|
||||
</figcaption>
|
||||
</figure>
|
||||
{%- endfor %}
|
||||
</div>
|
||||
{% else if post.post_type == "link" %}
|
||||
<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
|
||||
{% endif %}
|
||||
|
||||
<!-- POST BODY -->
|
||||
<div class="post_body">{{ post.body|safe }}</div>
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
|
||||
{% call poll(post) %}
|
||||
|
||||
<div class="post_footer">
|
||||
<ul id="post_links">
|
||||
<li class="desktop_item"><a href="{{ post.permalink }}">permalink</a></li>
|
||||
<li class="mobile_item"><a href="{{ post.permalink }}">link</a></li>
|
||||
{% if post.num_duplicates > 0 %}
|
||||
<li class="desktop_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">duplicates</a></li>
|
||||
<li class="mobile_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">dupes</a></li>
|
||||
{% endif %}
|
||||
{% call external_reddit_link(post.permalink) %}
|
||||
</ul>
|
||||
<p>{{ post.upvote_ratio }}%<span id="upvoted"> Upvoted</span></p>
|
||||
</div>
|
||||
</div>
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro external_reddit_link(permalink) %}
|
||||
{% for dev_type in ["desktop", "mobile"] %}
|
||||
<li class="{{ dev_type }}_item">
|
||||
<a
|
||||
{% if prefs.disable_visit_reddit_confirmation != "on" %}
|
||||
href="#popup"
|
||||
{% else %}
|
||||
href="https://reddit.com{{ permalink }}"
|
||||
rel="nofollow"
|
||||
{% endif %}
|
||||
>reddit</a>
|
||||
|
||||
{% if prefs.disable_visit_reddit_confirmation != "on" %}
|
||||
{% call visit_reddit_confirmation(permalink) %}
|
||||
{% endif %}
|
||||
</li>
|
||||
{% endfor %}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro post_in_list(post) -%}
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
|
||||
<p class="post_header">
|
||||
{% let community -%}
|
||||
{% if post.community.starts_with("u_") -%}
|
||||
{% let community = format!("u/{}", &post.community[2..]) -%}
|
||||
{% else -%}
|
||||
{% let community = format!("r/{}", post.community) -%}
|
||||
{% endif -%}
|
||||
<a class="post_subreddit" href="/{{ community }}">{{ community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
</span>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</p>
|
||||
<h2 class="post_title">
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};"
|
||||
dir="ltr">{% call render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</h2>
|
||||
<!-- POST MEDIA/THUMBNAIL -->
|
||||
{% if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "image" %}
|
||||
<div class="post_media_content">
|
||||
<a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
|
||||
<svg
|
||||
{%if post.flags.nsfw && prefs.blur_nsfw=="on" %}class="post_nsfw_blur"{% endif %}
|
||||
width="{{ post.media.width }}px"
|
||||
height="{{ post.media.height }}px"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.media.url }}"/>
|
||||
<desc>
|
||||
<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
|
||||
</div>
|
||||
{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
|
||||
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" controls preload="none">
|
||||
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
|
||||
<source src="{{ post.media.url }}" type="video/mp4" />
|
||||
</video>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
|
||||
</div>
|
||||
{% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26").replace("+", "%2B"), post.id)) %}
|
||||
{% endif %}
|
||||
{% else if post.post_type != "self" %}
|
||||
<a class="post_thumbnail {% if post.thumbnail.url.is_empty() %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media.url }}{% else %}{{ post.permalink }}{% endif %}" rel="nofollow">
|
||||
{% if post.thumbnail.url.is_empty() %}
|
||||
<svg viewBox="0 0 100 106" width="140" height="53" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Thumbnail</title>
|
||||
<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
{% else %}
|
||||
<div style="max-width:{{ post.thumbnail.width }}px;max-height:{{ post.thumbnail.height }}px;">
|
||||
<svg {% if post.flags.nsfw && prefs.blur_nsfw=="on" %} class="thumb_nsfw_blur" {% endif %} width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
|
||||
<desc>
|
||||
<img loading="lazy" alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</div>
|
||||
{% endif %}
|
||||
<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
<div class="post_body post_preview">
|
||||
{{ post.body|safe }}
|
||||
</div>
|
||||
|
||||
{% call poll(post) %}
|
||||
|
||||
<div class="post_footer">
|
||||
<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} {% if post.comments.1 == "1" %}comment{% else %}comments{% endif %}">{{ post.comments.0 }} {% if post.comments.1 == "1" %}comment{% else %}comments{% endif %}</a>
|
||||
</div>
|
||||
</div>
|
||||
{%- endmacro %}

{% macro visit_reddit_confirmation(url) -%}
<div class="popup" id="popup">
<div class="popup-inner">
<h1>You are about to leave Libreddit</h1>
<p>Do you want to continue?</p>
<p id="reddit_url">https://www.reddit.com{{ url }}</p>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 639.24 563">
<defs>
<style>.cls-1{fill:#000000;}.cls-2{fill:#f8aa00;}</style>
</defs>
<path class="cls-2" d="M322.03,0c1.95,2.5,4.88,.9,7.33,1.65,10.5,3.21,17.65,10.39,22.83,19.35,93.64,162.06,186.98,324.29,280.25,486.56,15.73,20.19,2.49,51.27-22.92,54.37-1.21,.19-2.72-.54-3.49,1.08H239.03c-70.33-2.43-141.6,.79-212.08-1.74-17.49-4.92-23.16-15.88-26.91-32.26l-.04-1.97C88.74,354.76,194.49,188.2,289.92,18.43c6.2-10.66,15.03-16.94,27.61-17.36,.95-.03,2.05,.18,2.51-1.07h2Zm-2.43,545c94.95-.02,189.9,.04,284.85-.02,11.84-.73,20.75-13.19,16.68-23.55C523.83,355.97,430.74,187.62,332.05,23.07c-7.93-9.02-22.2-6.58-27.23,3.22C230.28,156.11,155.21,285.64,80.41,415.31c-19.88,34.41-39.31,69.07-59.78,103.14-2.43,4.05-4.24,8.8-1.68,14.18,3.92,8.24,9.59,12.37,18.82,12.37,93.95,0,187.9,0,281.85,0Z"/>
<path class="cls-1" d="M319.61,545c-93.95,0-187.9,0-281.85,0-9.22,0-14.89-4.13-18.82-12.37-2.56-5.38-.75-10.13,1.68-14.18,20.47-34.07,39.9-68.73,59.78-103.14C155.21,285.64,230.28,156.11,304.82,26.29c5.03-9.8,19.3-12.24,27.23-3.22,98.7,164.55,191.79,332.9,289.1,498.35,4.06,10.36-4.85,22.82-16.68,23.55-94.94,.06-189.9,0-284.85,.02Zm.44-462.31C238.88,223.22,158.17,362.95,77.28,503h485.54c-80.94-140.13-161.61-279.79-242.77-420.31Z"/>
<path class="cls-2" d="M320.05,82.69c81.16,140.52,161.83,280.18,242.77,420.31H77.28C158.17,362.95,238.88,223.22,320.05,82.69Zm36.05,118.99c-.14-46.75-68.32-52.32-74.66-4.76,.73,51.49,9.2,102.97,12.63,154.49,1.18,13.14,10.53,21.81,23.32,22.76,13.12,.97,23.89-9.13,24.96-21.58,4.44-49.99,9.4-101.22,13.76-150.91Zm-36.56,271.4c48.8,.76,49.24-74.7-.31-75.47-53.45,3-46.02,78.12,.31,75.47Z"/>
<path class="cls-1" d="M356.1,201.67c-4.36,49.69-9.31,100.91-13.76,150.91-1.07,12.45-11.84,22.56-24.96,21.58-12.79-.95-22.14-9.63-23.31-22.76-3.43-51.52-11.9-103-12.63-154.49,6.33-47.53,74.51-42.03,74.66,4.76Z"/>
<path class="cls-1" d="M319.54,473.08c-46.34,2.64-53.75-72.47-.31-75.47,49.56,.78,49.1,76.24,.31,75.47Z"/>
</svg>
<a id="goback" href="#">No, go back!</a>
<a id="toreddit" href="https://www.reddit.com{{ url }}" rel="nofollow">Yes, take me to Reddit</a>
</div>
</div>
{%- endmacro %}

{% macro poll(post) -%}
{% match post.poll %}
{% when Some with (poll) %}
{% let widest = poll.most_votes() %}
<div class="post_poll">
<span>{{ poll.total_vote_count }} votes,</span>
<span title="{{ poll.voting_end_timestamp.1 }}">{{ poll.voting_end_timestamp.0 }}</span>
{% for option in poll.poll_options %}
<div class="poll_option">
{# Posts without vote_count (all open polls) will show up without votes.
This is an issue with Reddit API, it doesn't work on Old Reddit either. #}
{% match option.vote_count %}
{% when Some with (vote_count) %}
{% if vote_count.eq(widest) || widest == 0 %}
<div class="poll_chart most_voted"></div>
{% else %}
<div class="poll_chart" style="width: {{ (vote_count * 100) / widest }}%"></div>
{% endif %}
<span>{{ vote_count }}</span>
{% when None %}
<div class="poll_chart most_voted"></div>
<span></span>
{% endmatch %}
<span>{{ option.text }}</span>
</div>
{% endfor %}
</div>
{% when None %}
{% endmatch %}
{%- endmacro %}
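For context on the arithmetic in the `poll` macro: `widest` is the highest vote count across all options, and each bar is `vote_count * 100 / widest` percent wide, with the leading option (or every option while `widest` is 0) getting the full-width `most_voted` style. A rough Rust sketch of the shape the macro assumes follows; the struct and method names are inferred from the template, not quoted from the crate.

```rust
// Inferred/assumed data shape; field and method names mirror what the template uses.
struct PollOption {
    text: String,
    vote_count: Option<u64>, // None while the poll is open (the Reddit API limitation noted in the comment)
}

struct Poll {
    poll_options: Vec<PollOption>,
    total_vote_count: u64,
}

impl Poll {
    /// Highest vote count among all options; 0 when no counts are available.
    fn most_votes(&self) -> u64 {
        self.poll_options.iter().filter_map(|o| o.vote_count).max().unwrap_or(0)
    }
}

fn main() {
    let poll = Poll {
        poll_options: vec![
            PollOption { text: "Yes".into(), vote_count: Some(120) },
            PollOption { text: "No".into(), vote_count: Some(30) },
        ],
        total_vote_count: 150,
    };
    let widest = poll.most_votes();
    for option in &poll.poll_options {
        if let Some(votes) = option.vote_count {
            // Same width rule as the template: full bar for the leader, proportional otherwise.
            let width = if votes == widest || widest == 0 { 100 } else { votes * 100 / widest };
            println!("{:>3}% {} ({votes} votes)", width, option.text);
        }
    }
}
```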
13
templates/wall.html
Normal file
@@ -0,0 +1,13 @@
{% extends "base.html" %}
{% block title %}{{ msg }}{% endblock %}
{% block sortstyle %}{% endblock %}
{% block content %}
<div id="wall">
<h1>{{ title }}</h1>
<br>
<p>{{ msg }}</p>
<form action="/r/{{ sub }}?redir={{ url }}" method="POST">
<input id="save" type="submit" value="Continue">
</form>
</div>
{% endblock %}
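wall.html is the interstitial shown before a gated subreddit: its `Continue` form posts back to `/r/{{ sub }}` and carries the originally requested location in the `redir` query value. Purely as an illustration of what the receiving side of that form might do, and not the project's actual routing or handler, a hyper-style sketch could be:

```rust
use hyper::{Body, Response};

// Hypothetical handler sketch (the function name and wiring are assumptions):
// once the wall is acknowledged, send the browser back to the page it
// originally asked for, which the template passed along as `redir`.
fn continue_past_wall(redir: &str) -> Result<Response<Body>, hyper::http::Error> {
    Response::builder()
        .status(302)
        // Redirect to the originally requested path, e.g. "/r/example/top".
        .header("Location", redir)
        .body(Body::empty())
}

fn main() {
    let resp = continue_past_wall("/r/example/top").unwrap();
    assert_eq!(resp.status(), 302);
}
```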
@@ -10,6 +10,10 @@
{% call utils::search(["/r/", sub.as_str()].concat(), "") %}
{% endblock %}

{% block subscriptions %}
{% call utils::sub_list(sub.as_str()) %}
{% endblock %}

{% block body %}
<main>
<div class="panel" id="column_one">
@@ -18,7 +22,7 @@
<div>Wiki</div>
</div>
<div id="wiki">
{{ wiki }}
{{ wiki|safe }}
</div>
</div>
</main>
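The one functional change in the wiki hunk above is `{{ wiki }}` becoming `{{ wiki|safe }}`: Askama HTML-escapes interpolated values by default, and the `safe` filter marks the wiki body, already rendered to HTML upstream, as not needing another round of escaping. A small self-contained illustration (the inline template source and struct name are just for this example; the real markup lives in the template file):

```rust
use askama::Template;

// Inline template purely for demonstration of the `safe` filter.
#[derive(Template)]
#[template(source = "<div id=\"wiki\">{{ wiki|safe }}</div>", ext = "html")]
struct WikiDemo<'a> {
    wiki: &'a str, // assumed to already contain rendered HTML
}

fn main() {
    let page = WikiDemo { wiki: "<p>Hello <strong>wiki</strong></p>" };
    // With `|safe` the tags are emitted as-is; without it they would be
    // escaped to &lt;p&gt;Hello &lt;strong&gt;wiki&lt;/strong&gt;&lt;/p&gt;.
    println!("{}", page.render().unwrap());
}
```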