Compare commits
290 Commits
SHA1 | Author | Date | |
---|---|---|---|
8fa8a449cf | |||
473a498bea | |||
92f5286667 | |||
0a6bf6bbee | |||
618b074ad5 | |||
d86cebf975 | |||
ab39b62533 | |||
5aee695bae | |||
c9633e1464 | |||
0152752913 | |||
6912307349 | |||
f76243e0af | |||
f0fa2f2709 | |||
88bed73e5e | |||
3a33c70e7c | |||
40dfddc44d | |||
3f3d9e9c3b | |||
501b47894c | |||
d8c661177b | |||
fade305f90 | |||
e62d33ccae | |||
465d9b7ba7 | |||
5c366e14a3 | |||
d4ca376e8d | |||
371b7b2635 | |||
cc27dc2a26 | |||
bfe03578f0 | |||
c6487799ed | |||
584cd4aac1 | |||
377634841c | |||
c0e37443ae | |||
8348e20724 | |||
ae3ea2da7c | |||
8435b8eab9 | |||
510c8679d6 | |||
98674310bc | |||
170ea384fb | |||
1b5e9a4279 | |||
b170a8dd99 | |||
aa54301054 | |||
b4d3f03335 | |||
1a1ff2e600 | |||
4fc07c02b5 | |||
8d58cf61d2 | |||
711e3c205d | |||
0704eb10b8 | |||
ef86c1be86 | |||
8141b74817 | |||
57d304161b | |||
b5f21bcb97 | |||
36c560144a | |||
2bc714d0c5 | |||
ff4a515e24 | |||
93f089c2cf | |||
23569206cc | |||
5f20e8ee27 | |||
a8a8980b98 | |||
fd7d977835 | |||
50f26333cb | |||
f5cd48b07f | |||
50665bbeb3 | |||
d558127306 | |||
0c757023f9 | |||
90828cc71c | |||
7f5bfc04b3 | |||
322aa97a18 | |||
7e07ca3df1 | |||
428dc58e3c | |||
0ec8e4e9a2 | |||
60c7b6b23f | |||
1c8bcf33c1 | |||
3bdc21f90a | |||
c3dade257d | |||
62b2bbb231 | |||
653aee9294 | |||
bb7fb1313d | |||
01bc729a80 | |||
39e6e6bf81 | |||
8c94c0dd17 | |||
1c50c8f30d | |||
3facaefb53 | |||
aec45311cc | |||
47ab857103 | |||
a9ef5bc08b | |||
eb6c5e5e1e | |||
ed11135af8 | |||
3a1af78e26 | |||
345770c64d | |||
9eb42932df | |||
f0a6bdc21b | |||
3eef60d486 | |||
59043456ba | |||
90c7088da2 | |||
9e65a65556 | |||
8cfbde2710 | |||
70ff150ab4 | |||
388779c1f2 | |||
6b605d859f | |||
0ae48c400c | |||
a6ed18d674 | |||
838cdd95d1 | |||
bc95b08ffd | |||
e6190267e4 | |||
3ceeac5fb0 | |||
60eb0137c2 | |||
b6bca68d4e | |||
91bff826f0 | |||
af6606a855 | |||
977cd0763a | |||
fcadd44cb3 | |||
9c325c2cbf | |||
e9038f4fe2 | |||
8b8f55e09a | |||
f1b3749cf0 | |||
0708fdfb37 | |||
cad29e9544 | |||
6b59976fcf | |||
f9b3981448 | |||
db3196df5a | |||
b3d4f6f91c | |||
45b875b85d | |||
992d7889c4 | |||
3188f9d8e7 | |||
90fa0b5496 | |||
7aeabfc4bc | |||
150ebe38f3 | |||
2905d114fa | |||
40e97cc75d | |||
7c73e352ce | |||
341c623be8 | |||
4c8b724a9d | |||
227d74b187 | |||
f05a818edd | |||
ceee13cfb7 | |||
a39495b3cb | |||
38cfe4ad71 | |||
0b89539c2b | |||
046b8b3edc | |||
0656756d21 | |||
43551f70fd | |||
364c29c4d5 | |||
e6c978a2f7 | |||
91cc140091 | |||
6f29d94337 | |||
67e26479ae | |||
1a1dee36b8 | |||
b63000a93f | |||
401ee2ee41 | |||
99a83ea11b | |||
888e7b302d | |||
beada1f2b2 | |||
bd413060c6 | |||
3054b9f4a0 | |||
1cccef12a4 | |||
8e332b0630 | |||
85ae7c1f60 | |||
6d73024183 | |||
923ff776bd | |||
e181e3f57d | |||
79bb913fa6 | |||
632b64c98b | |||
2878d9c799 | |||
9f8d36cb00 | |||
25e641e7b3 | |||
4faa9d46d6 | |||
7220190811 | |||
768820cd4c | |||
2ef7957a66 | |||
7df8e7b4c6 | |||
67d3be06e1 | |||
6be5eb8991 | |||
5d9c320a7e | |||
f7de5285e4 | |||
c2053524c7 | |||
3a9e6b4ca0 | |||
731a407466 | |||
34ea679519 | |||
0f7ba3c61d | |||
2486347b14 | |||
c298109a7b | |||
a0509890b7 | |||
5644d621f7 | |||
1fc5bda486 | |||
b3255c22cf | |||
1d4ea50a45 | |||
546c8a4cda | |||
03336ecafd | |||
957e1c7728 | |||
09053ef0ad | |||
aff030fc3a | |||
97555dbfdd | |||
32360e5165 | |||
350b796571 | |||
567556711b | |||
1ff725ba2e | |||
6a4191f3b5 | |||
668493b72c | |||
db04dcb238 | |||
cc0a1e0324 | |||
e073fc87aa | |||
982f57efd9 | |||
52a1b45014 | |||
6f88fdfc75 | |||
015d0b3414 | |||
b41eabecf7 | |||
5cb5f46fa2 | |||
a900339529 | |||
41b3dc5739 | |||
b3b5782373 | |||
5c753ee171 | |||
229518c40b | |||
45a5778571 | |||
be253d40dd | |||
e571cc3b1e | |||
345f8e7b80 | |||
a190890239 | |||
ee51ce1a76 | |||
81a2df98cb | |||
e79a4b704a | |||
56998b8332 | |||
5418303b08 | |||
5ab41c4e6e | |||
807b3ffeca | |||
85deb4947d | |||
d2002c9027 | |||
f84f4c0326 | |||
ca3f6c0579 | |||
decc9e5139 | |||
d27bd782ce | |||
4defb58f2a | |||
ba42fc066f | |||
2cd35fb3b6 | |||
b9af6f47f3 | |||
73732a2a44 | |||
43ed9756dc | |||
8bb247af3b | |||
ed05f5a092 | |||
4f09333cd7 | |||
31bf8c802e | |||
e4f9bd7b8d | |||
83a667347d | |||
499a56aed4 | |||
928907086c | |||
dc9fbc1a05 | |||
7ae7a88eed | |||
536a766960 | |||
e34329cfee | |||
97a0680bd0 | |||
c1560f4eba | |||
242ffab0da | |||
1211d781d0 | |||
9e4066658c | |||
560de4e91f | |||
bd1c890961 | |||
6f799b2617 | |||
38e176f59f | |||
8248eca95c | |||
ffc3bfe72d | |||
d713746407 | |||
21b45760eb | |||
e3fb93946a | |||
b6134a39d0 | |||
c844655c98 | |||
cac83493da | |||
b47cfd1ba5 | |||
28ca3589ed | |||
3cf787cf98 | |||
46e22cf74e | |||
5c2e134924 | |||
c6244585fa | |||
9f1ba274eb | |||
93ed1c6f0c | |||
6ce82c36fb | |||
2974d92e30 | |||
34dfcb2512 | |||
6b42e97bda | |||
49bfe4d27c | |||
c8965ae51b | |||
0b64a52a63 | |||
a18db1e2b7 | |||
3b53e5be4c | |||
42e8351285 | |||
b3e4b7bfae | |||
4a42a25ed3 | |||
2bacaa163f | |||
48c3a8c0d0 | |||
c23d2dc50b | |||
46dbd88d91 | |||
f0f484288e | |||
90d39b121f |
2 .github/FUNDING.yml (vendored, new file)
@@ -0,0 +1,2 @@
+liberapay: spike
+custom: ['https://www.buymeacoffee.com/spikecodes']
27 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -1,24 +1,33 @@
 ---
-name: Bug report
+name: 🐛 Bug report
 about: Create a report to help us improve
-title: Bug Report | [title]
+title: '🐛 Bug Report: '
 labels: bug
 assignees: ''
 
 ---
 
-**Describe the bug**
-A clear and concise description of what the bug is.
+## Describe the bug
+<!--
+A clear and concise description of what the bug is.
+-->
 
-**To reproduce**
+## Steps to reproduce the bug
 
+<!--
 Steps to reproduce the behavior:
 1. Go to '...'
 2. Click on '....'
 3. Scroll down to '....'
 4. See error
+-->
 
-**Expected behavior**
-A clear and concise description of what you expected to happen.
+## What's the expected behavior?
+<!--
+A clear and concise description of what you expected to happen.
+-->
 
-**Additional context**
-Add any other context about the problem here.
+## Additional context / screenshot
+<!--
+Add any other context about the problem here.
+-->
28 .github/ISSUE_TEMPLATE/feature_parity.md (vendored, new file)
@@ -0,0 +1,28 @@
+---
+name: ✨ Feature parity
+about: Suggest implementing a feature into Libreddit that is found in Reddit.com
+title: '✨ Feature parity: '
+labels: feature parity
+assignees: ''
+
+---
+
+## How does this feature work on Reddit?
+<!--
+A clear and concise description of what the feature is.
+-->
+
+## Describe how this could be implemented into Libreddit
+<!--
+A clear and concise description of what you want to happen.
+-->
+
+## Describe alternatives you've considered
+<!--
+A clear and concise description of any alternative solutions or features you've considered.
+-->
+
+## Additional context / screenshot
+<!--
+Add any other context or screenshots about the feature parity request here.
+-->
30 .github/ISSUE_TEMPLATE/feature_request.md (vendored)
@@ -1,20 +1,28 @@
 ---
-name: Feature request
+name: 💡 Feature request
-about: Suggest an idea for this project
+about: Suggest a feature for Libreddit that is not found in Reddit
-title: Feature Request | [title]
+title: '💡 Feature request: '
 labels: enhancement
 assignees: ''
 
 ---
 
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+## Is your feature request related to a problem? Please describe.
+<!--
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+-->
 
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
+## Describe the feature you would like to be implemented
+<!--
+A clear and concise description of what you want to happen.
+-->
 
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
+## Describe alternatives you've considered
+<!--
+A clear and concise description of any alternative solutions or features you've considered.
+-->
 
-**Additional context**
-Add any other context or screenshots about the feature request here.
+## Additional context / screenshot
+<!--
+Add any other context or screenshots about the feature request here.
+-->
38 .github/workflows/docker-arm.yml (vendored, new file)
@@ -0,0 +1,38 @@
+name: Docker ARM Build
+
+on:
+  push:
+    paths-ignore:
+      - "**.md"
+    branches:
+      - master
+
+jobs:
+  build-docker:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+        with:
+          platforms: all
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+        with:
+          version: latest
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./Dockerfile.arm
+          platforms: linux/arm64
+          push: true
+          tags: spikecodes/libreddit:arm
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
41 .github/workflows/docker-armv7.yml (vendored, new file)
@@ -0,0 +1,41 @@
+name: Docker ARM V7 Build
+
+on:
+  push:
+    paths-ignore:
+      - "**.md"
+    branches:
+      - master
+
+jobs:
+  build-docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@v1
+        with:
+          platforms: all
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+        with:
+          version: latest
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Build and push
+        id: build_push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./Dockerfile.armv7
+          platforms: linux/arm/v7
+          push: true
+          tags: spikecodes/libreddit:armv7
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
@@ -1,4 +1,4 @@
-name: Docker Multi-Architecture Build
+name: Docker amd64 Build
 
 on:
   push:
@@ -31,6 +31,8 @@ jobs:
        with:
          context: .
          file: ./Dockerfile
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64
          push: true
          tags: spikecodes/libreddit:latest
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
21 .github/workflows/rust.yml (vendored)
@@ -2,9 +2,10 @@ name: Rust
 
 on:
   push:
-    branches: [master]
-  pull_request:
-    branches: [master]
+    paths-ignore:
+      - "**.md"
+    branches:
+      - master
 
 env:
   CARGO_TERM_COLOR: always
@@ -21,6 +22,10 @@ jobs:
 
      - name: Build
        run: cargo build --release
 
+      - name: Publish to crates.io
+        continue-on-error: true
+        run: cargo publish --no-verify --token ${{ secrets.CARGO_REGISTRY_TOKEN }}
+
      - uses: actions/upload-artifact@v2.2.1
        name: Upload a Build Artifact
@@ -32,23 +37,23 @@ jobs:
        id: version
        run: |
          echo "::set-output name=version::$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')"
-          echo "::set-output name=tag::${GITHUB_REF#refs/*/}"
+          echo "::set-output name=tag::$(git describe --tags)"
 
      - name: Calculate SHA512 checksum
        run: sha512sum target/release/libreddit > libreddit.sha512
 
      - name: Release
        uses: softprops/action-gh-release@v1
+        if: github.base_ref != 'master'
        with:
          tag_name: ${{ steps.version.outputs.version }}
-          name: ${{ steps.version.outputs.version }} - NAME
+          name: ${{ steps.version.outputs.version }} - ${{ github.event.head_commit.message }}
          draft: true
          files: |
            target/release/libreddit
            libreddit.sha512
          body: |
-            - CHANGES
-          See full list of changes [here](https://github.com/spikecodes/libreddit/compare/${{ steps.version.outputs.tag }}...${{ steps.version.outputs.version }}).
+            - ${{ github.event.head_commit.message }} ${{ github.sha }}
+        generate_release_notes: true
        env:
          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
3 .gitignore (vendored)
@@ -1,2 +1 @@
 /target
-Cargo.lock
2 .replit (new file)
@@ -0,0 +1,2 @@
+run = "while :; do set -ex; curl -o./libreddit -fsSL -- https://github.com/libreddit/libreddit/releases/latest/download/libreddit ; chmod +x libreddit; set +e; ./libreddit -H 63115200; sleep 1; done"
+language = "bash"
@@ -1 +0,0 @@
-* @spikecodes
81 CREDITS (new file)
@@ -0,0 +1,81 @@
+5trongthany <65565784+5trongthany@users.noreply.github.com>
+674Y3r <87250374+674Y3r@users.noreply.github.com>
+accountForIssues <52367365+accountForIssues@users.noreply.github.com>
+Adrian Lebioda <adrianlebioda@gmail.com>
+alefvanoon <53198048+alefvanoon@users.noreply.github.com>
+alyaeanyx <alexandra.hollmeier@mailbox.org>
+AndreVuillemot160 <84594011+AndreVuillemot160@users.noreply.github.com>
+Andrew Kaufman <57281817+andrew-kaufman@users.noreply.github.com>
+Artemis <51862164+artemislena@users.noreply.github.com>
+arthomnix <35371030+arthomnix@users.noreply.github.com>
+Arya K <73596856+gi-yt@users.noreply.github.com>
+Austin Huang <im@austinhuang.me>
+Basti <pred2k@users.noreply.github.com>
+Ben Smith <37027883+smithbm2316@users.noreply.github.com>
+BobIsMyManager <ahoumatt@yahoo.com>
+curlpipe <11898833+curlpipe@users.noreply.github.com>
+dacousb <53299044+dacousb@users.noreply.github.com>
+Daniel Valentine <Daniel-Valentine@users.noreply.github.com>
+dbrennand <52419383+dbrennand@users.noreply.github.com>
+Diego Magdaleno <38844659+DiegoMagdaleno@users.noreply.github.com>
+Dyras <jevwmguf@duck.com>
+Edward <101938856+EdwardLangdon@users.noreply.github.com>
+erdnaxe <erdnaxe@users.noreply.github.com>
+Esmail EL BoB <github.defilable@simplelogin.co>
+FireMasterK <20838718+FireMasterK@users.noreply.github.com>
+George Roubos <cowkingdom@hotmail.com>
+git-bruh <e817509a-8ee9-4332-b0ad-3a6bdf9ab63f@aleeas.com>
+guaddy <67671414+guaddy@users.noreply.github.com>
+Harsh Mishra <erbeusgriffincasper@gmail.com>
+igna <igna@intent.cool>
+imabritishcow <bcow@protonmail.com>
+Josiah <70736638+fres7h@users.noreply.github.com>
+JPyke3 <pyke.jacob1@gmail.com>
+Kavin <20838718+FireMasterK@users.noreply.github.com>
+Kazi <kzshantonu@users.noreply.github.com>
+Kieran <42723993+EnderDev@users.noreply.github.com>
+Kieran <kieran@dothq.co>
+Kyle Roth <kylrth@gmail.com>
+laazyCmd <laazy.pr00gramming@protonmail.com>
+Laurențiu Nicola <lnicola@users.noreply.github.com>
+Lena <102762572+MarshDeer@users.noreply.github.com>
+Macic <46872282+Macic-Dev@users.noreply.github.com>
+Mario A <10923513+Midblyte@users.noreply.github.com>
+Matthew Crossman <matt@crossman.page>
+Matthew E <matt@matthew.science>
+Mennaruuk <52135169+Mennaruuk@users.noreply.github.com>
+mikupls <93015331+mikupls@users.noreply.github.com>
+Nainar <nainar.mb@gmail.com>
+Nathan Moos <moosingin3space@gmail.com>
+Nicholas Christopher <nchristopher@tuta.io>
+Nick Lowery <ClockVapor@users.noreply.github.com>
+Nico <github@dr460nf1r3.org>
+NKIPSC <15067635+NKIPSC@users.noreply.github.com>
+obeho <71698631+obeho@users.noreply.github.com>
+obscurity <z@x4.pm>
+Om G <34579088+OxyMagnesium@users.noreply.github.com>
+RiversideRocks <59586759+RiversideRocks@users.noreply.github.com>
+robin <8597693+robrobinbin@users.noreply.github.com>
+Robin <8597693+robrobinbin@users.noreply.github.com>
+robrobinbin <>
+robrobinbin <8597693+robrobinbin@users.noreply.github.com>
+robrobinbin <robindepril@gmail.com>
+Ruben Elshof <15641671+rubenelshof@users.noreply.github.com>
+Scoder12 <34356756+Scoder12@users.noreply.github.com>
+Slayer <51095261+GhostSlayer@users.noreply.github.com>
+Soheb <somoso@users.noreply.github.com>
+somini <somini@users.noreply.github.com>
+somoso <github@soheb.anonaddy.com>
+Spike <19519553+spikecodes@users.noreply.github.com>
+spikecodes <19519553+spikecodes@users.noreply.github.com>
+sybenx <syb@duck.com>
+TheCultLeader666 <65368815+TheCultLeader666@users.noreply.github.com>
+TheFrenchGhosty <47571719+TheFrenchGhosty@users.noreply.github.com>
+The TwilightBlood <hwengerstickel@protonmail.com>
+tirz <36501933+tirz@users.noreply.github.com>
+Tsvetomir Bonev <invakid404@riseup.net>
+Vladislav Nepogodin <nepogodin.vlad@gmail.com>
+Walkx <walkxnl@gmail.com>
+Wichai <1482605+Chengings@users.noreply.github.com>
+xatier <xatierlike@gmail.com>
+Zach <72994911+zachjmurphy@users.noreply.github.com>
1628 Cargo.lock (generated, new file)
File diff suppressed because it is too large.
41 Cargo.toml
@@ -3,23 +3,30 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.8.2"
+version = "0.24.3"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
-edition = "2018"
+edition = "2021"
 
 [dependencies]
-askama = { version = "0.10.5", default-features = false }
+askama = { version = "0.11.1", default-features = false }
-async-recursion = "0.3.2"
+async-recursion = "1.0.0"
-cached = "0.23.0"
+cached = "0.40.0"
-clap = { version = "2.33.3", default-features = false }
+clap = { version = "4.0.24", default-features = false, features = ["std"] }
-regex = "1.4.5"
+regex = "1.7.0"
-serde = { version = "1.0.125", features = ["derive"] }
+serde = { version = "1.0.147", features = ["derive"] }
-cookie = "0.15.0"
+cookie = "0.16.1"
-futures-lite = "1.11.3"
+futures-lite = "1.12.0"
-hyper = { version = "0.14.5", features = ["full"] }
+hyper = { version = "0.14.23", features = ["full"] }
-hyper-rustls = "0.22.1"
+hyper-rustls = "0.23.0"
-route-recognizer = "0.3.0"
-serde_json = "1.0.64"
-tokio = { version = "1.4.0", features = ["full"] }
-time = "0.2.26"
-url = "2.2.1"
+percent-encoding = "2.2.0"
+route-recognizer = "0.3.1"
+serde_json = "1.0.87"
+tokio = { version = "1.21.2", features = ["full"] }
+time = "0.3.17"
+url = "2.3.1"
+rust-embed = { version = "6.4.2", features = ["include-exclude"] }
+libflate = "1.2.0"
+brotli = { version = "3.3.4", features = ["std"] }
+
+[dev-dependencies]
+lipsum = "0.8.2"
35 Dockerfile
@@ -1,17 +1,36 @@
-FROM rust:latest as builder
+####################################################################################################
+## Builder
+####################################################################################################
+FROM rust:alpine AS builder
+
+RUN apk add --no-cache musl-dev
+
+WORKDIR /libreddit
 
-WORKDIR /usr/src/libreddit
 COPY . .
-RUN cargo install --path .
 
-FROM debian:buster-slim
+RUN cargo build --target x86_64-unknown-linux-musl --release
+
+####################################################################################################
+## Final image
+####################################################################################################
+FROM alpine:latest
 
-RUN apt-get update && apt-get install -y libcurl4 && rm -rf /var/lib/apt/lists/*
-COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit
-RUN useradd --system --user-group --home-dir /nonexistent --no-create-home --shell /usr/sbin/nologin libreddit
+# Import ca-certificates from builder
+COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
+COPY --from=builder /etc/ssl/certs /etc/ssl/certs
+
+# Copy our build
+COPY --from=builder /libreddit/target/x86_64-unknown-linux-musl/release/libreddit /usr/local/bin/libreddit
+
+# Use an unprivileged user.
+RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
 USER libreddit
+
+# Tell Docker to expose port 8080
 EXPOSE 8080
-CMD ["libreddit"]
+
+# Run a healthcheck every minute to make sure Libreddit is functional
+HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1
+
+CMD ["libreddit"]
41 Dockerfile.arm (new file)
@@ -0,0 +1,41 @@
+####################################################################################################
+## Builder
+####################################################################################################
+FROM rust:alpine AS builder
+
+RUN apk add --no-cache g++ git
+
+WORKDIR /usr/src/libreddit
+
+COPY . .
+
+# net.git-fetch-with-cli is specified in order to prevent a potential OOM kill
+# in low memory environments. See:
+# https://users.rust-lang.org/t/cargo-uses-too-much-memory-being-run-in-qemu/76531
+# This is tracked under issue #641. This also requires us to install git in the
+# builder.
+RUN cargo install --config net.git-fetch-with-cli=true --path .
+
+####################################################################################################
+## Final image
+####################################################################################################
+FROM alpine:latest
+
+# Import ca-certificates from builder
+COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
+COPY --from=builder /etc/ssl/certs /etc/ssl/certs
+
+# Copy our build
+COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit
+
+# Use an unprivileged user.
+RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
+USER libreddit
+
+# Tell Docker to expose port 8080
+EXPOSE 8080
+
+# Run a healthcheck every minute to make sure Libreddit is functional
+HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1
+
+CMD ["libreddit"]
43 Dockerfile.armv7 (new file)
@@ -0,0 +1,43 @@
+####################################################################################################
+## Builder
+####################################################################################################
+FROM --platform=$BUILDPLATFORM rust:slim AS builder
+
+ENV CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER=arm-linux-gnueabihf-gcc
+ENV CC_armv7_unknown_linux_musleabihf=arm-linux-gnueabihf-gcc
+
+RUN apt-get update && apt-get -y install gcc-arm-linux-gnueabihf \
+    binutils-arm-linux-gnueabihf \
+    musl-tools
+
+RUN rustup target add armv7-unknown-linux-musleabihf
+
+WORKDIR /libreddit
+
+COPY . .
+
+RUN cargo build --target armv7-unknown-linux-musleabihf --release
+
+####################################################################################################
+## Final image
+####################################################################################################
+FROM alpine:latest
+
+# Import ca-certificates from builder
+COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
+COPY --from=builder /etc/ssl/certs /etc/ssl/certs
+
+# Copy our build
+COPY --from=builder /libreddit/target/armv7-unknown-linux-musleabihf/release/libreddit /usr/local/bin/libreddit
+
+# Use an unprivileged user.
+RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
+USER libreddit
+
+# Tell Docker to expose port 8080
+EXPOSE 8080
+
+# Run a healthcheck every minute to make sure Libreddit is functional
+HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1
+
+CMD ["libreddit"]
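The two ARM Dockerfiles above are what the `docker-arm.yml` and `docker-armv7.yml` workflows earlier in this diff feed into `docker/build-push-action`. A rough local equivalent, as a sketch only: the tags, platforms, and Dockerfile names are taken from those workflow files, and `--push` assumes you are logged in to Docker Hub with Buildx and QEMU already set up.

```bash
# Mirror of .github/workflows/docker-arm.yml, run locally with Buildx
docker buildx build --platform linux/arm64 -f Dockerfile.arm -t spikecodes/libreddit:arm --push .

# Mirror of .github/workflows/docker-armv7.yml
docker buildx build --platform linux/arm/v7 -f Dockerfile.armv7 -t spikecodes/libreddit:armv7 --push .
```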
131 README.md
@@ -1,6 +1,6 @@
 # Libreddit
 
 > An alternative private front-end to Reddit
 
 
 
@@ -8,46 +8,38 @@
 
 **10 second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit).
 
-- 🚀 Fast: written in Rust for blazing fast speeds and memory safety
+- 🚀 Fast: written in Rust for blazing-fast speeds and memory safety
 - ☁️ Light: no JavaScript, no ads, no tracking, no bloat
 - 🕵 Private: all requests are proxied through the server, including media
 - 🔒 Secure: strong [Content Security Policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) prevents browser requests to Reddit
 
 ---
 
-**BTC:** bc1qwyxjnafpu3gypcpgs025cw9wa7ryudtecmwa6y
+I appreciate any donations! Your support allows me to continue developing Libreddit.
 
-**XMR:** 45FJrEuFPtG2o7QZz2Nps77TbHD4sPqxViwbdyV9A6ktfHiWs47UngG5zXPcLoDXAc8taeuBgeNjfeprwgeXYXhN3C9tVSR
+<a href="https://www.buymeacoffee.com/spikecodes" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 40px" ></a>
+<a href="https://liberapay.com/spike/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg" style="height: 40px"></a>
+
+**Bitcoin:** `bc1qwyxjnafpu3gypcpgs025cw9wa7ryudtecmwa6y`
+
+**Monero:** `45FJrEuFPtG2o7QZz2Nps77TbHD4sPqxViwbdyV9A6ktfHiWs47UngG5zXPcLoDXAc8taeuBgeNjfeprwgeXYXhN3C9tVSR`
+
 ---
 
 # Instances
 
-Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new) to have your [selfhosted instance](#deployment) listed here!
+🔗 **Want to automatically redirect Reddit links to Libreddit? Use [LibRedirect](https://github.com/libredirect/libredirect) or [Privacy Redirect](https://github.com/SimonBrazell/privacy-redirect)!**
 
-| Website | Country | Cloudflare |
-|-|-|-|
-| [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
-| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
-| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | |
-| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | ✅ |
-| [libreddit.himiko.cloud](https://libreddit.himiko.cloud) | 🇫🇮 FI | |
-| [libreddit.bcow.xyz](https://libreddit.bcow.xyz) | 🇺🇸 US | |
-| [libreddit.40two.app](https://libreddit.40two.app) | 🇳🇱 NL | |
-| [reddit.invak.id](https://reddit.invak.id) | 🇧🇬 BG | |
-| [reddit.phii.me](https://reddit.phii.me) | 🇺🇸 US | |
-| [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |
-| [fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion](http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion) | 🇩🇪 DE | |
-| [libreddit.himiko7xl2skojc6odi7hykl626gt4qki3vxdbv33u2u3af76d6k32ad.onion](http://libreddit.himiko7xl2skojc6odi7hykl626gt4qki3vxdbv33u2u3af76d6k32ad.onion) | 🇫🇮 FI | |
-| [dflv6yjt7il3n3tggf4qhcmkzbti2ppytqx3o7pjrzwgntutpewscyid.onion](http://dflv6yjt7il3n3tggf4qhcmkzbti2ppytqx3o7pjrzwgntutpewscyid.onion/) | 🇺🇸 US | |
+[Follow this link](https://github.com/libreddit/libreddit-instances/blob/master/instances.md) for an up-to-date table of instances in markdown format. This list is also available as [a machine-readable JSON](https://github.com/libreddit/libreddit-instances/blob/master/instances.json).
 
-A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare). The checkmark will not be listed for a site which uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.
+Both files are part of the [libreddit-instances](https://github.com/libreddit/libreddit-instances) repository. To contribute your [self-hosted instance](#deployment) to the list, see the [libreddit-instances README](https://github.com/libreddit/libreddit-instances/blob/master/README.md).
 
 ---
 
 # About
 
-Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/spikecodes/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).
+Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).
 
 ## Built with
 
@@ -59,7 +51,7 @@ Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [D
 ## Info
 Libreddit hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Libreddit was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram.
 
-Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/spikecodes/libreddit/issues).
+Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/libreddit/libreddit/issues).
 
 ## How does it compare to Teddit?
 
@@ -67,7 +59,7 @@ Teddit is another awesome open source project designed to provide an alternative
 
 If you are looking to compare, the biggest differences I have noticed are:
 - Libreddit is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective.
-- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Actix Web](https://actix.rs), which was [benchmarked as the fastest web server for single queries](https://www.techempower.com/benchmarks/#hw=ph&test=db).
+- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Hyper](https://hyper.rs), a speedy and lightweight HTTP server/client implementation.
 
 ---
 
@@ -77,15 +69,15 @@ This section outlines how Libreddit compares to Reddit.
 
 ## Speed
 
-Lasted tested Jan 17, 2021.
+Lasted tested Nov 11, 2022.
 
-Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Flibredd.it), [Reddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Fwww.reddit.com%2F)).
+Results from Google PageSpeed Insights ([Libreddit Report](https://pagespeed.web.dev/report?url=https%3A%2F%2Flibreddit.spike.codes%2F), [Reddit Report](https://pagespeed.web.dev/report?url=https://www.reddit.com)).
 
 | | Libreddit | Reddit |
-|------------------------|---------------|------------|
-| Requests | 20 | 70 |
-| Resource Size (card ui)| 1,224 KiB | 1,690 KiB |
-| Time to Interactive | **1.5 s** | **11.2 s** |
+|------------------------|-------------|-----------|
+| Requests | 60 | 83 |
+| Speed Index | 2.0s | 10.4s |
+| Time to Interactive | **2.8s** | **12.4s** |
 
 ## Privacy
 
@@ -104,7 +96,7 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot
 - The requested URL
 - Search terms
 
-**Location:** The same privacy policy goes on to describe location data may be collected through the use of:
+**Location:** The same privacy policy goes on to describe that location data may be collected through the use of:
 - GPS (consensual)
 - Bluetooth (consensual)
 - Content associated with a location (consensual)
@@ -122,13 +114,13 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot
 
 For transparency, I hope to describe all the ways Libreddit handles user privacy.
 
-**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs when Reddit is ratelimiting Libreddit and when Reddit's JSON responses can't be parsed. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid with troubleshooting.
+**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs fetched to aid with troubleshooting.
 
 **DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare meaning Cloudflare doesn't have access to user traffic.
 
-**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.
+**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). These are not cross-site cookies and the cookies hold no personal data.
 
-**Hosting:** The official instances are hosted on [Replit](https://replit.com/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.
+**Hosting:** The official instances are hosted on [Replit](https://replit.com/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, self-hosting, using unofficial instances, and browsing through Tor are welcomed.
 
 ---
 
@@ -156,6 +148,10 @@ docker pull spikecodes/libreddit
 docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
 ```
 
+To deploy on `arm64` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:arm`.
+
+To deploy on `armv7` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:armv7`.
+
 ## 3) AUR
 
 For ArchLinux users, Libreddit is available from the AUR as [`libreddit-git`](https://aur.archlinux.org/packages/libreddit-git).
@@ -166,17 +162,15 @@ yay -S libreddit-git
 
 ## 4) GitHub Releases
 
-If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/spikecodes/libreddit/releases/latest).
+If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/libreddit/libreddit/releases/latest).
 
-## 5) Replit
+## 5) Replit/Heroku/Glitch
 
-**Note:** Replit is a free option but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.
+**Note:** These are free hosting options but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.
 
-1. Create a Replit account (see note above)
-2. Visit [the official Repl](https://replit.com/@spikethecoder/libreddit) and fork it
-3. Hit the run button to download the latest Libreddit version and start it
-
-In the web preview (defaults to top right), you should see your instance hosted where you can assign a [custom domain](https://docs.replit.com/repls/web-hosting#custom-domains).
+<a href="https://repl.it/github/libreddit/libreddit"><img src="https://repl.it/badge/github/libreddit/libreddit" alt="Run on Repl.it" height="32" /></a>
+[](https://heroku.com/deploy?template=https://github.com/libreddit/libreddit)
+[](https://glitch.com/edit/#!/remix/libreddit)
 
 ---
 
@@ -188,18 +182,65 @@ Once installed, deploy Libreddit to `0.0.0.0:8080` by running:
 libreddit
 ```
 
+## Change Default Settings
+
+Assign a default value for each setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
+
+| Name | Possible values | Default value |
+|-------------------------|-----------------------------------------------------------------------------------------------------|---------------|
+| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox", "gruvboxdark", "gruvboxlight"]` | `system` |
+| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` |
+| `LAYOUT` | `["card", "clean", "compact"]` | `card` |
+| `WIDE` | `["on", "off"]` | `off` |
+| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
+| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
+| `SHOW_NSFW` | `["on", "off"]` | `off` |
+| `BLUR_NSFW` | `["on", "off"]` | `off` |
+| `USE_HLS` | `["on", "off"]` | `off` |
+| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` |
+| `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` |
+
+### Examples
+
+```bash
+LIBREDDIT_DEFAULT_SHOW_NSFW=on libreddit
+```
+
+```bash
+LIBREDDIT_DEFAULT_WIDE=on LIBREDDIT_DEFAULT_THEME=dark libreddit -r
+```
+
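The same defaults can be handed to the Docker deployment from section 2; a minimal sketch, where the `-e` flags simply set the `LIBREDDIT_DEFAULT_{X}` variables inside the container, using the image and port mapping shown earlier in this README:

```bash
docker run -d --name libreddit -p 80:8080 \
  -e LIBREDDIT_DEFAULT_THEME=dark \
  -e LIBREDDIT_DEFAULT_SHOW_NSFW=on \
  spikecodes/libreddit
```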
 ## Proxying using NGINX
 
-**NOTE** If you're [proxying Libreddit through a NGINX Reverse Proxy](https://github.com/spikecodes/libreddit/issues/122#issuecomment-782226853), add
+**NOTE** If you're [proxying Libreddit through an NGINX Reverse Proxy](https://github.com/libreddit/libreddit/issues/122#issuecomment-782226853), add
 ```nginx
 proxy_http_version 1.1;
 ```
 to your NGINX configuration file above your `proxy_pass` line.
 
+## systemd
+
+You can use the systemd service available in `contrib/libreddit.service`
+(install it on `/etc/systemd/system/libreddit.service`).
+
+That service can be optionally configured in terms of environment variables by
+creating a file in `/etc/libreddit.conf`. Use the `contrib/libreddit.conf` as a
+template. You can also add the `LIBREDDIT_DEFAULT__{X}` settings explained
+above.
+
+When "Proxying using NGINX" where the proxy is on the same machine, you should
+guarantee nginx waits for this service to start. Edit
+`/etc/systemd/system/libreddit.service.d/reverse-proxy.conf`:
+
+```conf
+[Unit]
+Before=nginx.service
+```
+
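A usage sketch for the systemd setup described above, assuming the repository files are copied to the paths the README mentions (`/etc/systemd/system/libreddit.service` and the optional `/etc/libreddit.conf`):

```bash
sudo cp contrib/libreddit.service /etc/systemd/system/libreddit.service
sudo cp contrib/libreddit.conf /etc/libreddit.conf   # optional ADDRESS/PORT overrides
sudo systemctl daemon-reload
sudo systemctl enable --now libreddit
```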
 ## Building
 
 ```
-git clone https://github.com/spikecodes/libreddit
+git clone https://github.com/libreddit/libreddit
 cd libreddit
 cargo run
 ```
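For a quick smoke test after building, a hedged sketch: the `-a`/`-p` flags mirror the `ExecStart` line in `contrib/libreddit.service` further down, and the binary path is Cargo's default release output.

```bash
cargo build --release
./target/release/libreddit -a 0.0.0.0 -p 8080
```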
45 app.json (new file)
@@ -0,0 +1,45 @@
+{
+  "name": "Libreddit",
+  "description": "Private front-end for Reddit",
+  "buildpacks": [
+    {
+      "url": "https://github.com/emk/heroku-buildpack-rust"
+    },
+    {
+      "url": "emk/rust"
+    }
+  ],
+  "stack": "container",
+  "env": {
+    "LIBREDDIT_DEFAULT_THEME": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_FRONT_PAGE": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_LAYOUT": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_WIDE": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_COMMENT_SORT": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_POST_SORT": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_SHOW_NSFW": {
+      "required": false
+    },
+    "LIBREDDIT_DEFAULT_BLUR_NSFW": {
+      "required": false
+    },
+    "LIBREDDIT_USE_HLS": {
+      "required": false
+    },
+    "LIBREDDIT_HIDE_HLS_NOTIFICATION": {
+      "required": false
+    }
+  }
+}
2 contrib/libreddit.conf (new file)
@@ -0,0 +1,2 @@
+ADDRESS=0.0.0.0
+PORT=12345
37 contrib/libreddit.service (new file)
@@ -0,0 +1,37 @@
+[Unit]
+Description=libreddit daemon
+After=network.service
+
+[Service]
+DynamicUser=yes
+# Default Values
+Environment=ADDRESS=0.0.0.0
+Environment=PORT=8080
+# Optional Override
+EnvironmentFile=-/etc/libreddit.conf
+ExecStart=/usr/bin/libreddit -a ${ADDRESS} -p ${PORT}
+
+# Hardening
+DeviceAllow=
+LockPersonality=yes
+MemoryDenyWriteExecute=yes
+PrivateDevices=yes
+ProcSubset=pid
+ProtectClock=yes
+ProtectControlGroups=yes
+ProtectHome=yes
+ProtectHostname=yes
+ProtectKernelLogs=yes
+ProtectKernelModules=yes
+ProtectKernelTunables=yes
+ProtectProc=invisible
+RestrictAddressFamilies=AF_INET AF_INET6
+RestrictNamespaces=yes
+RestrictRealtime=yes
+RestrictSUIDSGID=yes
+SystemCallArchitectures=native
+SystemCallFilter=@system-service ~@privileged ~@resources
+UMask=0077
+
+[Install]
+WantedBy=default.target
13 docker-compose.yml (new file)
@@ -0,0 +1,13 @@
+version: "3.8"
+
+services:
+  web:
+    build: .
+    restart: always
+    container_name: "libreddit"
+    ports:
+      - 8080:8080
+    healthcheck:
+      test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"]
+      interval: 5m
+      timeout: 3s
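A usage sketch for the compose file above; these are standard Compose commands, and `web` is simply the service name defined in the file:

```bash
docker-compose up -d         # build the image and start the "libreddit" container on port 8080
docker-compose logs -f web   # follow the logs of the "web" service
docker-compose down          # stop and remove the container
```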
3 heroku.yml (new file)
@@ -0,0 +1,3 @@
+build:
+  docker:
+    web: Dockerfile
15 scripts/gen-credits.sh (new executable file)
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+# This scripts generates the CREDITS file in the repository root, which
+# contains a list of all contributors ot the Libreddit project.
+#
+# We use git-log to surface the names and emails of all authors and committers,
+# and grep will filter any automated commits due to GitHub.
+
+set -o pipefail
+
+cd "$(dirname "${BASH_SOURCE[0]}")/../" || exit 1
+git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' master \
+  | sort -t'<' -u -k1,1 -k2,2 \
+  | grep -Fv -- 'GitHub <noreply@github.com>' \
+  > CREDITS
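A usage sketch for the script above: it ships as an executable file, so it can be run straight from a full clone that has the `master` branch, and its only output is a regenerated `CREDITS` file in the repository root.

```bash
./scripts/gen-credits.sh
git diff --stat CREDITS   # review the regenerated contributor list
```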
215
src/client.rs
215
src/client.rs
@ -1,33 +1,90 @@
|
|||||||
use cached::proc_macro::cached;
|
use cached::proc_macro::cached;
|
||||||
use futures_lite::{future::Boxed, FutureExt};
|
use futures_lite::{future::Boxed, FutureExt};
|
||||||
use hyper::{body::Buf, client, Body, Request, Response, Uri};
|
use hyper::{body, body::Buf, client, header, Body, Method, Request, Response, Uri};
|
||||||
|
use libflate::gzip;
|
||||||
|
use percent_encoding::{percent_encode, CONTROLS};
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
use std::{result::Result, str::FromStr};
|
use std::{io, result::Result};
|
||||||
|
|
||||||
|
use crate::dbg_msg;
|
||||||
use crate::server::RequestExt;
|
use crate::server::RequestExt;
|
||||||
|
|
||||||
pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
|
const REDDIT_URL_BASE: &str = "https://www.reddit.com";
|
||||||
let mut url = format.to_string();
|
|
||||||
|
|
||||||
|
/// Gets the canonical path for a resource on Reddit. This is accomplished by
|
||||||
|
/// making a `HEAD` request to Reddit at the path given in `path`.
|
||||||
|
///
|
||||||
|
/// This function returns `Ok(Some(path))`, where `path`'s value is identical
|
||||||
|
/// to that of the value of the argument `path`, if Reddit responds to our
|
||||||
|
/// `HEAD` request with a 2xx-family HTTP code. It will also return an
|
||||||
|
/// `Ok(Some(String))` if Reddit responds to our `HEAD` request with a
|
||||||
|
/// `Location` header in the response, and the HTTP code is in the 3xx-family;
|
||||||
|
/// the `String` will contain the path as reported in `Location`. The return
|
||||||
|
/// value is `Ok(None)` if Reddit responded with a 3xx, but did not provide a
|
||||||
|
/// `Location` header. An `Err(String)` is returned if Reddit responds with a
|
||||||
|
/// 429, or if we were unable to decode the value in the `Location` header.
|
||||||
|
#[cached(size = 1024, time = 600, result = true)]
|
||||||
|
pub async fn canonical_path(path: String) -> Result<Option<String>, String> {
|
||||||
|
let res = reddit_head(path.clone(), true).await?;
|
||||||
|
|
||||||
|
if res.status() == 429 {
|
||||||
|
return Err("Too many requests.".to_string());
|
||||||
|
};
|
||||||
|
|
||||||
|
// If Reddit responds with a 2xx, then the path is already canonical.
|
||||||
|
if res.status().to_string().starts_with('2') {
|
||||||
|
return Ok(Some(path));
|
||||||
|
}
|
||||||
|
|
||||||
|
// If Reddit responds with anything other than 3xx (except for the 2xx as
|
||||||
|
// above), return a None.
|
||||||
|
if !res.status().to_string().starts_with('3') {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
res
|
||||||
|
.headers()
|
||||||
|
.get(header::LOCATION)
|
||||||
|
.map(|val| percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
	let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());

	// For each parameter in request
	for (name, value) in req.params().iter() {
		// Fill the parameter value in the url
		url = url.replace(&format!("{{{}}}", name), value);
	}

-	stream(&url).await
+	stream(&url, &req).await
}

-async fn stream(url: &str) -> Result<Response<Body>, String> {
+async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
	// First parameter is target URL (mandatory).
-	let url = Uri::from_str(url).map_err(|_| "Couldn't parse URL".to_string())?;
+	let uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;

	// Prepare the HTTPS connector.
-	let https = hyper_rustls::HttpsConnector::with_native_roots();
+	let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();

	// Build the hyper client from the HTTPS connector.
	let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);

+	let mut builder = Request::get(uri);
+
+	// Copy useful headers from original request
+	for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
+		if let Some(value) = req.headers().get(key) {
+			builder = builder.header(key, value);
+		}
+	}
+
+	let stream_request = builder.body(Body::empty()).map_err(|_| "Couldn't build empty body in stream".to_string())?;
+
	client
-		.get(url)
+		.request(stream_request)
		.await
		.map(|mut res| {
			let mut rm = |key: &str| res.headers_mut().remove(key);
@@ -40,48 +97,143 @@ async fn stream(url: &str) -> Result<Response<Body>, String> {
			rm("x-cdn-client-region");
			rm("x-cdn-name");
			rm("x-cdn-server-region");
+			rm("x-reddit-cdn");
+			rm("x-reddit-video-features");

			res
		})
		.map_err(|e| e.to_string())
}
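The header whitelist above is the heart of the new streaming behaviour: only Range, If-Modified-Since and Cache-Control are forwarded, which keeps video seeking and conditional requests working without leaking other client headers. A self-contained sketch of the same idea, using only hyper's HeaderMap (the helper name is hypothetical, not from this codebase):

use hyper::header::{HeaderMap, HeaderValue};

// Copy a whitelist of request headers onto an outgoing proxied request.
fn copy_whitelisted_headers(from: &HeaderMap<HeaderValue>, to: &mut HeaderMap<HeaderValue>) {
	for key in ["range", "if-modified-since", "cache-control"] {
		if let Some(value) = from.get(key) {
			to.insert(key, value.clone());
		}
	}
}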
-fn request(url: String) -> Boxed<Result<Response<Body>, String>> {
-	// Prepare the HTTPS connector.
-	let https = hyper_rustls::HttpsConnector::with_native_roots();
+/// Makes a GET request to Reddit at `path`. By default, this will honor HTTP
+/// 3xx codes Reddit returns and will automatically redirect.
+fn reddit_get(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+	request(&Method::GET, path, true, quarantine)
+}
+
-	// Build the hyper client from the HTTPS connector.
+/// Makes a HEAD request to Reddit at `path`. This will not follow redirects.
+fn reddit_head(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+	request(&Method::HEAD, path, false, quarantine)
+}
+
+/// Makes a request to Reddit. If `redirect` is `true`, request_with_redirect
+/// will recurse on the URL that Reddit provides in the Location HTTP header
+/// in its response.
+fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
+	// Build Reddit URL from path.
+	let url = format!("{}{}", REDDIT_URL_BASE, path);
+
+	// Prepare the HTTPS connector.
+	let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_or_http().enable_http1().build();
+
+	// Construct the hyper client from the HTTPS connector.
	let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);

+	// Build request to Reddit. When making a GET, request gzip compression.
+	// (Reddit doesn't do brotli yet.)
	let builder = Request::builder()
-		.method("GET")
+		.method(method)
		.uri(&url)
		.header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION")))
		.header("Host", "www.reddit.com")
		.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
+		.header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" })
		.header("Accept-Language", "en-US,en;q=0.5")
		.header("Connection", "keep-alive")
+		.header("Cookie", if quarantine { "_options=%7B%22pref_quarantine_optin%22%3A%20true%7D" } else { "" })
		.body(Body::empty());

	async move {
		match builder {
			Ok(req) => match client.request(req).await {
-				Ok(response) => {
+				Ok(mut response) => {
+					// Reddit may respond with a 3xx. Decide whether or not to
+					// redirect based on caller params.
					if response.status().to_string().starts_with('3') {
-						request(
+						if !redirect {
+							return Ok(response);
+						};
+
+						return request(
+							method,
							response
								.headers()
-								.get("Location")
-								.map(|val| val.to_str().unwrap_or_default())
+								.get(header::LOCATION)
+								.map(|val| {
+									// We need to make adjustments to the URI
+									// we get back from Reddit. Namely, we
+									// must:
+									//
+									// 1. Remove the authority (e.g.
+									// https://www.reddit.com) that may be
+									// present, so that we recurse on the
+									// path (and query parameters) as
+									// required.
+									//
+									// 2. Percent-encode the path.
+									let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string();
+									format!("{}{}raw_json=1", new_path, if new_path.contains('?') { "&" } else { "?" })
+								})
								.unwrap_or_default()
								.to_string(),
+							true,
+							quarantine,
						)
-						.await
-					} else {
-						Ok(response)
+						.await;
+					};
+
+					match response.headers().get(header::CONTENT_ENCODING) {
+						// Content not compressed.
+						None => Ok(response),
+
+						// Content encoded (hopefully with gzip).
+						Some(hdr) => {
+							match hdr.to_str() {
+								Ok(val) => match val {
+									"gzip" => {}
+									"identity" => return Ok(response),
+									_ => return Err("Reddit response was encoded with an unsupported compressor".to_string()),
+								},
+								Err(_) => return Err("Reddit response was invalid".to_string()),
+							}
+
+							// We get here if the body is gzip-compressed.
+
+							// The body must be something that implements
+							// std::io::Read, hence the conversion to
+							// bytes::buf::Buf and then transformation into a
+							// Reader.
+							let mut decompressed: Vec<u8>;
+							{
+								let mut aggregated_body = match body::aggregate(response.body_mut()).await {
+									Ok(b) => b.reader(),
+									Err(e) => return Err(e.to_string()),
+								};
+
+								let mut decoder = match gzip::Decoder::new(&mut aggregated_body) {
+									Ok(decoder) => decoder,
+									Err(e) => return Err(e.to_string()),
+								};
+
+								decompressed = Vec::<u8>::new();
+								if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
+									return Err(e.to_string());
+								};
+							}
+
+							response.headers_mut().remove(header::CONTENT_ENCODING);
+							response.headers_mut().insert(header::CONTENT_LENGTH, decompressed.len().into());
+							*(response.body_mut()) = Body::from(decompressed);
+
+							Ok(response)
+						}
					}
				}
-				Err(e) => Err(e.to_string()),
+				Err(e) => {
+					dbg_msg!("{} {}: {}", method, path, e);
+
+					Err(e.to_string())
+				}
			},
			Err(_) => Err("Post url contains non-ASCII characters".to_string()),
		}
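The gzip branch above is the only decompression path needed, because the request asks Reddit for gzip (or identity) only. For context, a small self-contained illustration of the same libflate API (a gzip::Decoder wrapping anything that implements std::io::Read), independent of the code above:

use libflate::gzip;
use std::io::{self, Write};

fn main() -> io::Result<()> {
	// Compress a payload in memory...
	let mut encoder = gzip::Encoder::new(Vec::new())?;
	encoder.write_all(b"hello from a gzipped body")?;
	let compressed = encoder.finish().into_result()?;

	// ...then decompress it, exactly as the response body is handled above.
	let mut decoder = gzip::Decoder::new(&compressed[..])?;
	let mut decompressed = Vec::new();
	io::copy(&mut decoder, &mut decompressed)?;

	assert_eq!(decompressed, b"hello from a gzipped body");
	Ok(())
}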
@@ -91,10 +243,7 @@ fn request(url: String) -> Boxed<Result<Response<Body>, String>> {

// Make a request to a Reddit API and parse the JSON response
#[cached(size = 100, time = 30, result = true)]
-pub async fn json(path: String) -> Result<Value, String> {
-	// Build Reddit url from path
-	let url = format!("https://www.reddit.com{}", path);
+pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {

	// Closure to quickly build errors
	let err = |msg: &str, e: String| -> Result<Value, String> {
		// eprintln!("{} - {}: {}", url, msg, e);
@@ -102,8 +251,10 @@ pub async fn json(path: String) -> Result<Value, String> {
	};

	// Fetch the url...
-	match request(url.clone()).await {
+	match reddit_get(path.clone(), quarantine).await {
		Ok(response) => {
+			let status = response.status();
+
			// asynchronously aggregate the chunks of the body
			match hyper::body::aggregate(response).await {
				Ok(body) => {
@@ -118,7 +269,7 @@ pub async fn json(path: String) -> Result<Value, String> {
							.as_str()
							.unwrap_or_else(|| {
								json["message"].as_str().unwrap_or_else(|| {
-									eprintln!("{} - Error parsing reddit error", url);
+									eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path);
									"Error parsing reddit error"
								})
							})
@@ -128,7 +279,13 @@ pub async fn json(path: String) -> Result<Value, String> {
						Ok(json)
					}
				}
-				Err(e) => err("Failed to parse page JSON data", e.to_string()),
+				Err(e) => {
+					if status.is_server_error() {
+						Err("Reddit is having issues, check if there's an outage".to_string())
+					} else {
+						err("Failed to parse page JSON data", e.to_string())
+					}
+				}
			}
		}
		Err(e) => err("Failed receiving body from Reddit", e.to_string()),
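The #[cached] attribute used on json and canonical_path comes from the cached crate: size bounds the number of entries, time is a TTL in seconds, and result = true caches only Ok values. A standalone sketch of that attribute on a trivial function (the function name and values are illustrative):

use cached::proc_macro::cached;

// Cache up to 100 Ok results for 30 seconds; errors are never cached.
#[cached(size = 100, time = 30, result = true)]
fn lookup(key: String) -> Result<String, String> {
	// Imagine an expensive computation or network call here.
	Ok(format!("value for {}", key))
}

fn main() {
	assert_eq!(lookup("a".to_string()), Ok("value for a".to_string()));
	// A second call within 30 seconds is served from the cache.
	assert_eq!(lookup("a".to_string()), Ok("value for a".to_string()));
}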
164  src/main.rs
@@ -1,14 +1,6 @@
// Global specifiers
#![forbid(unsafe_code)]
-#![warn(clippy::pedantic, clippy::all)]
-#![allow(
-	clippy::needless_pass_by_value,
-	clippy::match_wildcard_for_single_variants,
-	clippy::cast_possible_truncation,
-	clippy::similar_names,
-	clippy::cast_possible_wrap,
-	clippy::find_map
-)]
+#![allow(clippy::cmp_owned)]

// Reference local files
mod post;
@@ -19,15 +11,15 @@ mod user;
mod utils;

// Import Crates
-use clap::{App as cli, Arg};
+use clap::{Arg, Command};

use futures_lite::FutureExt;
use hyper::{header::HeaderValue, Body, Request, Response};

mod client;
-use client::proxy;
+use client::{canonical_path, proxy};
use server::RequestExt;
-use utils::{error, redirect};
+use utils::{error, redirect, ThemeAssets};

mod server;

@@ -66,6 +58,17 @@ async fn favicon() -> Result<Response<Body>, String> {
	)
}

+async fn font() -> Result<Response<Body>, String> {
+	Ok(
+		Response::builder()
+			.status(200)
+			.header("content-type", "font/woff2")
+			.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+			.body(include_bytes!("../static/Inter.var.woff2").as_ref().into())
+			.unwrap_or_default(),
+	)
+}
+
async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Response<Body>, String> {
	let mut res = Response::builder()
		.status(200)
@@ -82,52 +85,69 @@ async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Respons
	Ok(res)
}

+async fn style() -> Result<Response<Body>, String> {
+	let mut res = include_str!("../static/style.css").to_string();
+	for file in ThemeAssets::iter() {
+		res.push('\n');
+		let theme = ThemeAssets::get(file.as_ref()).unwrap();
+		res.push_str(std::str::from_utf8(theme.data.as_ref()).unwrap());
+	}
+	Ok(
+		Response::builder()
+			.status(200)
+			.header("content-type", "text/css")
+			.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+			.body(res.to_string().into())
+			.unwrap_or_default(),
+	)
+}
+
#[tokio::main]
async fn main() {
-	let matches = cli::new("Libreddit")
+	let matches = Command::new("Libreddit")
		.version(env!("CARGO_PKG_VERSION"))
		.about("Private front-end for Reddit written in Rust ")
		.arg(
-			Arg::with_name("address")
-				.short("a")
+			Arg::new("redirect-https")
+				.short('r')
+				.long("redirect-https")
+				.help("Redirect all HTTP requests to HTTPS (no longer functional)")
+				.num_args(0),
+		)
+		.arg(
+			Arg::new("address")
+				.short('a')
				.long("address")
				.value_name("ADDRESS")
				.help("Sets address to listen on")
				.default_value("0.0.0.0")
-				.takes_value(true),
+				.num_args(1),
		)
		.arg(
-			Arg::with_name("port")
-				.short("p")
+			Arg::new("port")
+				.short('p')
				.long("port")
				.value_name("PORT")
				.help("Port to listen on")
				.default_value("8080")
-				.takes_value(true),
+				.num_args(1),
		)
		.arg(
-			Arg::with_name("redirect-https")
-				.short("r")
-				.long("redirect-https")
-				.help("Redirect all HTTP requests to HTTPS (no longer functional)")
-				.takes_value(false),
-		)
-		.arg(
-			Arg::with_name("hsts")
-				.short("H")
+			Arg::new("hsts")
+				.short('H')
				.long("hsts")
				.value_name("EXPIRE_TIME")
				.help("HSTS header to tell browsers that this site should only be accessed over HTTPS")
				.default_value("604800")
-				.takes_value(true),
+				.num_args(1),
		)
		.get_matches();

-	let address = matches.value_of("address").unwrap_or("0.0.0.0");
-	let port = matches.value_of("port").unwrap_or("8080");
-	let hsts = matches.value_of("hsts");
+	let address = matches.get_one("address").map(|m: &String| m.as_str()).unwrap_or("0.0.0.0");
+	let port = std::env::var("PORT").unwrap_or_else(|_| matches.get_one("port").map(|m: &String| m.as_str()).unwrap_or("8080").to_string());
+	let hsts = matches.get_one("hsts").map(|m: &String| m.as_str());

-	let listener = format!("{}:{}", address, port);
+	let listener = [address, ":", &port].concat();

	println!("Starting Libreddit...");
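The switch from clap's App/with_name/takes_value API to Command, Arg::new, num_args and typed get_one lookups is the clap 4 builder style. For reference, a self-contained sketch of the same pattern (the program and flag names here are placeholders, not the real CLI above):

use clap::{Arg, Command};

fn main() {
	let matches = Command::new("example")
		.arg(Arg::new("port").short('p').long("port").num_args(1).default_value("8080"))
		.get_matches();

	// get_one returns a typed reference; clap stores String values by default.
	let port = matches.get_one::<String>("port").map(String::as_str).unwrap_or("8080");
	println!("listening on port {}", port);
}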
@@ -139,7 +159,7 @@ async fn main() {
		"Referrer-Policy" => "no-referrer",
		"X-Content-Type-Options" => "nosniff",
		"X-Frame-Options" => "DENY",
-		"Content-Security-Policy" => "default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';"
+		"Content-Security-Policy" => "default-src 'none'; font-src 'self'; script-src 'self' blob:; manifest-src 'self'; media-src 'self' data: blob: about:; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none'; connect-src 'self'; worker-src blob:;"
	};

	if let Some(expire_time) = hsts {
@@ -149,22 +169,35 @@ async fn main() {
	}

	// Read static files
-	app.at("/style.css").get(|_| resource(include_str!("../static/style.css"), "text/css", false).boxed());
+	app.at("/style.css").get(|_| style().boxed());
	app
		.at("/manifest.json")
		.get(|_| resource(include_str!("../static/manifest.json"), "application/json", false).boxed());
-	app.at("/robots.txt").get(|_| resource("User-agent: *\nAllow: /", "text/plain", true).boxed());
+	app
+		.at("/robots.txt")
+		.get(|_| resource("User-agent: *\nDisallow: /u/\nDisallow: /user/", "text/plain", true).boxed());
	app.at("/favicon.ico").get(|_| favicon().boxed());
	app.at("/logo.png").get(|_| pwa_logo().boxed());
+	app.at("/Inter.var.woff2").get(|_| font().boxed());
	app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed());
	app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed());
+	app
+		.at("/playHLSVideo.js")
+		.get(|_| resource(include_str!("../static/playHLSVideo.js"), "text/javascript", false).boxed());
+	app
+		.at("/hls.min.js")
+		.get(|_| resource(include_str!("../static/hls.min.js"), "text/javascript", false).boxed());

	// Proxy media through Libreddit
	app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
-	app.at("/img/:id").get(|r| proxy(r, "https://i.redd.it/{id}").boxed());
+	app.at("/hls/:id/*path").get(|r| proxy(r, "https://v.redd.it/{id}/{path}").boxed());
+	app.at("/img/*path").get(|r| proxy(r, "https://i.redd.it/{path}").boxed());
	app.at("/thumb/:point/:id").get(|r| proxy(r, "https://{point}.thumbs.redditmedia.com/{id}").boxed());
	app.at("/emoji/:id/:name").get(|r| proxy(r, "https://emoji.redditmedia.com/{id}/{name}").boxed());
-	app.at("/preview/:loc/:id/:query").get(|r| proxy(r, "https://{loc}view.redd.it/{id}?{query}").boxed());
+	app
+		.at("/preview/:loc/award_images/:fullname/:id")
+		.get(|r| proxy(r, "https://{loc}view.redd.it/award_images/{fullname}/{id}").boxed());
+	app.at("/preview/:loc/:id").get(|r| proxy(r, "https://{loc}view.redd.it/{id}").boxed());
	app.at("/style/*path").get(|r| proxy(r, "https://styles.redditmedia.com/{path}").boxed());
	app.at("/static/*path").get(|r| proxy(r, "https://www.redditstatic.com/{path}").boxed());

@@ -177,6 +210,7 @@ async fn main() {

	app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account".to_string()).boxed());
	app.at("/user/:name").get(|r| user::profile(r).boxed());
+	app.at("/user/:name/:listing").get(|r| user::profile(r).boxed());
	app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed());
	app.at("/user/:name/comments/:id/:title").get(|r| post::item(r).boxed());
	app.at("/user/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
@@ -184,20 +218,31 @@ async fn main() {
	// Configure settings
	app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed());
	app.at("/settings/restore").get(|r| settings::restore(r).boxed());
+	app.at("/settings/update").get(|r| settings::update(r).boxed());

	// Subreddit services
-	app.at("/r/:sub").get(|r| subreddit::community(r).boxed());
+	app
+		.at("/r/:sub")
+		.get(|r| subreddit::community(r).boxed())
+		.post(|r| subreddit::add_quarantine_exception(r).boxed());

	app
		.at("/r/u_:name")
		.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());

-	app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions(r).boxed());
-	app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions(r).boxed());
+	app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
+	app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
+	app.at("/r/:sub/filter").post(|r| subreddit::subscriptions_filters(r).boxed());
+	app.at("/r/:sub/unfilter").post(|r| subreddit::subscriptions_filters(r).boxed());

	app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
	app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
	app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
+	app.at("/comments/:id").get(|r| post::item(r).boxed());
+	app.at("/comments/:id/comments").get(|r| post::item(r).boxed());
+	app.at("/comments/:id/comments/:comment_id").get(|r| post::item(r).boxed());
+	app.at("/comments/:id/:title").get(|r| post::item(r).boxed());
+	app.at("/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());

	app.at("/r/:sub/search").get(|r| search::find(r).boxed());

@@ -205,28 +250,25 @@ async fn main() {
		.at("/r/:sub/w")
		.get(|r| async move { Ok(redirect(format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed());
	app
-		.at("/r/:sub/w/:page")
+		.at("/r/:sub/w/*page")
		.get(|r| async move { Ok(redirect(format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed());
	app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed());
-	app.at("/r/:sub/wiki/:page").get(|r| subreddit::wiki(r).boxed());
+	app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed());

	app.at("/r/:sub/about/sidebar").get(|r| subreddit::sidebar(r).boxed());

	app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed());

-	// Comments handler
-	app.at("/comments/:id").get(|r| post::item(r).boxed());
-
	// Front page
	app.at("/").get(|r| subreddit::community(r).boxed());

	// View Reddit wiki
	app.at("/w").get(|_| async { Ok(redirect("/wiki".to_string())) }.boxed());
	app
-		.at("/w/:page")
+		.at("/w/*page")
		.get(|r| async move { Ok(redirect(format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed());
	app.at("/wiki").get(|r| subreddit::wiki(r).boxed());
-	app.at("/wiki/:page").get(|r| subreddit::wiki(r).boxed());
+	app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed());

	// Search all of Reddit
	app.at("/search").get(|r| search::find(r).boxed());
@@ -234,13 +276,25 @@ async fn main() {
	// Handle about pages
	app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());

-	app.at("/:id").get(|req: Request<Body>| match req.param("id").as_deref() {
-		// Sort front page
-		Some("best") | Some("hot") | Some("new") | Some("top") | Some("rising") | Some("controversial") => subreddit::community(req).boxed(),
-		// Short link for post
-		Some(id) if id.len() > 4 && id.len() < 7 => post::item(req).boxed(),
-		// Error message for unknown pages
-		_ => error(req, "Nothing here".to_string()).boxed(),
+	app.at("/:id").get(|req: Request<Body>| {
+		Box::pin(async move {
+			match req.param("id").as_deref() {
+				// Sort front page
+				Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,
+
+				// Short link for post
+				Some(id) if (5..7).contains(&id.len()) => match canonical_path(format!("/{}", id)).await {
+					Ok(path_opt) => match path_opt {
+						Some(path) => Ok(redirect(path)),
+						None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
+					},
+					Err(e) => error(req, e).await,
+				},
+
+				// Error message for unknown pages
+				_ => error(req, "Nothing here".to_string()).await,
+			}
+		})
	});

	// Default service in case no routes match
205  src/post.rs
@@ -1,40 +1,46 @@
// CRATES
use crate::client::json;
-use crate::esc;
use crate::server::RequestExt;
-use crate::utils::{cookie, error, format_num, format_url, param, rewrite_urls, template, time, val, Author, Comment, Flags, Flair, FlairPart, Media, Post, Preferences};
+use crate::subreddit::{can_access_quarantine, quarantine};
+use crate::utils::{
+	error, format_num, format_url, get_filters, param, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
+};
use hyper::{Body, Request, Response};

-use async_recursion::async_recursion;
-
use askama::Template;
+use std::collections::HashSet;

// STRUCTS
#[derive(Template)]
-#[template(path = "post.html", escape = "none")]
+#[template(path = "post.html")]
struct PostTemplate {
	comments: Vec<Comment>,
	post: Post,
	sort: String,
	prefs: Preferences,
	single_thread: bool,
+	url: String,
}

pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
	// Build Reddit API path
	let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
+	let sub = req.param("sub").unwrap_or_default();
+	let quarantined = can_access_quarantine(&req, &sub);

	// Set sort to sort query parameter
-	let mut sort: String = param(&path, "sort");
-
-	// Grab default comment sort method from Cookies
-	let default_sort = cookie(&req, "comment_sort");
-
-	// If there's no sort query but there's a default sort, set sort to default_sort
-	if sort.is_empty() && !default_sort.is_empty() {
-		sort = default_sort;
-		path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), sort);
-	}
+	let sort = param(&path, "sort").unwrap_or_else(|| {
+		// Grab default comment sort method from Cookies
+		let default_sort = setting(&req, "comment_sort");
+
+		// If there's no sort query but there's a default sort, set sort to default_sort
+		if default_sort.is_empty() {
+			String::new()
+		} else {
+			path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), default_sort);
+			default_sort
+		}
+	});

	// Log the post ID being fetched in debug mode
	#[cfg(debug_assertions)]
@@ -44,12 +50,13 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
	let highlighted_comment = &req.param("comment_id").unwrap_or_default();

	// Send a request to the url, receive JSON in response
-	match json(path).await {
+	match json(path, quarantined).await {
		// Otherwise, grab the JSON output from the request
-		Ok(res) => {
+		Ok(response) => {
			// Parse the JSON into Post and Comment structs
-			let post = parse_post(&res[0]).await;
-			let comments = parse_comments(&res[1], &post.permalink, &post.author.name, highlighted_comment).await;
+			let post = parse_post(&response[0]).await;
+			let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req));
+			let url = req.uri().to_string();

			// Use the Post and Comment structs to generate a website to show users
			template(PostTemplate {
@@ -58,10 +65,18 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
				sort,
				prefs: Preferences::new(req),
				single_thread,
+				url,
			})
		}
		// If the Reddit API returns an error, exit and send error page to user
-		Err(msg) => error(req, msg).await,
+		Err(msg) => {
+			if msg == "quarantined" {
+				let sub = req.param("sub").unwrap_or_default();
+				quarantine(req, sub)
+			} else {
+				error(req, msg).await
+			}
+		}
	}
}

@@ -79,12 +94,25 @@ async fn parse_post(json: &serde_json::Value) -> Post {
	// Determine the type of media along with the media URL
	let (post_type, media, gallery) = Media::parse(&post["data"]).await;

+	let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
+
+	let permalink = val(post, "permalink");
+
+	let body = if val(post, "removed_by_category") == "moderator" {
+		format!(
+			"<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}\">view removed post</a></p></div>",
+			permalink
+		)
+	} else {
+		rewrite_urls(&val(post, "selftext_html"))
+	};
+
	// Build a post using data parsed from Reddit post API
	Post {
		id: val(post, "id"),
-		title: esc!(post, "title"),
+		title: val(post, "title"),
		community: val(post, "subreddit"),
-		body: rewrite_urls(&val(post, "selftext_html")).replace("\\", ""),
+		body,
		author: Author {
			name: val(post, "author"),
			flair: Flair {
@@ -93,19 +121,20 @@ async fn parse_post(json: &serde_json::Value) -> Post {
				post["data"]["author_flair_richtext"].as_array(),
				post["data"]["author_flair_text"].as_str(),
				),
-				text: esc!(post, "link_flair_text"),
+				text: val(post, "link_flair_text"),
				background_color: val(post, "author_flair_background_color"),
				foreground_color: val(post, "author_flair_text_color"),
			},
			distinguished: val(post, "distinguished"),
		},
-		permalink: val(post, "permalink"),
+		permalink,
		score: format_num(score),
		upvote_ratio: ratio as i64,
		post_type,
		media,
		thumbnail: Media {
			url: format_url(val(post, "thumbnail").as_str()),
+			alt_url: String::new(),
			width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
			height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
			poster: "".to_string(),
@@ -116,7 +145,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
				post["data"]["link_flair_richtext"].as_array(),
				post["data"]["link_flair_text"].as_str(),
			),
-			text: esc!(post, "link_flair_text"),
+			text: val(post, "link_flair_text"),
			background_color: val(post, "link_flair_background_color"),
			foreground_color: if val(post, "link_flair_text_color") == "dark" {
				"black".to_string()
@@ -126,65 +155,62 @@ async fn parse_post(json: &serde_json::Value) -> Post {
		},
		flags: Flags {
			nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
-			stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
+			stickied: post["data"]["stickied"].as_bool().unwrap_or(false)
+				|| post["data"]["pinned"].as_bool().unwrap_or(false),
		},
		domain: val(post, "domain"),
		rel_time,
		created,
		comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
		gallery,
+		awards,
	}
}

// COMMENTS
-#[async_recursion]
-async fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str) -> Vec<Comment> {
-	// Separate the comment JSON into a Vector of comments
-	let comment_data = match json["data"]["children"].as_array() {
-		Some(f) => f.to_owned(),
-		None => Vec::new(),
-	};
-
-	let mut comments: Vec<Comment> = Vec::new();
+fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>) -> Vec<Comment> {
+	// Parse the comment JSON into a Vector of Comments
+	let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);

	// For each comment, retrieve the values to build a Comment object
-	for comment in comment_data {
-		let kind = comment["kind"].as_str().unwrap_or_default().to_string();
-		let data = &comment["data"];
+	comments
+		.into_iter()
+		.map(|comment| {
+			let kind = comment["kind"].as_str().unwrap_or_default().to_string();
+			let data = &comment["data"];

			let unix_time = data["created_utc"].as_f64().unwrap_or_default();
			let (rel_time, created) = time(unix_time);

-			let edited = match data["edited"].as_f64() {
-				Some(stamp) => time(stamp),
-				None => (String::new(), String::new()),
-			};
+			let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time);

			let score = data["score"].as_i64().unwrap_or(0);
-			let body = rewrite_urls(&val(&comment, "body_html"));

			// If this comment contains replies, handle those too
			let replies: Vec<Comment> = if data["replies"].is_object() {
-				parse_comments(&data["replies"], post_link, post_author, highlighted_comment).await
+				parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters)
			} else {
				Vec::new()
			};

-			let parent_kind_and_id = val(&comment, "parent_id");
-			let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
-
-			let id = val(&comment, "id");
-			let highlighted = id == highlighted_comment;
-
-			comments.push(Comment {
-				id,
-				kind,
-				parent_id: parent_info[1].to_string(),
-				parent_kind: parent_info[0].to_string(),
-				post_link: post_link.to_string(),
-				post_author: post_author.to_string(),
-				body,
-				author: Author {
+			let awards: Awards = Awards::parse(&data["all_awardings"]);
+
+			let parent_kind_and_id = val(&comment, "parent_id");
+			let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
+
+			let id = val(&comment, "id");
+			let highlighted = id == highlighted_comment;
+
+			let body = if (val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]") || val(&comment, "body") == "[ Removed by Reddit ]" {
+				format!(
+					"<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}{}\">view removed comment</a></p></div>",
+					post_link, id
+				)
+			} else {
+				rewrite_urls(&val(&comment, "body_html"))
+			};
+
+			let author = Author {
				name: val(&comment, "author"),
				flair: Flair {
					flair_parts: FlairPart::parse(
@@ -192,24 +218,45 @@ async fn parse_comments(json: &serde_json::Value, post_link: &str, post_author:
						data["author_flair_richtext"].as_array(),
						data["author_flair_text"].as_str(),
					),
-					text: esc!(&comment, "link_flair_text"),
+					text: val(&comment, "link_flair_text"),
					background_color: val(&comment, "author_flair_background_color"),
					foreground_color: val(&comment, "author_flair_text_color"),
				},
				distinguished: val(&comment, "distinguished"),
-				},
-				score: if data["score_hidden"].as_bool().unwrap_or_default() {
-					("\u{2022}".to_string(), "Hidden".to_string())
-				} else {
-					format_num(score)
-				},
-				rel_time,
-				created,
-				edited,
-				replies,
-				highlighted,
-			});
-		}
-
-	comments
+			};
+			let is_filtered = filters.contains(&["u_", author.name.as_str()].concat());
+
+			// Many subreddits have a default comment posted about the sub's rules etc.
+			// Many libreddit users do not wish to see this kind of comment by default.
+			// Reddit does not tell us which users are "bots", so a good heuristic is to
+			// collapse stickied moderator comments.
+			let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator";
+			let is_stickied = data["stickied"].as_bool().unwrap_or_default();
+			let collapsed = (is_moderator_comment && is_stickied) || is_filtered;
+
+			Comment {
+				id,
+				kind,
+				parent_id: parent_info[1].to_string(),
+				parent_kind: parent_info[0].to_string(),
+				post_link: post_link.to_string(),
+				post_author: post_author.to_string(),
+				body,
+				author,
+				score: if data["score_hidden"].as_bool().unwrap_or_default() {
+					("\u{2022}".to_string(), "Hidden".to_string())
+				} else {
+					format_num(score)
+				},
+				rel_time,
+				created,
+				edited,
+				replies,
+				highlighted,
+				awards,
+				collapsed,
+				is_filtered,
+			}
+		})
+		.collect()
}
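The collapse rule introduced in parse_comments is worth restating: a comment starts collapsed when it is a stickied moderator comment or when its author is filtered. A tiny standalone sketch of that predicate (the function name is illustrative, not from the codebase):

use std::collections::HashSet;

// A comment is collapsed by default if it is a stickied moderator comment
// (typically the boilerplate "sub rules" comment) or its author is filtered.
fn is_collapsed(distinguished: &str, stickied: bool, author: &str, filters: &HashSet<String>) -> bool {
	let is_filtered = filters.contains(&format!("u_{}", author));
	(distinguished == "moderator" && stickied) || is_filtered
}

fn main() {
	let filters: HashSet<String> = ["u_AutoModerator".to_string()].into_iter().collect();
	assert!(is_collapsed("moderator", true, "some_mod", &filters));
	assert!(is_collapsed("", false, "AutoModerator", &filters));
	assert!(!is_collapsed("", false, "regular_user", &filters));
}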
173  src/search.rs
@@ -1,6 +1,10 @@
// CRATES
-use crate::utils::{cookie, error, format_num, format_url, param, template, val, Post, Preferences};
-use crate::{client::json, RequestExt};
+use crate::utils::{catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
+use crate::{
+	client::json,
+	subreddit::{can_access_quarantine, quarantine},
+	RequestExt,
+};
use askama::Template;
use hyper::{Body, Request, Response};

@@ -12,6 +16,7 @@ struct SearchParams {
	before: String,
	after: String,
	restrict_sr: String,
+	typed: String,
}

// STRUCTS
@@ -24,86 +29,140 @@ struct Subreddit {
}

#[derive(Template)]
-#[template(path = "search.html", escape = "none")]
+#[template(path = "search.html")]
struct SearchTemplate {
	posts: Vec<Post>,
	subreddits: Vec<Subreddit>,
	sub: String,
	params: SearchParams,
	prefs: Preferences,
+	url: String,
+	/// Whether the subreddit itself is filtered.
+	is_filtered: bool,
+	/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
+	/// and all fetched posts being filtered).
+	all_posts_filtered: bool,
+	/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
+	all_posts_hidden_nsfw: bool,
}

// SERVICES
pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
-	let nsfw_results = if cookie(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
-	let path = format!("{}.json?{}{}", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
+	let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
+	let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
+	let query = param(&path, "q").unwrap_or_default();
+
+	if query.is_empty() {
+		return Ok(redirect("/".to_string()));
+	}
+
+	if query.starts_with("r/") {
+		return Ok(redirect(format!("/{}", query)));
+	}
+
	let sub = req.param("sub").unwrap_or_default();
-	let query = param(&path, "q");
-
-	let sort = if param(&path, "sort").is_empty() {
-		"relevance".to_string()
-	} else {
-		param(&path, "sort")
-	};
-
-	let subreddits = if param(&path, "restrict_sr").is_empty() {
-		search_subreddits(&query).await
+	let quarantined = can_access_quarantine(&req, &sub);
+	// Handle random subreddits
+	if let Ok(random) = catch_random(&sub, "/find").await {
+		return Ok(random);
+	}
+
+	let typed = param(&path, "type").unwrap_or_default();
+
+	let sort = param(&path, "sort").unwrap_or_else(|| "relevance".to_string());
+	let filters = get_filters(&req);
+
+	// If search is not restricted to this subreddit, show other subreddits in search results
+	let subreddits = if param(&path, "restrict_sr").is_none() {
+		let mut subreddits = search_subreddits(&query, &typed).await;
+		subreddits.retain(|s| !filters.contains(s.name.as_str()));
+		subreddits
	} else {
		Vec::new()
	};

-	match Post::fetch(&path, String::new()).await {
-		Ok((posts, after)) => template(SearchTemplate {
-			posts,
+	let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
+
+	// If all requested subs are filtered, we don't need to fetch posts.
+	if sub.split('+').all(|s| filters.contains(s)) {
+		template(SearchTemplate {
+			posts: Vec::new(),
			subreddits,
			sub,
			params: SearchParams {
				q: query.replace('"', "&quot;"),
				sort,
-				t: param(&path, "t"),
-				before: param(&path, "after"),
-				after,
-				restrict_sr: param(&path, "restrict_sr"),
+				t: param(&path, "t").unwrap_or_default(),
+				before: param(&path, "after").unwrap_or_default(),
+				after: "".to_string(),
+				restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
+				typed,
			},
			prefs: Preferences::new(req),
-		}),
-		Err(msg) => error(req, msg).await,
-	}
-}
-
-async fn search_subreddits(q: &str) -> Vec<Subreddit> {
-	let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
-
-	// Send a request to the url
-	match json(subreddit_search_path).await {
-		// If success, receive JSON in response
-		Ok(response) => {
-			match response["data"]["children"].as_array() {
-				// For each subreddit from subreddit list
-				Some(list) => list
-					.iter()
-					.map(|subreddit| {
-						// Fetch subreddit icon either from the community_icon or icon_img value
-						let community_icon: &str = subreddit["data"]["community_icon"].as_str().map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
-						let icon = if community_icon.is_empty() {
-							val(&subreddit, "icon_img")
-						} else {
-							community_icon.to_string()
-						};
-
-						Subreddit {
-							name: val(subreddit, "display_name_prefixed"),
-							url: val(subreddit, "url"),
-							icon: format_url(&icon),
-							description: val(subreddit, "public_description"),
-							subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64),
-						}
-					})
-					.collect::<Vec<Subreddit>>(),
-				_ => Vec::new(),
-			}
-		}
-		// If the Reddit API returns an error, exit this function
-		_ => Vec::new(),
-	}
-}
+			url,
+			is_filtered: true,
+			all_posts_filtered: false,
+			all_posts_hidden_nsfw: false,
+		})
+	} else {
+		match Post::fetch(&path, quarantined).await {
+			Ok((mut posts, after)) => {
+				let all_posts_filtered = filter_posts(&mut posts, &filters);
+				let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on";
+				template(SearchTemplate {
+					posts,
+					subreddits,
+					sub,
+					params: SearchParams {
+						q: query.replace('"', "&quot;"),
+						sort,
+						t: param(&path, "t").unwrap_or_default(),
+						before: param(&path, "after").unwrap_or_default(),
+						after,
+						restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
+						typed,
+					},
+					prefs: Preferences::new(req),
+					url,
+					is_filtered: false,
+					all_posts_filtered,
+					all_posts_hidden_nsfw,
+				})
+			}
+			Err(msg) => {
+				if msg == "quarantined" {
+					let sub = req.param("sub").unwrap_or_default();
+					quarantine(req, sub)
+				} else {
+					error(req, msg).await
+				}
+			}
+		}
	}
}

+async fn search_subreddits(q: &str, typed: &str) -> Vec<Subreddit> {
+	let limit = if typed == "sr_user" { "50" } else { "3" };
+	let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={}", q.replace(' ', "+"), limit);
+
+	// Send a request to the url
+	json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"]
+		.as_array()
+		.map(ToOwned::to_owned)
+		.unwrap_or_default()
+		.iter()
+		.map(|subreddit| {
+			// For each subreddit from subreddit list
+			// Fetch subreddit icon either from the community_icon or icon_img value
+			let icon = subreddit["data"]["community_icon"].as_str().map_or_else(|| val(subreddit, "icon_img"), ToString::to_string);
+
+			Subreddit {
+				name: val(subreddit, "display_name"),
+				url: val(subreddit, "url"),
+				icon: format_url(&icon),
+				description: val(subreddit, "public_description"),
+				subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64),
+			}
+		})
+		.collect::<Vec<Subreddit>>()
+}
636  src/server.rs
@@ -1,17 +1,80 @@
+use brotli::enc::{BrotliCompress, BrotliEncoderParams};
+use cached::proc_macro::cached;
use cookie::Cookie;
+use core::f64;
use futures_lite::{future::Boxed, Future, FutureExt};
use hyper::{
-	header::HeaderValue,
+	body,
+	body::HttpBody,
+	header,
	service::{make_service_fn, service_fn},
	HeaderMap,
};
use hyper::{Body, Method, Request, Response, Server as HyperServer};
+use libflate::gzip;
use route_recognizer::{Params, Router};
-use std::{pin::Pin, result::Result};
+use std::{
+	cmp::Ordering,
+	io,
+	pin::Pin,
+	result::Result,
+	str::{from_utf8, Split},
+	string::ToString,
+};
use time::Duration;

+use crate::dbg_msg;
+
type BoxResponse = Pin<Box<dyn Future<Output = Result<Response<Body>, String>> + Send>>;

+/// Compressors for the response Body, in ascending order of preference.
+#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+enum CompressionType {
+	Passthrough,
+	Gzip,
+	Brotli,
+}
+
+/// All browsers support gzip, so if we are given `Accept-Encoding: *`, deliver
+/// gzipped-content.
+///
+/// Brotli would be nice universally, but Safari (iOS, iPhone, macOS) reportedly
+/// doesn't support it yet.
+const DEFAULT_COMPRESSOR: CompressionType = CompressionType::Gzip;
+
+impl CompressionType {
+	/// Returns a `CompressionType` given a content coding
+	/// in [RFC 7231](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.4)
+	/// format.
+	fn parse(s: &str) -> Option<CompressionType> {
+		let c = match s {
+			// Compressors we support.
+			"gzip" => CompressionType::Gzip,
+			"br" => CompressionType::Brotli,
+
+			// The wildcard means that we can choose whatever
+			// compression we prefer. In this case, use the
+			// default.
+			"*" => DEFAULT_COMPRESSOR,
+
+			// Compressor not supported.
+			_ => return None,
+		};
+
+		Some(c)
+	}
+}
+
+impl ToString for CompressionType {
+	fn to_string(&self) -> String {
+		match self {
+			CompressionType::Gzip => "gzip".to_string(),
+			CompressionType::Brotli => "br".to_string(),
+			_ => String::new(),
+		}
+	}
+}
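Because CompressionType derives Ord with Passthrough < Gzip < Brotli, choosing the best mutually supported coding can be as simple as taking the maximum over the parsed Accept-Encoding tokens. A simplified sketch that ignores q= quality weights (the helper name is hypothetical and relies on the enum and parse above):

// Pick the preferred compressor from an Accept-Encoding header value.
// Simplified: real negotiation would also honour quality (q=) weights.
fn preferred_compressor(accept_encoding: &str) -> CompressionType {
	accept_encoding
		.split(',')
		.filter_map(|coding| CompressionType::parse(coding.trim()))
		.max()
		.unwrap_or(CompressionType::Passthrough)
}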
||||||
 pub struct Route<'a> {
 	router: &'a mut Router<fn(Request<Body>) -> BoxResponse>,
 	path: String,
@@ -53,7 +116,7 @@ pub trait ResponseExt {
 
 impl RequestExt for Request<Body> {
 	fn params(&self) -> Params {
-		self.extensions().get::<Params>().unwrap_or(&Params::new()).to_owned()
+		self.extensions().get::<Params>().unwrap_or(&Params::new()).clone()
 		// self.extensions()
 		// .get::<RequestMeta>()
 		// .and_then(|meta| meta.route_params())
@@ -69,33 +132,35 @@ impl RequestExt for Request<Body> {
 	}
 
 	fn cookies(&self) -> Vec<Cookie> {
-		let mut cookies = Vec::new();
-		if let Some(header) = self.headers().get("Cookie") {
-			for cookie in header.to_str().unwrap_or_default().split("; ") {
-				cookies.push(Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")));
-			}
-		}
-		cookies
+		self.headers().get("Cookie").map_or(Vec::new(), |header| {
+			header
+				.to_str()
+				.unwrap_or_default()
+				.split("; ")
+				.map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
+				.collect()
+		})
 	}
 
 	fn cookie(&self, name: &str) -> Option<Cookie> {
-		self.cookies().iter().find(|c| c.name() == name).map(std::borrow::ToOwned::to_owned)
+		self.cookies().into_iter().find(|c| c.name() == name)
 	}
 }
 
 impl ResponseExt for Response<Body> {
 	fn cookies(&self) -> Vec<Cookie> {
-		let mut cookies = Vec::new();
-		for header in self.headers().get_all("Cookie") {
-			if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
-				cookies.push(cookie);
-			}
-		}
-		cookies
+		self.headers().get("Cookie").map_or(Vec::new(), |header| {
+			header
+				.to_str()
+				.unwrap_or_default()
+				.split("; ")
+				.map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
+				.collect()
+		})
 	}
 
 	fn insert_cookie(&mut self, cookie: Cookie) {
-		if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
+		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
 			self.headers_mut().append("Set-Cookie", val);
 		}
 	}
@@ -103,8 +168,8 @@ impl ResponseExt for Response<Body> {
 	fn remove_cookie(&mut self, name: String) {
 		let mut cookie = Cookie::named(name);
 		cookie.set_path("/");
-		cookie.set_max_age(Duration::second());
-		if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
+		cookie.set_max_age(Duration::seconds(1));
+		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
 			self.headers_mut().append("Set-Cookie", val);
 		}
 	}
@ -144,6 +209,7 @@ impl Server {
|
|||||||
|
|
||||||
pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
|
pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
|
||||||
let make_svc = make_service_fn(move |_conn| {
|
let make_svc = make_service_fn(move |_conn| {
|
||||||
|
// For correct borrowing, these values need to be borrowed
|
||||||
let router = self.router.clone();
|
let router = self.router.clone();
|
||||||
let default_headers = self.default_headers.clone();
|
let default_headers = self.default_headers.clone();
|
||||||
|
|
||||||
@ -153,13 +219,14 @@ impl Server {
|
|||||||
// let shared_router = router.clone();
|
// let shared_router = router.clone();
|
||||||
async move {
|
async move {
|
||||||
Ok::<_, String>(service_fn(move |req: Request<Body>| {
|
Ok::<_, String>(service_fn(move |req: Request<Body>| {
|
||||||
let headers = default_headers.clone();
|
let req_headers = req.headers().clone();
|
||||||
|
let def_headers = default_headers.clone();
|
||||||
|
|
||||||
// Remove double slashes
|
// Remove double slashes and decode encoded slashes
|
||||||
let mut path = req.uri().path().replace("//", "/");
|
let mut path = req.uri().path().replace("//", "/").replace("%2F", "/");
|
||||||
|
|
||||||
// Remove trailing slashes
|
// Remove trailing slashes
|
||||||
if path.ends_with('/') && path != "/" {
|
if path != "/" && path.ends_with('/') {
|
||||||
path.pop();
|
path.pop();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -168,47 +235,516 @@ impl Server {
|
|||||||
// If a route was configured for this path
|
// If a route was configured for this path
|
||||||
Ok(found) => {
|
Ok(found) => {
|
||||||
let mut parammed = req;
|
let mut parammed = req;
|
||||||
parammed.set_params(found.params().to_owned());
|
parammed.set_params(found.params().clone());
|
||||||
|
|
||||||
// Run the route's function
|
// Run the route's function
|
||||||
let yeet = (found.handler().to_owned().to_owned())(parammed);
|
let func = (found.handler().to_owned().to_owned())(parammed);
|
||||||
async move {
|
async move {
|
||||||
let res: Result<Response<Body>, String> = yeet.await;
|
match func.await {
|
||||||
// Add default headers to response
|
Ok(mut res) => {
|
||||||
res.map(|mut response| {
|
res.headers_mut().extend(def_headers);
|
||||||
response.headers_mut().extend(headers);
|
let _ = compress_response(req_headers, &mut res).await;
|
||||||
response
|
|
||||||
})
|
Ok(res)
|
||||||
|
}
|
||||||
|
Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
.boxed()
|
.boxed()
|
||||||
}
|
}
|
||||||
// If there was a routing error
|
// If there was a routing error
|
||||||
Err(e) => async move {
|
Err(e) => async move { new_boilerplate(def_headers, req_headers, 404, e.into()).await }.boxed(),
|
||||||
// Return a 404 error
|
|
||||||
let res: Result<Response<Body>, String> = Ok(Response::builder().status(404).body(e.into()).unwrap_or_default());
|
|
||||||
// Add default headers to response
|
|
||||||
res.map(|mut response| {
|
|
||||||
response.headers_mut().extend(headers);
|
|
||||||
response
|
|
||||||
})
|
|
||||||
}
|
|
||||||
.boxed(),
|
|
||||||
}
|
}
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Build SocketAddr from provided address
|
||||||
let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));
|
let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));
|
||||||
|
|
||||||
let server = HyperServer::bind(address).serve(make_svc);
|
// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
|
||||||
|
let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
|
||||||
|
// Wait for the CTRL+C signal
|
||||||
|
tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
|
||||||
|
});
|
||||||
|
|
||||||
let graceful = server.with_graceful_shutdown(shutdown_signal());
|
server.boxed()
|
||||||
|
|
||||||
graceful.boxed()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn shutdown_signal() {
|
/// Create a boilerplate Response for error conditions. This response will be
|
||||||
// Wait for the CTRL+C signal
|
/// compressed if requested by client.
|
||||||
tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
|
async fn new_boilerplate(
|
||||||
|
default_headers: HeaderMap<header::HeaderValue>,
|
||||||
|
req_headers: HeaderMap<header::HeaderValue>,
|
||||||
|
status: u16,
|
||||||
|
body: Body,
|
||||||
|
) -> Result<Response<Body>, String> {
|
||||||
|
match Response::builder().status(status).body(body) {
|
||||||
|
Ok(mut res) => {
|
||||||
|
let _ = compress_response(req_headers, &mut res).await;
|
||||||
|
|
||||||
|
res.headers_mut().extend(default_headers.clone());
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
Err(msg) => Err(msg.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Determines the desired compressor based on the Accept-Encoding header.
|
||||||
|
///
|
||||||
|
/// This function will honor the [q-value](https://developer.mozilla.org/en-US/docs/Glossary/Quality_values)
|
||||||
|
/// for each compressor. The q-value is an optional parameter, a decimal value
|
||||||
|
/// on \[0..1\], to order the compressors by preference. An Accept-Encoding value
|
||||||
|
/// with no q-values is also accepted.
|
||||||
|
///
|
||||||
|
/// Here are [examples](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#examples)
|
||||||
|
/// of valid Accept-Encoding headers.
|
||||||
|
///
|
||||||
|
/// ```http
|
||||||
|
/// Accept-Encoding: gzip
|
||||||
|
/// Accept-Encoding: gzip, compress, br
|
||||||
|
/// Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1
|
||||||
|
/// ```
|
||||||
|
fn determine_compressor(accept_encoding: &str) -> Option<CompressionType> {
|
||||||
|
if accept_encoding.is_empty() {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Keep track of the compressor candidate based on both the client's
|
||||||
|
// preference and our own. Concrete examples:
|
||||||
|
//
|
||||||
|
// 1. "Accept-Encoding: gzip, br" => assuming we like brotli more than
|
||||||
|
// gzip, and the browser supports brotli, we choose brotli
|
||||||
|
//
|
||||||
|
// 2. "Accept-Encoding: gzip;q=0.8, br;q=0.3" => the client has stated a
|
||||||
|
// preference for gzip over brotli, so we choose gzip
|
||||||
|
//
|
||||||
|
// To do this, we need to define a struct which contains the requested
|
||||||
|
// requested compressor (abstracted as a CompressionType enum) and the
|
||||||
|
// q-value. If no q-value is defined for the compressor, we assume one of
|
||||||
|
// 1.0. We first compare compressor candidates by comparing q-values, and
|
||||||
|
// then CompressionTypes. We keep track of whatever is the greatest per our
|
||||||
|
// ordering.
|
||||||
|
|
||||||
|
struct CompressorCandidate {
|
||||||
|
alg: CompressionType,
|
||||||
|
q: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for CompressorCandidate {
|
||||||
|
fn cmp(&self, other: &Self) -> Ordering {
|
||||||
|
// Compare q-values. Break ties with the
|
||||||
|
// CompressionType values.
|
||||||
|
|
||||||
|
match self.q.total_cmp(&other.q) {
|
||||||
|
Ordering::Equal => self.alg.cmp(&other.alg),
|
||||||
|
ord => ord,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for CompressorCandidate {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
|
// Guard against NAN, both on our end and on the other.
|
||||||
|
if self.q.is_nan() || other.q.is_nan() {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
// f64 and CompressionType are ordered, except in the case
|
||||||
|
// where the f64 is NAN (which we checked against), so we
|
||||||
|
// can safely return a Some here.
|
||||||
|
Some(self.cmp(other))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for CompressorCandidate {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
(self.q == other.q) && (self.alg == other.alg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Eq for CompressorCandidate {}
|
||||||
|
|
||||||
|
// This is the current candidate.
|
||||||
|
//
|
||||||
|
// Assmume no candidate so far. We do this by assigning the sentinel value
|
||||||
|
// of negative infinity to the q-value. If this value is negative infinity,
|
||||||
|
// that means there was no viable compressor candidate.
|
||||||
|
let mut cur_candidate = CompressorCandidate {
|
||||||
|
alg: CompressionType::Passthrough,
|
||||||
|
q: f64::NEG_INFINITY,
|
||||||
|
};
|
||||||
|
|
||||||
|
// This loop reads the requested compressors and keeps track of whichever
|
||||||
|
// one has the highest priority per our heuristic.
|
||||||
|
for val in accept_encoding.to_string().split(',') {
|
||||||
|
let mut q: f64 = 1.0;
|
||||||
|
|
||||||
|
// The compressor and q-value (if the latter is defined)
|
||||||
|
// will be delimited by semicolons.
|
||||||
|
let mut spl: Split<char> = val.split(';');
|
||||||
|
|
||||||
|
// Get the compressor. For example, in
|
||||||
|
// gzip;q=0.8
|
||||||
|
// this grabs "gzip" in the string. It
|
||||||
|
// will further validate the compressor against the
|
||||||
|
// list of those we support. If it is not supported,
|
||||||
|
// we move onto the next one.
|
||||||
|
let compressor: CompressionType = match spl.next() {
|
||||||
|
// CompressionType::parse will return the appropriate enum given
|
||||||
|
// a string. For example, it will return CompressionType::Gzip
|
||||||
|
// when given "gzip".
|
||||||
|
Some(s) => match CompressionType::parse(s.trim()) {
|
||||||
|
Some(candidate) => candidate,
|
||||||
|
|
||||||
|
// We don't support the requested compression algorithm.
|
||||||
|
None => continue,
|
||||||
|
},
|
||||||
|
|
||||||
|
// We should never get here, but I'm paranoid.
|
||||||
|
None => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get the q-value. This might not be defined, in which case assume
|
||||||
|
// 1.0.
|
||||||
|
if let Some(s) = spl.next() {
|
||||||
|
if !(s.len() > 2 && s.starts_with("q=")) {
|
||||||
|
// If the q-value is malformed, the header is malformed, so
|
||||||
|
// abort.
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
match s[2..].parse::<f64>() {
|
||||||
|
Ok(val) => {
|
||||||
|
if (0.0..=1.0).contains(&val) {
|
||||||
|
q = val;
|
||||||
|
} else {
|
||||||
|
// If the value is outside [0..1], header is malformed.
|
||||||
|
// Abort.
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
// If this isn't a f64, then assume a malformed header
|
||||||
|
// value and abort.
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// If new_candidate > cur_candidate, make new_candidate the new
|
||||||
|
// cur_candidate. But do this safely! It is very possible that
|
||||||
|
// someone gave us the string "NAN", which (&str).parse::<f64>
|
||||||
|
// will happily translate to f64::NAN.
|
||||||
|
let new_candidate = CompressorCandidate { alg: compressor, q };
|
||||||
|
if let Some(ord) = new_candidate.partial_cmp(&cur_candidate) {
|
||||||
|
if ord == Ordering::Greater {
|
||||||
|
cur_candidate = new_candidate;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if cur_candidate.q != f64::NEG_INFINITY {
|
||||||
|
Some(cur_candidate.alg)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
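Illustrative expectations for determine_compressor (they mirror the unit tests later in this diff; the function and CompressionType are assumed to be in scope):

fn demo_determine_compressor() {
	// With no q-values, our own preference (brotli over gzip) wins.
	assert_eq!(determine_compressor("gzip, br"), Some(CompressionType::Brotli));
	// Client q-values take precedence over our preference.
	assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3"), Some(CompressionType::Gzip));
	// The wildcard selects the default compressor; an empty header selects none.
	assert_eq!(determine_compressor("*"), Some(DEFAULT_COMPRESSOR));
	assert_eq!(determine_compressor(""), None);
}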
|
/// Compress the response body, if possible or desirable. The Body will be
|
||||||
|
/// compressed in place, and a new header Content-Encoding will be set
|
||||||
|
/// indicating the compression algorithm.
|
||||||
|
///
|
||||||
|
/// This function deems Body eligible compression if and only if the following
|
||||||
|
/// conditions are met:
|
||||||
|
///
|
||||||
|
/// 1. the HTTP client requests a compression encoding in the Content-Encoding
|
||||||
|
/// header (hence the need for the req_headers);
|
||||||
|
///
|
||||||
|
/// 2. the content encoding corresponds to a compression algorithm we support;
|
||||||
|
///
|
||||||
|
/// 3. the Media type in the Content-Type response header is text with any
|
||||||
|
/// subtype (e.g. text/plain) or application/json.
|
||||||
|
///
|
||||||
|
/// compress_response returns Ok on successful compression, or if not all three
|
||||||
|
/// conditions above are met. It returns Err if there was a problem decoding
|
||||||
|
/// any header in either req_headers or res, but res will remain intact.
|
||||||
|
///
|
||||||
|
/// This function logs errors to stderr, but only in debug mode. No information
|
||||||
|
/// is logged in release builds.
|
||||||
|
async fn compress_response(req_headers: HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
|
||||||
|
// Check if the data is eligible for compression.
|
||||||
|
if let Some(hdr) = res.headers().get(header::CONTENT_TYPE) {
|
||||||
|
match from_utf8(hdr.as_bytes()) {
|
||||||
|
Ok(val) => {
|
||||||
|
let s = val.to_string();
|
||||||
|
|
||||||
|
// TODO: better determination of what is eligible for compression
|
||||||
|
if !(s.starts_with("text/") || s.starts_with("application/json")) {
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
return Err(e.to_string());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// Response declares no Content-Type. Assume for simplicity that it
|
||||||
|
// cannot be compressed.
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
|
||||||
|
// Don't bother if the size of the size of the response body will fit
|
||||||
|
// within an IP frame (less the bytes that make up the TCP/IP and HTTP
|
||||||
|
// headers).
|
||||||
|
if res.body().size_hint().lower() < 1452 {
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
|
||||||
|
// Quick and dirty closure for extracting a header from the request and
|
||||||
|
// returning it as a &str.
|
||||||
|
let get_req_header = |k: header::HeaderName| -> Option<&str> {
|
||||||
|
match req_headers.get(k) {
|
||||||
|
Some(hdr) => match from_utf8(hdr.as_bytes()) {
|
||||||
|
Ok(val) => Some(val),
|
||||||
|
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(debug_assertions))]
|
||||||
|
Err(_) => None,
|
||||||
|
},
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check to see which compressor is requested, and if we can use it.
|
||||||
|
let accept_encoding: &str = match get_req_header(header::ACCEPT_ENCODING) {
|
||||||
|
Some(val) => val,
|
||||||
|
None => return Ok(()), // Client requested no compression.
|
||||||
|
};
|
||||||
|
|
||||||
|
let compressor: CompressionType = match determine_compressor(accept_encoding) {
|
||||||
|
Some(c) => c,
|
||||||
|
None => return Ok(()),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get the body from the response.
|
||||||
|
let body_bytes: Vec<u8> = match body::to_bytes(res.body_mut()).await {
|
||||||
|
Ok(b) => b.to_vec(),
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
return Err(e.to_string());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Compress!
|
||||||
|
match compress_body(compressor, body_bytes) {
|
||||||
|
Ok(compressed) => {
|
||||||
|
// We get here iff the compression was successful. Replace the body
|
||||||
|
// with the compressed payload, and add the appropriate
|
||||||
|
// Content-Encoding header in the response.
|
||||||
|
res.headers_mut().insert(header::CONTENT_ENCODING, compressor.to_string().parse().unwrap());
|
||||||
|
*(res.body_mut()) = Body::from(compressed);
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(e) => return Err(e),
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
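A sketch of the intended call site for compress_response (the handler-produced response and request headers below are placeholders, not taken from the diff; it is written as an async fn only to match the function's signature):

async fn demo_compress_response() -> Result<(), String> {
	// A large text/plain body is eligible for compression (over the 1452-byte threshold).
	let mut res = Response::builder()
		.status(200)
		.header(header::CONTENT_TYPE, "text/plain")
		.body(Body::from("0123456789".repeat(200)))
		.map_err(|e| e.to_string())?;

	// The client advertises brotli support.
	let mut req_headers = HeaderMap::new();
	req_headers.insert(header::ACCEPT_ENCODING, header::HeaderValue::from_static("br"));

	// On success the body is replaced in place and Content-Encoding: br is set.
	compress_response(req_headers, &mut res).await?;
	assert_eq!(
		res.headers().get(header::CONTENT_ENCODING).and_then(|v| v.to_str().ok()),
		Some("br")
	);
	Ok(())
}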
|
/// Compresses a `Vec<u8>` given a [`CompressionType`].
|
||||||
|
///
|
||||||
|
/// This is a helper function for [`compress_response`] and should not be
|
||||||
|
/// called directly.
|
||||||
|
|
||||||
|
// I've chosen a TTL of 600 (== 10 minutes) since compression is
|
||||||
|
// computationally expensive and we don't want to be doing it often. This is
|
||||||
|
// larger than client::json's TTL, but that's okay, because if client::json
|
||||||
|
// returns a new serde_json::Value, body_bytes changes, so this function will
|
||||||
|
// execute again.
|
||||||
|
#[cached(size = 100, time = 600, result = true)]
|
||||||
|
fn compress_body(compressor: CompressionType, body_bytes: Vec<u8>) -> Result<Vec<u8>, String> {
|
||||||
|
// io::Cursor implements io::Read, required for our encoders.
|
||||||
|
let mut reader = io::Cursor::new(body_bytes);
|
||||||
|
|
||||||
|
let compressed: Vec<u8> = match compressor {
|
||||||
|
CompressionType::Gzip => {
|
||||||
|
let mut gz: gzip::Encoder<Vec<u8>> = match gzip::Encoder::new(Vec::new()) {
|
||||||
|
Ok(gz) => gz,
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
return Err(e.to_string());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
match io::copy(&mut reader, &mut gz) {
|
||||||
|
Ok(_) => match gz.finish().into_result() {
|
||||||
|
Ok(compressed) => compressed,
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
return Err(e.to_string());
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
return Err(e.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
CompressionType::Brotli => {
|
||||||
|
// We may want to make the compression parameters configurable
|
||||||
|
// in the future. For now, the defaults are sufficient.
|
||||||
|
let brotli_params = BrotliEncoderParams::default();
|
||||||
|
|
||||||
|
let mut compressed = Vec::<u8>::new();
|
||||||
|
match BrotliCompress(&mut reader, &mut compressed, &brotli_params) {
|
||||||
|
Ok(_) => compressed,
|
||||||
|
Err(e) => {
|
||||||
|
dbg_msg!(e);
|
||||||
|
return Err(e.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This arm is for any requested compressor for which we don't yet
|
||||||
|
// have an implementation.
|
||||||
|
_ => {
|
||||||
|
let msg = "unsupported compressor".to_string();
|
||||||
|
return Err(msg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(compressed)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use brotli::Decompressor as BrotliDecompressor;
|
||||||
|
use futures_lite::future::block_on;
|
||||||
|
use lipsum::lipsum;
|
||||||
|
use std::{boxed::Box, io};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_determine_compressor() {
|
||||||
|
// Single compressor given.
|
||||||
|
assert_eq!(determine_compressor("unsupported"), None);
|
||||||
|
assert_eq!(determine_compressor("gzip"), Some(CompressionType::Gzip));
|
||||||
|
assert_eq!(determine_compressor("*"), Some(DEFAULT_COMPRESSOR));
|
||||||
|
|
||||||
|
// Multiple compressors.
|
||||||
|
assert_eq!(determine_compressor("gzip, br"), Some(CompressionType::Brotli));
|
||||||
|
assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3"), Some(CompressionType::Gzip));
|
||||||
|
assert_eq!(determine_compressor("br, gzip"), Some(CompressionType::Brotli));
|
||||||
|
assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4"), Some(CompressionType::Gzip));
|
||||||
|
|
||||||
|
// Invalid q-values.
|
||||||
|
assert_eq!(determine_compressor("gzip;q=NAN"), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_compress_response() {
|
||||||
|
// This macro generates an Accept-Encoding header value given any number of
|
||||||
|
// compressors.
|
||||||
|
macro_rules! ae_gen {
|
||||||
|
($x:expr) => {
|
||||||
|
$x.to_string().as_str()
|
||||||
|
};
|
||||||
|
|
||||||
|
($x:expr, $($y:expr),+) => {
|
||||||
|
format!("{}, {}", $x.to_string(), ae_gen!($($y),+)).as_str()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
for accept_encoding in [
|
||||||
|
"*",
|
||||||
|
ae_gen!(CompressionType::Gzip),
|
||||||
|
ae_gen!(CompressionType::Brotli, CompressionType::Gzip),
|
||||||
|
ae_gen!(CompressionType::Brotli),
|
||||||
|
] {
|
||||||
|
// Determine what the expected encoding should be based on both the
|
||||||
|
// specific encodings we accept.
|
||||||
|
let expected_encoding: CompressionType = match determine_compressor(accept_encoding) {
|
||||||
|
Some(s) => s,
|
||||||
|
None => panic!("determine_compressor(accept_encoding) => None"),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build headers with our Accept-Encoding.
|
||||||
|
let mut req_headers = HeaderMap::new();
|
||||||
|
req_headers.insert(header::ACCEPT_ENCODING, header::HeaderValue::from_str(accept_encoding).unwrap());
|
||||||
|
|
||||||
|
// Build test response.
|
||||||
|
let lorem_ipsum: String = lipsum(10000);
|
||||||
|
let expected_lorem_ipsum = Vec::<u8>::from(lorem_ipsum.as_str());
|
||||||
|
let mut res = Response::builder()
|
||||||
|
.status(200)
|
||||||
|
.header(header::CONTENT_TYPE, "text/plain")
|
||||||
|
.body(Body::from(lorem_ipsum))
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Perform the compression.
|
||||||
|
if let Err(e) = block_on(compress_response(req_headers, &mut res)) {
|
||||||
|
panic!("compress_response(req_headers, &mut res) => Err(\"{}\")", e);
|
||||||
|
};
|
||||||
|
|
||||||
|
// If the content was compressed, we expect the Content-Encoding
|
||||||
|
// header to be modified.
|
||||||
|
assert_eq!(
|
||||||
|
res
|
||||||
|
.headers()
|
||||||
|
.get(header::CONTENT_ENCODING)
|
||||||
|
.unwrap_or_else(|| panic!("missing content-encoding header"))
|
||||||
|
.to_str()
|
||||||
|
.unwrap_or_else(|_| panic!("failed to convert Content-Encoding header::HeaderValue to String")),
|
||||||
|
expected_encoding.to_string()
|
||||||
|
);
|
||||||
|
|
||||||
|
// Decompress body and make sure it's equal to what we started
|
||||||
|
// with.
|
||||||
|
//
|
||||||
|
// In the case of no compression, just make sure the "new" body in
|
||||||
|
// the Response is the same as what with which we start.
|
||||||
|
let body_vec = match block_on(body::to_bytes(res.body_mut())) {
|
||||||
|
Ok(b) => b.to_vec(),
|
||||||
|
Err(e) => panic!("{}", e),
|
||||||
|
};
|
||||||
|
|
||||||
|
if expected_encoding == CompressionType::Passthrough {
|
||||||
|
assert!(body_vec.eq(&expected_lorem_ipsum));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// This provides an io::Read for the underlying body.
|
||||||
|
let mut body_cursor: io::Cursor<Vec<u8>> = io::Cursor::new(body_vec);
|
||||||
|
|
||||||
|
// Match the appropriate decompresor for the given
|
||||||
|
// expected_encoding.
|
||||||
|
let mut decoder: Box<dyn io::Read> = match expected_encoding {
|
||||||
|
CompressionType::Gzip => match gzip::Decoder::new(&mut body_cursor) {
|
||||||
|
Ok(dgz) => Box::new(dgz),
|
||||||
|
Err(e) => panic!("{}", e),
|
||||||
|
},
|
||||||
|
|
||||||
|
CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),
|
||||||
|
|
||||||
|
_ => panic!("no decompressor for {}", expected_encoding.to_string()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut decompressed = Vec::<u8>::new();
|
||||||
|
match io::copy(&mut decoder, &mut decompressed) {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => panic!("{}", e),
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(decompressed.eq(&expected_lorem_ipsum));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@@ -14,13 +14,34 @@ use time::{Duration, OffsetDateTime};
 #[template(path = "settings.html")]
 struct SettingsTemplate {
 	prefs: Preferences,
+	url: String,
 }
 
+// CONSTANTS
+
+const PREFS: [&str; 11] = [
+	"theme",
+	"front_page",
+	"layout",
+	"wide",
+	"comment_sort",
+	"post_sort",
+	"show_nsfw",
+	"blur_nsfw",
+	"use_hls",
+	"hide_hls_notification",
+	"autoplay_videos",
+];
+
 // FUNCTIONS
 
 // Retrieve cookies from request "Cookie" header
 pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
-	template(SettingsTemplate { prefs: Preferences::new(req) })
+	let url = req.uri().to_string();
+	template(SettingsTemplate {
+		prefs: Preferences::new(req),
+		url,
+	})
 }
 
 // Set cookies using response "Set-Cookie" header
@@ -29,12 +50,12 @@ pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
|
|||||||
let (parts, mut body) = req.into_parts();
|
let (parts, mut body) = req.into_parts();
|
||||||
|
|
||||||
// Grab existing cookies
|
// Grab existing cookies
|
||||||
let mut cookies = Vec::new();
|
let _cookies: Vec<Cookie> = parts
|
||||||
for header in parts.headers.get_all("Cookie") {
|
.headers
|
||||||
if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
|
.get_all("Cookie")
|
||||||
cookies.push(cookie);
|
.iter()
|
||||||
}
|
.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
|
||||||
}
|
.collect();
|
||||||
|
|
||||||
// Aggregate the body...
|
// Aggregate the body...
|
||||||
// let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?;
|
// let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?;
|
||||||
@ -48,64 +69,72 @@ pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
|
|||||||
|
|
||||||
let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
|
let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
|
||||||
|
|
||||||
let mut res = redirect("/settings".to_string());
|
let mut response = redirect("/settings".to_string());
|
||||||
|
|
||||||
let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "post_sort", "show_nsfw"];
|
for &name in &PREFS {
|
||||||
|
|
||||||
for name in names {
|
|
||||||
match form.get(name) {
|
match form.get(name) {
|
||||||
Some(value) => res.insert_cookie(
|
Some(value) => response.insert_cookie(
|
||||||
Cookie::build(name.to_owned(), value.to_owned())
|
Cookie::build(name.to_owned(), value.clone())
|
||||||
.path("/")
|
.path("/")
|
||||||
.http_only(true)
|
.http_only(true)
|
||||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||||
.finish(),
|
.finish(),
|
||||||
),
|
),
|
||||||
None => res.remove_cookie(name.to_string()),
|
None => response.remove_cookie(name.to_string()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(res)
|
Ok(response)
|
||||||
}
|
}
|
||||||
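A minimal sketch (assuming Cookie, OffsetDateTime and Duration are in scope as in this file) of what the loop over PREFS produces for one submitted form field:

fn demo_pref_cookie() {
	// A field such as theme=dark becomes a year-long, HttpOnly, site-wide cookie.
	let cookie = Cookie::build("theme", "dark")
		.path("/")
		.http_only(true)
		.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
		.finish();
	assert_eq!(cookie.name(), "theme");
	// An omitted field would instead trigger response.remove_cookie("theme".to_string()).
}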
|
|
||||||
// Set cookies using response "Set-Cookie" header
|
fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
|
||||||
pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
|
|
||||||
// Split the body into parts
|
// Split the body into parts
|
||||||
let (parts, _) = req.into_parts();
|
let (parts, _) = req.into_parts();
|
||||||
|
|
||||||
// Grab existing cookies
|
// Grab existing cookies
|
||||||
let mut cookies = Vec::new();
|
let _cookies: Vec<Cookie> = parts
|
||||||
for header in parts.headers.get_all("Cookie") {
|
.headers
|
||||||
if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
|
.get_all("Cookie")
|
||||||
cookies.push(cookie);
|
.iter()
|
||||||
}
|
.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
|
||||||
}
|
.collect();
|
||||||
|
|
||||||
let query = parts.uri.query().unwrap_or_default().as_bytes();
|
let query = parts.uri.query().unwrap_or_default().as_bytes();
|
||||||
|
|
||||||
let form = url::form_urlencoded::parse(query).collect::<HashMap<_, _>>();
|
let form = url::form_urlencoded::parse(query).collect::<HashMap<_, _>>();
|
||||||
|
|
||||||
let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "post_sort", "show_nsfw", "subscriptions"];
|
|
||||||
|
|
||||||
let path = match form.get("redirect") {
|
let path = match form.get("redirect") {
|
||||||
Some(value) => format!("/{}/", value),
|
Some(value) => format!("/{}", value.replace("%26", "&").replace("%23", "#")),
|
||||||
None => "/".to_string(),
|
None => "/".to_string(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut res = redirect(path);
|
let mut response = redirect(path);
|
||||||
|
|
||||||
for name in names {
|
for name in [PREFS.to_vec(), vec!["subscriptions", "filters"]].concat() {
|
||||||
match form.get(name) {
|
match form.get(name) {
|
||||||
Some(value) => res.insert_cookie(
|
Some(value) => response.insert_cookie(
|
||||||
Cookie::build(name.to_owned(), value.to_owned())
|
Cookie::build(name.to_owned(), value.clone())
|
||||||
.path("/")
|
.path("/")
|
||||||
.http_only(true)
|
.http_only(true)
|
||||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||||
.finish(),
|
.finish(),
|
||||||
),
|
),
|
||||||
None => res.remove_cookie(name.to_string()),
|
None => {
|
||||||
|
if remove_cookies {
|
||||||
|
response.remove_cookie(name.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(res)
|
response
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set cookies using response "Set-Cookie" header
|
||||||
|
pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
|
Ok(set_cookies_method(req, true))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
|
Ok(set_cookies_method(req, false))
|
||||||
}
|
}
|
||||||
|
422 src/subreddit.rs
@@ -1,6 +1,7 @@
 // CRATES
-use crate::esc;
-use crate::utils::{cookie, error, format_num, format_url, param, redirect, rewrite_urls, template, val, Post, Preferences, Subreddit};
+use crate::utils::{
+	catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
+};
 use crate::{client::json, server::ResponseExt, RequestExt};
 use askama::Template;
 use cookie::Cookie;
@@ -9,125 +10,266 @@ use time::{Duration, OffsetDateTime};
|
|||||||
|
|
||||||
// STRUCTS
|
// STRUCTS
|
||||||
#[derive(Template)]
|
#[derive(Template)]
|
||||||
#[template(path = "subreddit.html", escape = "none")]
|
#[template(path = "subreddit.html")]
|
||||||
struct SubredditTemplate {
|
struct SubredditTemplate {
|
||||||
sub: Subreddit,
|
sub: Subreddit,
|
||||||
posts: Vec<Post>,
|
posts: Vec<Post>,
|
||||||
sort: (String, String),
|
sort: (String, String),
|
||||||
ends: (String, String),
|
ends: (String, String),
|
||||||
prefs: Preferences,
|
prefs: Preferences,
|
||||||
|
url: String,
|
||||||
|
redirect_url: String,
|
||||||
|
/// Whether the subreddit itself is filtered.
|
||||||
|
is_filtered: bool,
|
||||||
|
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||||
|
/// and all fetched posts being filtered).
|
||||||
|
all_posts_filtered: bool,
|
||||||
|
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||||
|
all_posts_hidden_nsfw: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Template)]
|
#[derive(Template)]
|
||||||
#[template(path = "wiki.html", escape = "none")]
|
#[template(path = "wiki.html")]
|
||||||
struct WikiTemplate {
|
struct WikiTemplate {
|
||||||
sub: String,
|
sub: String,
|
||||||
wiki: String,
|
wiki: String,
|
||||||
page: String,
|
page: String,
|
||||||
prefs: Preferences,
|
prefs: Preferences,
|
||||||
|
url: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Template)]
|
||||||
|
#[template(path = "wall.html")]
|
||||||
|
struct WallTemplate {
|
||||||
|
title: String,
|
||||||
|
sub: String,
|
||||||
|
msg: String,
|
||||||
|
prefs: Preferences,
|
||||||
|
url: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
// SERVICES
|
// SERVICES
|
||||||
pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
// Build Reddit API path
|
// Build Reddit API path
|
||||||
let subscribed = cookie(&req, "subscriptions");
|
let root = req.uri().path() == "/";
|
||||||
let front_page = cookie(&req, "front_page");
|
let subscribed = setting(&req, "subscriptions");
|
||||||
|
let front_page = setting(&req, "front_page");
|
||||||
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
|
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
|
||||||
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
|
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
|
||||||
|
|
||||||
let sub = req.param("sub").map_or(
|
let sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
|
||||||
if front_page == "default" || front_page.is_empty() {
|
if subscribed.is_empty() {
|
||||||
if subscribed.is_empty() {
|
"popular".to_string()
|
||||||
"popular".to_string()
|
|
||||||
} else {
|
|
||||||
subscribed.to_owned()
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
front_page.to_owned()
|
subscribed.clone()
|
||||||
},
|
}
|
||||||
String::from,
|
} else {
|
||||||
);
|
front_page.clone()
|
||||||
|
});
|
||||||
let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.uri().query().unwrap_or_default());
|
let quarantined = can_access_quarantine(&req, &sub_name) || root;
|
||||||
|
|
||||||
match Post::fetch(&path, String::new()).await {
|
// Handle random subreddits
|
||||||
Ok((posts, after)) => {
|
if let Ok(random) = catch_random(&sub_name, "").await {
|
||||||
// If you can get subreddit posts, also request subreddit metadata
|
return Ok(random);
|
||||||
let sub = if !sub.contains('+') && sub != subscribed && sub != "popular" && sub != "all" {
|
}
|
||||||
// Regular subreddit
|
|
||||||
subreddit(&sub).await.unwrap_or_default()
|
if req.param("sub").is_some() && sub_name.starts_with("u_") {
|
||||||
} else if sub == subscribed {
|
return Ok(redirect(["/user/", &sub_name[2..]].concat()));
|
||||||
// Subscription feed
|
}
|
||||||
if req.uri().path().starts_with("/r/") {
|
|
||||||
subreddit(&sub).await.unwrap_or_default()
|
// Request subreddit metadata
|
||||||
} else {
|
let sub = if !sub_name.contains('+') && sub_name != subscribed && sub_name != "popular" && sub_name != "all" {
|
||||||
Subreddit::default()
|
// Regular subreddit
|
||||||
}
|
subreddit(&sub_name, quarantined).await.unwrap_or_default()
|
||||||
} else if sub.contains('+') {
|
} else if sub_name == subscribed {
|
||||||
// Multireddit
|
// Subscription feed
|
||||||
Subreddit {
|
if req.uri().path().starts_with("/r/") {
|
||||||
name: sub,
|
subreddit(&sub_name, quarantined).await.unwrap_or_default()
|
||||||
..Subreddit::default()
|
} else {
|
||||||
}
|
Subreddit::default()
|
||||||
} else {
|
}
|
||||||
Subreddit::default()
|
} else {
|
||||||
};
|
// Multireddit, all, popular
|
||||||
|
Subreddit {
|
||||||
template(SubredditTemplate {
|
name: sub_name.clone(),
|
||||||
sub,
|
..Subreddit::default()
|
||||||
posts,
|
}
|
||||||
sort: (sort, param(&path, "t")),
|
};
|
||||||
ends: (param(&path, "after"), after),
|
|
||||||
prefs: Preferences::new(req),
|
let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, req.uri().query().unwrap_or_default());
|
||||||
})
|
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||||
|
let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
|
||||||
|
let filters = get_filters(&req);
|
||||||
|
|
||||||
|
// If all requested subs are filtered, we don't need to fetch posts.
|
||||||
|
if sub_name.split('+').all(|s| filters.contains(s)) {
|
||||||
|
template(SubredditTemplate {
|
||||||
|
sub,
|
||||||
|
posts: Vec::new(),
|
||||||
|
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||||
|
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
|
||||||
|
prefs: Preferences::new(req),
|
||||||
|
url,
|
||||||
|
redirect_url,
|
||||||
|
is_filtered: true,
|
||||||
|
all_posts_filtered: false,
|
||||||
|
all_posts_hidden_nsfw: false,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
match Post::fetch(&path, quarantined).await {
|
||||||
|
Ok((mut posts, after)) => {
|
||||||
|
let all_posts_filtered = filter_posts(&mut posts, &filters);
|
||||||
|
let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on";
|
||||||
|
template(SubredditTemplate {
|
||||||
|
sub,
|
||||||
|
posts,
|
||||||
|
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||||
|
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||||
|
prefs: Preferences::new(req),
|
||||||
|
url,
|
||||||
|
redirect_url,
|
||||||
|
is_filtered: false,
|
||||||
|
all_posts_filtered,
|
||||||
|
all_posts_hidden_nsfw,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Err(msg) => match msg.as_str() {
|
||||||
|
"quarantined" => quarantine(req, sub_name),
|
||||||
|
"private" => error(req, format!("r/{} is a private community", sub_name)).await,
|
||||||
|
"banned" => error(req, format!("r/{} has been banned from Reddit", sub_name)).await,
|
||||||
|
_ => error(req, msg).await,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
Err(msg) => match msg.as_str() {
|
|
||||||
"quarantined" => error(req, format!("r/{} has been quarantined by Reddit", sub)).await,
|
|
||||||
"private" => error(req, format!("r/{} is a private community", sub)).await,
|
|
||||||
"banned" => error(req, format!("r/{} has been banned from Reddit", sub)).await,
|
|
||||||
_ => error(req, msg).await,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sub or unsub by setting subscription cookie using response "Set-Cookie" header
|
pub fn quarantine(req: Request<Body>, sub: String) -> Result<Response<Body>, String> {
|
||||||
pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String> {
|
let wall = WallTemplate {
|
||||||
|
title: format!("r/{} is quarantined", sub),
|
||||||
|
msg: "Please click the button below to continue to this subreddit.".to_string(),
|
||||||
|
url: req.uri().to_string(),
|
||||||
|
sub,
|
||||||
|
prefs: Preferences::new(req),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
Response::builder()
|
||||||
|
.status(403)
|
||||||
|
.header("content-type", "text/html")
|
||||||
|
.body(wall.render().unwrap_or_default().into())
|
||||||
|
.unwrap_or_default(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn add_quarantine_exception(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
|
let subreddit = req.param("sub").ok_or("Invalid URL")?;
|
||||||
|
let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?;
|
||||||
|
let mut response = redirect(redir);
|
||||||
|
response.insert_cookie(
|
||||||
|
Cookie::build(&format!("allow_quaran_{}", subreddit.to_lowercase()), "true")
|
||||||
|
.path("/")
|
||||||
|
.http_only(true)
|
||||||
|
.expires(cookie::Expiration::Session)
|
||||||
|
.finish(),
|
||||||
|
);
|
||||||
|
Ok(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
|
||||||
|
// Determine if the subreddit can be accessed
|
||||||
|
setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
|
||||||
|
}
|
||||||
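Illustrative behaviour of can_access_quarantine (assuming the `setting` helper reads cookies as elsewhere in this diff):

// With a cookie "allow_quaran_rust=true" on the request (set by add_quarantine_exception):
//     can_access_quarantine(&req, "Rust") == true   // the sub name is lowercased before lookup
// With no such cookie, the parse of an empty string fails and the default applies:
//     can_access_quarantine(&req, "Rust") == false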
|
|
||||||
|
// Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
|
||||||
|
pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
let sub = req.param("sub").unwrap_or_default();
|
let sub = req.param("sub").unwrap_or_default();
|
||||||
let query = req.uri().query().unwrap_or_default().to_string();
|
|
||||||
let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
|
let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
|
||||||
|
|
||||||
let mut sub_list = Preferences::new(req).subscriptions;
|
// Handle random subreddits
|
||||||
|
if sub == "random" || sub == "randnsfw" {
|
||||||
|
if action.contains(&"filter".to_string()) || action.contains(&"unfilter".to_string()) {
|
||||||
|
return Err("Can't filter random subreddit!".to_string());
|
||||||
|
} else {
|
||||||
|
return Err("Can't subscribe to random subreddit!".to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let query = req.uri().query().unwrap_or_default().to_string();
|
||||||
|
|
||||||
|
let preferences = Preferences::new(req);
|
||||||
|
let mut sub_list = preferences.subscriptions;
|
||||||
|
let mut filters = preferences.filters;
|
||||||
|
|
||||||
|
// Retrieve list of posts for these subreddits to extract display names
|
||||||
|
let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await?;
|
||||||
|
let display_lookup: Vec<(String, &str)> = posts["data"]["children"]
|
||||||
|
.as_array()
|
||||||
|
.map(|list| {
|
||||||
|
list
|
||||||
|
.iter()
|
||||||
|
.map(|post| {
|
||||||
|
let display_name = post["data"]["subreddit"].as_str().unwrap_or_default();
|
||||||
|
(display_name.to_lowercase(), display_name)
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
// Find each subreddit name (separated by '+') in sub parameter
|
// Find each subreddit name (separated by '+') in sub parameter
|
||||||
for part in sub.split('+') {
|
for part in sub.split('+').filter(|x| x != &"") {
|
||||||
|
// Retrieve display name for the subreddit
|
||||||
|
let display;
|
||||||
|
let part = if part.starts_with("u_") {
|
||||||
|
part
|
||||||
|
} else if let Some(&(_, display)) = display_lookup.iter().find(|x| x.0 == part.to_lowercase()) {
|
||||||
|
// This is already known, doesn't require separate request
|
||||||
|
display
|
||||||
|
} else {
|
||||||
|
// This subreddit display name isn't known, retrieve it
|
||||||
|
let path: String = format!("/r/{}/about.json?raw_json=1", part);
|
||||||
|
display = json(path, true).await?;
|
||||||
|
display["data"]["display_name"].as_str().ok_or_else(|| "Failed to query subreddit name".to_string())?
|
||||||
|
};
|
||||||
|
|
||||||
// Modify sub list based on action
|
// Modify sub list based on action
|
||||||
if action.contains(&"subscribe".to_string()) && !sub_list.contains(&part.to_owned()) {
|
if action.contains(&"subscribe".to_string()) && !sub_list.contains(&part.to_owned()) {
|
||||||
// Add each sub name to the subscribed list
|
// Add each sub name to the subscribed list
|
||||||
sub_list.push(part.to_owned());
|
sub_list.push(part.to_owned());
|
||||||
// Reorder sub names alphabettically
|
filters.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||||
sub_list.sort_by_key(|a| a.to_lowercase())
|
// Reorder sub names alphabetically
|
||||||
|
sub_list.sort_by_key(|a| a.to_lowercase());
|
||||||
|
filters.sort_by_key(|a| a.to_lowercase());
|
||||||
} else if action.contains(&"unsubscribe".to_string()) {
|
} else if action.contains(&"unsubscribe".to_string()) {
|
||||||
// Remove sub name from subscribed list
|
// Remove sub name from subscribed list
|
||||||
sub_list.retain(|s| s != part);
|
sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||||
|
} else if action.contains(&"filter".to_string()) && !filters.contains(&part.to_owned()) {
|
||||||
|
// Add each sub name to the filtered list
|
||||||
|
filters.push(part.to_owned());
|
||||||
|
sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||||
|
// Reorder sub names alphabetically
|
||||||
|
filters.sort_by_key(|a| a.to_lowercase());
|
||||||
|
sub_list.sort_by_key(|a| a.to_lowercase());
|
||||||
|
} else if action.contains(&"unfilter".to_string()) {
|
||||||
|
// Remove sub name from filtered list
|
||||||
|
filters.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Redirect back to subreddit
|
// Redirect back to subreddit
|
||||||
// check for redirect parameter if unsubscribing from outside sidebar
|
// check for redirect parameter if unsubscribing/unfiltering from outside sidebar
|
||||||
let redirect_path = param(&format!("/?{}", query), "redirect");
|
let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") {
|
||||||
let path = if redirect_path.is_empty() {
|
format!("/{}", redirect_path)
|
||||||
format!("/r/{}", sub)
|
|
||||||
} else {
|
} else {
|
||||||
format!("/{}/", redirect_path)
|
format!("/r/{}", sub)
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut res = redirect(path);
|
let mut response = redirect(path);
|
||||||
|
|
||||||
// Delete cookie if empty, else set
|
// Delete cookie if empty, else set
|
||||||
if sub_list.is_empty() {
|
if sub_list.is_empty() {
|
||||||
res.remove_cookie("subscriptions".to_string());
|
response.remove_cookie("subscriptions".to_string());
|
||||||
} else {
|
} else {
|
||||||
res.insert_cookie(
|
response.insert_cookie(
|
||||||
Cookie::build("subscriptions", sub_list.join("+"))
|
Cookie::build("subscriptions", sub_list.join("+"))
|
||||||
.path("/")
|
.path("/")
|
||||||
.http_only(true)
|
.http_only(true)
|
||||||
@ -135,76 +277,148 @@ pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String>
|
|||||||
.finish(),
|
.finish(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
if filters.is_empty() {
|
||||||
|
response.remove_cookie("filters".to_string());
|
||||||
|
} else {
|
||||||
|
response.insert_cookie(
|
||||||
|
Cookie::build("filters", filters.join("+"))
|
||||||
|
.path("/")
|
||||||
|
.http_only(true)
|
||||||
|
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||||
|
.finish(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
Ok(res)
|
Ok(response)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
|
pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
||||||
|
let quarantined = can_access_quarantine(&req, &sub);
|
||||||
|
// Handle random subreddits
|
||||||
|
if let Ok(random) = catch_random(&sub, "/wiki").await {
|
||||||
|
return Ok(random);
|
||||||
|
}
|
||||||
|
|
||||||
let page = req.param("page").unwrap_or_else(|| "index".to_string());
|
let page = req.param("page").unwrap_or_else(|| "index".to_string());
|
||||||
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
|
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
|
||||||
|
let url = req.uri().to_string();
|
||||||
|
|
||||||
match json(path).await {
|
match json(path, quarantined).await {
|
||||||
Ok(response) => template(WikiTemplate {
|
Ok(response) => template(WikiTemplate {
|
||||||
sub,
|
sub,
|
||||||
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or_default()),
|
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("<h3>Wiki not found</h3>")),
|
||||||
page,
|
page,
|
||||||
prefs: Preferences::new(req),
|
prefs: Preferences::new(req),
|
||||||
|
url,
|
||||||
}),
|
}),
|
||||||
Err(msg) => error(req, msg).await,
|
Err(msg) => {
|
||||||
|
if msg == "quarantined" {
|
||||||
|
quarantine(req, sub)
|
||||||
|
} else {
|
||||||
|
error(req, msg).await
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
|
pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||||
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
||||||
|
let quarantined = can_access_quarantine(&req, &sub);
|
||||||
|
|
||||||
|
// Handle random subreddits
|
||||||
|
if let Ok(random) = catch_random(&sub, "/about/sidebar").await {
|
||||||
|
return Ok(random);
|
||||||
|
}
|
||||||
|
|
||||||
// Build the Reddit JSON API url
|
// Build the Reddit JSON API url
|
||||||
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
|
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
|
||||||
|
let url = req.uri().to_string();
|
||||||
|
|
||||||
// Send a request to the url
|
// Send a request to the url
|
||||||
match json(path).await {
|
match json(path, quarantined).await {
|
||||||
// If success, receive JSON in response
|
// If success, receive JSON in response
|
||||||
Ok(response) => template(WikiTemplate {
|
Ok(response) => template(WikiTemplate {
|
||||||
|
wiki: rewrite_urls(&val(&response, "description_html")),
|
||||||
|
// wiki: format!(
|
||||||
|
// "{}<hr><h1>Moderators</h1><br><ul>{}</ul>",
|
||||||
|
// rewrite_urls(&val(&response, "description_html"),
|
||||||
|
// moderators(&sub, quarantined).await.unwrap_or(vec!["Could not fetch moderators".to_string()]).join(""),
|
||||||
|
// ),
|
||||||
sub,
|
sub,
|
||||||
wiki: rewrite_urls(&val(&response, "description_html").replace("\\", "")),
|
|
||||||
 			page: "Sidebar".to_string(),
 			prefs: Preferences::new(req),
+			url,
 		}),
-		Err(msg) => error(req, msg).await,
+		Err(msg) => {
+			if msg == "quarantined" {
+				quarantine(req, sub)
+			} else {
+				error(req, msg).await
+			}
+		}
 	}
 }
 
+// pub async fn moderators(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
+// 	// Retrieve and format the html for the moderators list
+// 	Ok(
+// 		moderators_list(sub, quarantined)
+// 			.await?
+// 			.iter()
+// 			.map(|m| format!("<li><a style=\"color: var(--accent)\" href=\"/u/{name}\">{name}</a></li>", name = m))
+// 			.collect(),
+// 	)
+// }
+
+// async fn moderators_list(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
+// 	// Build the moderator list URL
+// 	let path: String = format!("/r/{}/about/moderators.json?raw_json=1", sub);
+
+// 	// Retrieve response
+// 	json(path, quarantined).await.map(|response| {
+// 		// Traverse json tree and format into list of strings
+// 		response["data"]["children"]
+// 			.as_array()
+// 			.unwrap_or(&Vec::new())
+// 			.iter()
+// 			.filter_map(|moderator| {
+// 				let name = moderator["name"].as_str().unwrap_or_default();
+// 				if name.is_empty() {
+// 					None
+// 				} else {
+// 					Some(name.to_string())
+// 				}
+// 			})
+// 			.collect::<Vec<_>>()
+// 	})
+// }
 
 // SUBREDDIT
-async fn subreddit(sub: &str) -> Result<Subreddit, String> {
+async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
 	// Build the Reddit JSON API url
 	let path: String = format!("/r/{}/about.json?raw_json=1", sub);
 
 	// Send a request to the url
-	match json(path).await {
-		// If success, receive JSON in response
-		Ok(res) => {
-			// Metadata regarding the subreddit
-			let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
-			let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
-
-			// Fetch subreddit icon either from the community_icon or icon_img value
-			let community_icon: &str = res["data"]["community_icon"].as_str().map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
-			let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
-
-			let sub = Subreddit {
-				name: esc!(&res, "display_name"),
-				title: esc!(&res, "title"),
-				description: esc!(&res, "public_description"),
-				info: rewrite_urls(&val(&res, "description_html").replace("\\", "")),
-				icon: format_url(&icon),
-				members: format_num(members),
-				active: format_num(active),
-				wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
-			};
-
-			Ok(sub)
-		}
-		// If the Reddit API returns an error, exit this function
-		Err(msg) => return Err(msg),
-	}
+	let res = json(path, quarantined).await?;
+
+	// Metadata regarding the subreddit
+	let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
+	let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
+
+	// Fetch subreddit icon either from the community_icon or icon_img value
+	let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or_default();
+	let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
+
+	Ok(Subreddit {
+		name: val(&res, "display_name"),
+		title: val(&res, "title"),
+		description: val(&res, "public_description"),
+		info: rewrite_urls(&val(&res, "description_html")),
+		// moderators: moderators_list(sub, quarantined).await.unwrap_or_default(),
+		icon: format_url(&icon),
+		members: format_num(members),
+		active: format_num(active),
+		wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
+	})
 }
120	src/user.rs
@@ -1,54 +1,90 @@
 // CRATES
 use crate::client::json;
-use crate::esc;
 use crate::server::RequestExt;
-use crate::utils::{error, format_url, param, template, Post, Preferences, User};
+use crate::utils::{error, filter_posts, format_url, get_filters, param, setting, template, Post, Preferences, User};
 use askama::Template;
 use hyper::{Body, Request, Response};
-use time::OffsetDateTime;
+use time::{macros::format_description, OffsetDateTime};
 
 // STRUCTS
 #[derive(Template)]
-#[template(path = "user.html", escape = "none")]
+#[template(path = "user.html")]
 struct UserTemplate {
 	user: User,
 	posts: Vec<Post>,
 	sort: (String, String),
 	ends: (String, String),
+	/// "overview", "comments", or "submitted"
+	listing: String,
 	prefs: Preferences,
+	url: String,
+	redirect_url: String,
+	/// Whether the user themself is filtered.
+	is_filtered: bool,
+	/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
+	/// and all fetched posts being filtered).
+	all_posts_filtered: bool,
+	/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
+	all_posts_hidden_nsfw: bool,
 }
 
 // FUNCTIONS
 pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
+	let listing = req.param("listing").unwrap_or_else(|| "overview".to_string());
+
 	// Build the Reddit JSON API path
 	let path = format!(
-		"/user/{}.json?{}&raw_json=1",
+		"/user/{}/{}.json?{}&raw_json=1",
 		req.param("name").unwrap_or_else(|| "reddit".to_string()),
-		req.uri().query().unwrap_or_default()
+		listing,
+		req.uri().query().unwrap_or_default(),
 	);
+	let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
+	let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26");
 
 	// Retrieve other variables from Libreddit request
-	let sort = param(&path, "sort");
+	let sort = param(&path, "sort").unwrap_or_default();
 	let username = req.param("name").unwrap_or_default();
+	let user = user(&username).await.unwrap_or_default();
 
-	// Request user posts/comments from Reddit
-	let posts = Post::fetch(&path, "Comment".to_string()).await;
-
-	match posts {
-		Ok((posts, after)) => {
-			// If you can get user posts, also request user data
-			let user = user(&username).await.unwrap_or_default();
-
-			template(UserTemplate {
-				user,
-				posts,
-				sort: (sort, param(&path, "t")),
-				ends: (param(&path, "after"), after),
-				prefs: Preferences::new(req),
-			})
+	let filters = get_filters(&req);
+	if filters.contains(&["u_", &username].concat()) {
+		template(UserTemplate {
+			user,
+			posts: Vec::new(),
+			sort: (sort, param(&path, "t").unwrap_or_default()),
+			ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
+			listing,
+			prefs: Preferences::new(req),
+			url,
+			redirect_url,
+			is_filtered: true,
+			all_posts_filtered: false,
+			all_posts_hidden_nsfw: false,
+		})
+	} else {
+		// Request user posts/comments from Reddit
+		match Post::fetch(&path, false).await {
+			Ok((mut posts, after)) => {
+				let all_posts_filtered = filter_posts(&mut posts, &filters);
+				let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on";
+				template(UserTemplate {
+					user,
+					posts,
+					sort: (sort, param(&path, "t").unwrap_or_default()),
+					ends: (param(&path, "after").unwrap_or_default(), after),
+					listing,
+					prefs: Preferences::new(req),
+					url,
+					redirect_url,
+					is_filtered: false,
+					all_posts_filtered,
+					all_posts_hidden_nsfw,
+				})
+			}
+			// If there is an error show error page
+			Err(msg) => error(req, msg).await,
 		}
-		// If there is an error show error page
-		Err(msg) => error(req, msg).await,
 	}
 }
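The new `redirect_url` takes the request's own path-and-query, drops the leading slash, and percent-encodes `?` and `&` so the whole value can ride along inside another query string. A small standalone sketch of that transformation (the sample path is invented):

fn encode_redirect(url: &str) -> String {
	// Drop the leading "/" and escape the characters that would otherwise
	// terminate or split the enclosing query string.
	url[1..].replace('?', "%3F").replace('&', "%26")
}

fn main() {
	let url = "/user/spez/comments?sort=new&t=all";
	assert_eq!(encode_redirect(url), "user/spez/comments%3Fsort=new%26t=all");
	println!("{}", encode_redirect(url));
}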
@@ -58,27 +94,23 @@ async fn user(name: &str) -> Result<User, String> {
 	let path: String = format!("/user/{}/about.json?raw_json=1", name);
 
 	// Send a request to the url
-	match json(path).await {
-		// If success, receive JSON in response
-		Ok(res) => {
-			// Grab creation date as unix timestamp
-			let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
+	json(path, false).await.map(|res| {
+		// Grab creation date as unix timestamp
+		let created_unix = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
+		let created = OffsetDateTime::from_unix_timestamp(created_unix).unwrap_or(OffsetDateTime::UNIX_EPOCH);
 
 		// Closure used to parse JSON from Reddit APIs
 		let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
 
 		// Parse the JSON output into a User struct
-		Ok(User {
-			name: name.to_string(),
-			title: esc!(about("title")),
+		User {
+			name: res["data"]["name"].as_str().unwrap_or(name).to_owned(),
+			title: about("title"),
 			icon: format_url(&about("icon_img")),
 			karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
-			created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
-			banner: esc!(about("banner_img")),
+			created: created.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default(),
+			banner: about("banner_img"),
 			description: about("public_description"),
-		})
 		}
-		// If the Reddit API returns an error, exit this function
-		Err(msg) => return Err(msg),
-	}
+	})
 }
485	src/utils.rs
@@ -1,16 +1,33 @@
 //
 // CRATES
 //
-use crate::{client::json, esc, server::RequestExt};
+use crate::{client::json, server::RequestExt};
 use askama::Template;
 use cookie::Cookie;
 use hyper::{Body, Request, Response};
 use regex::Regex;
+use rust_embed::RustEmbed;
 use serde_json::Value;
-use std::collections::HashMap;
-use time::{Duration, OffsetDateTime};
+use std::collections::{HashMap, HashSet};
+use std::str::FromStr;
+use time::{macros::format_description, Duration, OffsetDateTime};
 use url::Url;
 
+/// Write a message to stderr on debug mode. This function is a no-op on
+/// release code.
+#[macro_export]
+macro_rules! dbg_msg {
+	($x:expr) => {
+		#[cfg(debug_assertions)]
+		eprintln!("{}:{}: {}", file!(), line!(), $x.to_string())
+	};
+
+	($($x:expr),+) => {
+		#[cfg(debug_assertions)]
+		dbg_msg!(format!($($x),+))
+	};
+}
+
 // Post flair with content, background color and foreground color
 pub struct Flair {
 	pub flair_parts: Vec<FlairPart>,
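The added `dbg_msg!` macro only expands to an `eprintln!` under `cfg(debug_assertions)`, so release builds emit nothing. A rough usage sketch, assuming the macro above is exported from the crate root; the messages themselves are made up:

fn fetch_page(after: &str) {
	// Expands to "<file>:<line>: <message>" on stderr in debug builds; a no-op in release builds.
	dbg_msg!("fetching next page");
	// The variadic arm forwards its arguments through format! first.
	dbg_msg!("after token = {}", after);
}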
@@ -20,6 +37,7 @@ pub struct Flair {
 }
 
 // Part of flair, either emoji or text
+#[derive(Clone)]
 pub struct FlairPart {
 	pub flair_part_type: String,
 	pub value: String,
@@ -73,8 +91,10 @@ pub struct Flags {
 	pub stickied: bool,
 }
 
+#[derive(Debug)]
 pub struct Media {
 	pub url: String,
+	pub alt_url: String,
 	pub width: i64,
 	pub height: i64,
 	pub poster: String,
@@ -84,13 +104,30 @@ impl Media {
 	pub async fn parse(data: &Value) -> (String, Self, Vec<GalleryMedia>) {
 		let mut gallery = Vec::new();
 
+		// Define the various known places that Reddit might put video URLs.
+		let data_preview = &data["preview"]["reddit_video_preview"];
+		let secure_media = &data["secure_media"]["reddit_video"];
+		let crosspost_parent_media = &data["crosspost_parent_list"][0]["secure_media"]["reddit_video"];
+
 		// If post is a video, return the video
-		let (post_type, url_val) = if data["preview"]["reddit_video_preview"]["fallback_url"].is_string() {
-			// Return reddit video
-			("video", &data["preview"]["reddit_video_preview"]["fallback_url"])
-		} else if data["secure_media"]["reddit_video"]["fallback_url"].is_string() {
-			// Return reddit video
-			("video", &data["secure_media"]["reddit_video"]["fallback_url"])
+		let (post_type, url_val, alt_url_val) = if data_preview["fallback_url"].is_string() {
+			(
+				if data_preview["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
+				&data_preview["fallback_url"],
+				Some(&data_preview["hls_url"]),
+			)
+		} else if secure_media["fallback_url"].is_string() {
+			(
+				if secure_media["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
+				&secure_media["fallback_url"],
+				Some(&secure_media["hls_url"]),
+			)
+		} else if crosspost_parent_media["fallback_url"].is_string() {
+			(
+				if crosspost_parent_media["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
+				&crosspost_parent_media["fallback_url"],
+				Some(&crosspost_parent_media["hls_url"]),
+			)
 		} else if data["post_hint"].as_str().unwrap_or("") == "image" {
 			// Handle images, whether GIFs or pics
 			let preview = &data["preview"]["images"][0];
@@ -98,40 +135,37 @@ impl Media {
 
 			if mp4.is_object() {
 				// Return the mp4 if the media is a gif
-				("gif", &mp4["source"]["url"])
+				("gif", &mp4["source"]["url"], None)
 			} else {
 				// Return the picture if the media is an image
 				if data["domain"] == "i.redd.it" {
-					("image", &data["url"])
+					("image", &data["url"], None)
 				} else {
-					("image", &preview["source"]["url"])
+					("image", &preview["source"]["url"], None)
 				}
 			}
 		} else if data["is_self"].as_bool().unwrap_or_default() {
 			// If type is self, return permalink
-			("self", &data["permalink"])
+			("self", &data["permalink"], None)
 		} else if data["is_gallery"].as_bool().unwrap_or_default() {
 			// If this post contains a gallery of images
 			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);
 
-			("gallery", &data["url"])
+			("gallery", &data["url"], None)
 		} else {
 			// If type can't be determined, return url
-			("link", &data["url"])
+			("link", &data["url"], None)
 		};
 
 		let source = &data["preview"]["images"][0]["source"];
 
-		let url = if post_type == "self" || post_type == "link" {
-			url_val.as_str().unwrap_or_default().to_string()
-		} else {
-			format_url(url_val.as_str().unwrap_or_default())
-		};
+		let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));
 
 		(
 			post_type.to_string(),
 			Self {
-				url,
+				url: format_url(url_val.as_str().unwrap_or_default()),
+				alt_url,
 				width: source["width"].as_i64().unwrap_or_default(),
 				height: source["height"].as_i64().unwrap_or_default(),
 				poster: format_url(source["url"].as_str().unwrap_or_default()),
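The rewritten selector now looks for a Reddit video in three places (the preview, `secure_media`, and the crosspost parent), treats `is_gif` entries as GIFs, and records the HLS playlist alongside the MP4 fallback. A standalone sketch of that precedence using serde_json; the field names follow the diff, but the sample JSON and the helper name `pick_video` are invented:

use serde_json::{json, Value};

// Return (post_type, fallback_url, hls_url) using the same precedence as Media::parse.
fn pick_video(data: &Value) -> Option<(&'static str, String, String)> {
	let candidates = [
		&data["preview"]["reddit_video_preview"],
		&data["secure_media"]["reddit_video"],
		&data["crosspost_parent_list"][0]["secure_media"]["reddit_video"],
	];
	for v in candidates {
		if let Some(fallback) = v["fallback_url"].as_str() {
			let kind = if v["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" };
			return Some((kind, fallback.to_string(), v["hls_url"].as_str().unwrap_or_default().to_string()));
		}
	}
	None
}

fn main() {
	let post = json!({
		"secure_media": { "reddit_video": {
			"fallback_url": "https://v.redd.it/abc/DASH_720.mp4?source=fallback",
			"hls_url": "https://v.redd.it/abc/HLSPlaylist.m3u8",
			"is_gif": false
		}}
	});
	println!("{:?}", pick_video(&post));
}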
@@ -193,29 +227,25 @@ pub struct Post {
 	pub created: String,
 	pub comments: (String, String),
 	pub gallery: Vec<GalleryMedia>,
+	pub awards: Awards,
 }
 
 impl Post {
 	// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
-	pub async fn fetch(path: &str, fallback_title: String) -> Result<(Vec<Self>, String), String> {
-		let res;
-		let post_list;
-
+	pub async fn fetch(path: &str, quarantine: bool) -> Result<(Vec<Self>, String), String> {
 		// Send a request to the url
-		match json(path.to_string()).await {
+		let res = match json(path.to_string(), quarantine).await {
 			// If success, receive JSON in response
-			Ok(response) => {
-				res = response;
-			}
+			Ok(response) => response,
 			// If the Reddit API returns an error, exit this function
 			Err(msg) => return Err(msg),
-		}
+		};
 
 		// Fetch the list of posts from the JSON response
-		match res["data"]["children"].as_array() {
-			Some(list) => post_list = list,
+		let post_list = match res["data"]["children"].as_array() {
+			Some(list) => list,
 			None => return Err("No posts found".to_string()),
-		}
+		};
 
 		let mut posts: Vec<Self> = Vec::new();
 
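The refactor above replaces the deferred `let res;` plus an assignment inside the `match` with binding the `match` expression directly, which is the more idiomatic shape. A tiny self-contained sketch of the same pattern with a simplified error type:

fn parse_number(input: &str) -> Result<i64, String> {
	// Bind the match result directly instead of declaring `let n;` first.
	let n = match input.trim().parse::<i64>() {
		Ok(value) => value,
		Err(e) => return Err(e.to_string()),
	};
	Ok(n * 2)
}

fn main() {
	assert_eq!(parse_number(" 21 "), Ok(42));
	assert!(parse_number("abc").is_err());
}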
@@ -226,16 +256,23 @@ impl Post {
 			let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
 			let score = data["score"].as_i64().unwrap_or_default();
 			let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
-			let title = esc!(post, "title");
+			let title = val(post, "title");
 
 			// Determine the type of media along with the media URL
-			let (post_type, media, gallery) = Media::parse(&data).await;
+			let (post_type, media, gallery) = Media::parse(data).await;
+			let awards = Awards::parse(&data["all_awardings"]);
+
+			// selftext_html is set for text posts when browsing.
+			let mut body = rewrite_urls(&val(post, "selftext_html"));
+			if body.is_empty() {
+				body = rewrite_urls(&val(post, "body_html"));
+			}
 
 			posts.push(Self {
 				id: val(post, "id"),
-				title: esc!(if title.is_empty() { fallback_title.to_owned() } else { title }),
+				title,
 				community: val(post, "subreddit"),
-				body: rewrite_urls(&val(post, "body_html")),
+				body,
 				author: Author {
 					name: val(post, "author"),
 					flair: Flair {
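The new body selection prefers `selftext_html` (set on text posts in listings) and falls back to `body_html` (set on comments); in the real code both go through the URL rewriter, which this sketch omits. The helper name `pick_body` and the sample JSON are invented:

use serde_json::{json, Value};

fn val(post: &Value, k: &str) -> String {
	post["data"][k].as_str().unwrap_or_default().to_string()
}

// Prefer selftext_html, fall back to body_html, mirroring the Post::fetch change.
fn pick_body(post: &Value) -> String {
	let body = val(post, "selftext_html");
	if body.is_empty() { val(post, "body_html") } else { body }
}

fn main() {
	let comment = json!({ "data": { "body_html": "<p>a comment</p>" } });
	assert_eq!(pick_body(&comment), "<p>a comment</p>");
}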
@@ -244,7 +281,7 @@ impl Post {
 						data["author_flair_richtext"].as_array(),
 						data["author_flair_text"].as_str(),
 					),
-					text: esc!(post, "link_flair_text"),
+					text: val(post, "link_flair_text"),
 					background_color: val(post, "author_flair_background_color"),
 					foreground_color: val(post, "author_flair_text_color"),
 				},
@@ -259,6 +296,7 @@ impl Post {
 				post_type,
 				thumbnail: Media {
 					url: format_url(val(post, "thumbnail").as_str()),
+					alt_url: String::new(),
 					width: data["thumbnail_width"].as_i64().unwrap_or_default(),
 					height: data["thumbnail_height"].as_i64().unwrap_or_default(),
 					poster: "".to_string(),
|
|||||||
data["link_flair_richtext"].as_array(),
|
data["link_flair_richtext"].as_array(),
|
||||||
data["link_flair_text"].as_str(),
|
data["link_flair_text"].as_str(),
|
||||||
),
|
),
|
||||||
text: esc!(post, "link_flair_text"),
|
text: val(post, "link_flair_text"),
|
||||||
background_color: val(post, "link_flair_background_color"),
|
background_color: val(post, "link_flair_background_color"),
|
||||||
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
||||||
"black".to_string()
|
"black".to_string()
|
||||||
@ -281,13 +319,14 @@ impl Post {
|
|||||||
},
|
},
|
||||||
flags: Flags {
|
flags: Flags {
|
||||||
nsfw: data["over_18"].as_bool().unwrap_or_default(),
|
nsfw: data["over_18"].as_bool().unwrap_or_default(),
|
||||||
stickied: data["stickied"].as_bool().unwrap_or_default(),
|
stickied: data["stickied"].as_bool().unwrap_or_default() || data["pinned"].as_bool().unwrap_or_default(),
|
||||||
},
|
},
|
||||||
permalink: val(post, "permalink"),
|
permalink: val(post, "permalink"),
|
||||||
rel_time,
|
rel_time,
|
||||||
created,
|
created,
|
||||||
comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
|
comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
|
||||||
gallery,
|
gallery,
|
||||||
|
awards,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -296,7 +335,7 @@ impl Post {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Template)]
|
#[derive(Template)]
|
||||||
#[template(path = "comment.html", escape = "none")]
|
#[template(path = "comment.html")]
|
||||||
// Comment with content, post, score and data/time that it was posted
|
// Comment with content, post, score and data/time that it was posted
|
||||||
pub struct Comment {
|
pub struct Comment {
|
||||||
pub id: String,
|
pub id: String,
|
||||||
@ -313,13 +352,70 @@ pub struct Comment {
|
|||||||
pub edited: (String, String),
|
pub edited: (String, String),
|
||||||
pub replies: Vec<Comment>,
|
pub replies: Vec<Comment>,
|
||||||
pub highlighted: bool,
|
pub highlighted: bool,
|
||||||
|
pub awards: Awards,
|
||||||
|
pub collapsed: bool,
|
||||||
|
pub is_filtered: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Clone)]
|
||||||
|
pub struct Award {
|
||||||
|
pub name: String,
|
||||||
|
pub icon_url: String,
|
||||||
|
pub description: String,
|
||||||
|
pub count: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Award {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
write!(f, "{} {} {}", self.name, self.icon_url, self.description)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Awards(pub Vec<Award>);
|
||||||
|
|
||||||
|
impl std::ops::Deref for Awards {
|
||||||
|
type Target = Vec<Award>;
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Awards {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
self.iter().fold(Ok(()), |result, award| result.and_then(|_| writeln!(f, "{}", award)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert Reddit awards JSON to Awards struct
|
||||||
|
impl Awards {
|
||||||
|
pub fn parse(items: &Value) -> Self {
|
||||||
|
let parsed = items.as_array().unwrap_or(&Vec::new()).iter().fold(Vec::new(), |mut awards, item| {
|
||||||
|
let name = item["name"].as_str().unwrap_or_default().to_string();
|
||||||
|
let icon_url = format_url(item["resized_icons"][0]["url"].as_str().unwrap_or_default());
|
||||||
|
let description = item["description"].as_str().unwrap_or_default().to_string();
|
||||||
|
let count: i64 = i64::from_str(&item["count"].to_string()).unwrap_or(1);
|
||||||
|
|
||||||
|
awards.push(Award {
|
||||||
|
name,
|
||||||
|
icon_url,
|
||||||
|
description,
|
||||||
|
count,
|
||||||
|
});
|
||||||
|
|
||||||
|
awards
|
||||||
|
});
|
||||||
|
|
||||||
|
Self(parsed)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Template)]
|
#[derive(Template)]
|
||||||
#[template(path = "error.html", escape = "none")]
|
#[template(path = "error.html")]
|
||||||
pub struct ErrorTemplate {
|
pub struct ErrorTemplate {
|
||||||
pub msg: String,
|
pub msg: String,
|
||||||
pub prefs: Preferences,
|
pub prefs: Preferences,
|
||||||
|
pub url: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
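`Awards` wraps a `Vec<Award>`, derefs to it, and is built from the `all_awardings` array; the count is parsed from the raw JSON number via `FromStr`. A rough standalone sketch of the same traversal, with the struct trimmed to two fields and invented sample JSON:

use serde_json::{json, Value};
use std::str::FromStr;

#[derive(Debug)]
struct Award { name: String, count: i64 }

// Walk the all_awardings array the same way Awards::parse does.
fn parse_awards(items: &Value) -> Vec<Award> {
	items
		.as_array()
		.unwrap_or(&Vec::new())
		.iter()
		.map(|item| Award {
			name: item["name"].as_str().unwrap_or_default().to_string(),
			count: i64::from_str(&item["count"].to_string()).unwrap_or(1),
		})
		.collect()
}

fn main() {
	let all_awardings = json!([{ "name": "Silver", "count": 3 }]);
	println!("{:?}", parse_awards(&all_awardings));
}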
@@ -341,6 +437,7 @@ pub struct Subreddit {
 	pub title: String,
 	pub description: String,
 	pub info: String,
+	// pub moderators: Vec<String>,
 	pub icon: String,
 	pub members: (String, String),
 	pub active: (String, String),
@@ -359,29 +456,70 @@ pub struct Params {
 
 #[derive(Default)]
 pub struct Preferences {
+	pub available_themes: Vec<String>,
 	pub theme: String,
 	pub front_page: String,
 	pub layout: String,
 	pub wide: String,
 	pub show_nsfw: String,
+	pub blur_nsfw: String,
+	pub hide_hls_notification: String,
+	pub use_hls: String,
+	pub autoplay_videos: String,
 	pub comment_sort: String,
 	pub post_sort: String,
 	pub subscriptions: Vec<String>,
+	pub filters: Vec<String>,
 }
 
+#[derive(RustEmbed)]
+#[folder = "static/themes/"]
+#[include = "*.css"]
+pub struct ThemeAssets;
+
 impl Preferences {
 	// Build preferences from cookies
 	pub fn new(req: Request<Body>) -> Self {
-		Self {
-			theme: cookie(&req, "theme"),
-			front_page: cookie(&req, "front_page"),
-			layout: cookie(&req, "layout"),
-			wide: cookie(&req, "wide"),
-			show_nsfw: cookie(&req, "show_nsfw"),
-			comment_sort: cookie(&req, "comment_sort"),
-			post_sort: cookie(&req, "post_sort"),
-			subscriptions: cookie(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
+		// Read available theme names from embedded css files.
+		// Always make the default "system" theme available.
+		let mut themes = vec!["system".to_string()];
+		for file in ThemeAssets::iter() {
+			let chunks: Vec<&str> = file.as_ref().split(".css").collect();
+			themes.push(chunks[0].to_owned())
 		}
+		Self {
+			available_themes: themes,
+			theme: setting(&req, "theme"),
+			front_page: setting(&req, "front_page"),
+			layout: setting(&req, "layout"),
+			wide: setting(&req, "wide"),
+			show_nsfw: setting(&req, "show_nsfw"),
+			blur_nsfw: setting(&req, "blur_nsfw"),
+			use_hls: setting(&req, "use_hls"),
+			hide_hls_notification: setting(&req, "hide_hls_notification"),
+			autoplay_videos: setting(&req, "autoplay_videos"),
+			comment_sort: setting(&req, "comment_sort"),
+			post_sort: setting(&req, "post_sort"),
+			subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
+			filters: setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
+		}
+	}
+}
+
+/// Gets a `HashSet` of filters from the cookie in the given `Request`.
+pub fn get_filters(req: &Request<Body>) -> HashSet<String> {
+	setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect::<HashSet<String>>()
+}
+
+/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being a subreddit name or a user name). If a
+/// `Post`'s subreddit or author is found in the filters, it is removed. Returns `true` if _all_ posts were filtered
+/// out, or `false` otherwise.
+pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> bool {
+	if posts.is_empty() {
+		false
+	} else {
+		posts.retain(|p| !filters.contains(&p.community) && !filters.contains(&["u_", &p.author.name].concat()));
+		posts.is_empty()
 	}
 }
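`get_filters` turns the `filters` cookie into a `HashSet`, and `filter_posts` drops every post whose subreddit or `u_<author>` appears in that set, reporting whether everything was removed. A self-contained sketch of the retain rule on a stand-in post type (not the real `Post`):

use std::collections::HashSet;

struct MiniPost { community: String, author: String }

// Same retain rule as filter_posts: match on subreddit name or on "u_<author>".
fn filter_posts(posts: &mut Vec<MiniPost>, filters: &HashSet<String>) -> bool {
	if posts.is_empty() {
		false
	} else {
		posts.retain(|p| !filters.contains(&p.community) && !filters.contains(&["u_", &p.author].concat()));
		posts.is_empty()
	}
}

fn main() {
	let filters: HashSet<String> = ["memes".to_string(), "u_spammer".to_string()].into_iter().collect();
	let mut posts = vec![
		MiniPost { community: "memes".into(), author: "someone".into() },
		MiniPost { community: "rust".into(), author: "someone".into() },
	];
	let all_filtered = filter_posts(&mut posts, &filters);
	assert!(!all_filtered);
	assert_eq!(posts.len(), 1);
}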
@@ -390,17 +528,46 @@ impl Preferences {
 //
 
 // Grab a query parameter from a url
-pub fn param(path: &str, value: &str) -> String {
-	match Url::parse(format!("https://libredd.it/{}", path).as_str()) {
-		Ok(url) => url.query_pairs().into_owned().collect::<HashMap<_, _>>().get(value).unwrap_or(&String::new()).to_owned(),
-		_ => String::new(),
-	}
+pub fn param(path: &str, value: &str) -> Option<String> {
+	Some(
+		Url::parse(format!("https://libredd.it/{}", path).as_str())
+			.ok()?
+			.query_pairs()
+			.into_owned()
+			.collect::<HashMap<_, _>>()
+			.get(value)?
+			.clone(),
+	)
 }
 
-// Parse a cookie value from request
-pub fn cookie(req: &Request<Body>, name: &str) -> String {
-	let cookie = req.cookie(name).unwrap_or_else(|| Cookie::named(name));
-	cookie.value().to_string()
+// Retrieve the value of a setting by name
+pub fn setting(req: &Request<Body>, name: &str) -> String {
+	// Parse a cookie value from request
+	req
+		.cookie(name)
+		.unwrap_or_else(|| {
+			// If there is no cookie for this setting, try receiving a default from an environment variable
+			if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
+				Cookie::new(name, default)
+			} else {
+				Cookie::named(name)
+			}
+		})
+		.value()
+		.to_string()
+}
+
+// Detect and redirect in the event of a random subreddit
+pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
+	if sub == "random" || sub == "randnsfw" {
+		let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
+			.as_str()
+			.unwrap_or_default()
+			.to_string();
+		Ok(redirect(format!("/r/{}{}", new_sub, additional)))
+	} else {
+		Err("No redirect needed".to_string())
+	}
 }
 
 // Direct urls to proxy if proxy is enabled
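`param` now returns `Option<String>` so each caller chooses its own default, and `setting` falls back to a `LIBREDDIT_DEFAULT_<NAME>` environment variable before settling for an empty cookie. A small sketch of the query-string side only, reusing the function body from the diff (the sample URL is made up; demonstrating the env-var fallback would need a full `Request`):

use std::collections::HashMap;
use url::Url;

// Same shape as the new param(): None when the URL fails to parse or the key is absent.
fn param(path: &str, value: &str) -> Option<String> {
	Some(
		Url::parse(format!("https://libredd.it/{}", path).as_str())
			.ok()?
			.query_pairs()
			.into_owned()
			.collect::<HashMap<_, _>>()
			.get(value)?
			.clone(),
	)
}

fn main() {
	let path = "/r/rust/top.json?sort=top&t=week";
	assert_eq!(param(path, "t"), Some("week".to_string()));
	assert_eq!(param(path, "after"), None);
	// Callers now pick their own default:
	let sort = param(path, "missing").unwrap_or_default();
	assert_eq!(sort, "");
}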
@@ -408,55 +575,89 @@ pub fn format_url(url: &str) -> String {
 	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
 		String::new()
 	} else {
-		match Url::parse(url) {
-			Ok(parsed) => {
-				let domain = parsed.domain().unwrap_or_default();
+		Url::parse(url).map_or(url.to_string(), |parsed| {
+			let domain = parsed.domain().unwrap_or_default();
 
 			let capture = |regex: &str, format: &str, segments: i16| {
-				Regex::new(regex)
-					.map(|re| match re.captures(url) {
-						Some(caps) => match segments {
-							1 => [format, &caps[1]].join(""),
-							2 => [format, &caps[1], "/", &caps[2]].join(""),
-							_ => String::new(),
-						},
-						None => String::new(),
-					})
-					.unwrap_or_default()
+				Regex::new(regex).map_or(String::new(), |re| {
+					re.captures(url).map_or(String::new(), |caps| match segments {
+						1 => [format, &caps[1]].join(""),
+						2 => [format, &caps[1], "/", &caps[2]].join(""),
+						_ => String::new(),
+					})
+				})
 			};
 
-			match domain {
-				"v.redd.it" => capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
-				"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
-				"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
-				"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
-				"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
-				"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)\?(.*)", "/preview/pre/", 2),
-				"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)\?(.*)", "/preview/external-pre/", 2),
-				"styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
-				"www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
-				_ => String::new(),
-			}
-			}
-			Err(_) => String::new(),
-		}
+			macro_rules! chain {
+				() => {
+					{
+						String::new()
+					}
+				};
+
+				( $first_fn:expr, $($other_fns:expr), *) => {
+					{
+						let result = $first_fn;
+						if result.is_empty() {
+							chain!($($other_fns,)*)
+						}
+						else
+						{
+							result
+						}
+					}
+				};
+			}
+
+			match domain {
+				"www.reddit.com" => capture(r"https://www\.reddit\.com/(.*)", "/", 1),
+				"old.reddit.com" => capture(r"https://old\.reddit\.com/(.*)", "/", 1),
+				"np.reddit.com" => capture(r"https://np\.reddit\.com/(.*)", "/", 1),
+				"reddit.com" => capture(r"https://reddit\.com/(.*)", "/", 1),
+				"v.redd.it" => chain!(
+					capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$|\?source=fallback))", "/vid/", 2),
+					capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2)
+				),
+				"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
+				"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
+				"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
+				"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
+				"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1),
+				"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1),
+				"styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
+				"www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
+				_ => url.to_string(),
+			}
+		})
 	}
 }
 
 // Rewrite Reddit links to Libreddit in body of text
-pub fn rewrite_urls(text: &str) -> String {
-	match Regex::new(r#"href="(https|http|)://(www.|old.|np.|amp.|)(reddit).(com)/"#) {
-		Ok(re) => re.replace_all(text, r#"href="/"#).to_string(),
-		Err(_) => String::new(),
-	}
+pub fn rewrite_urls(input_text: &str) -> String {
+	let text1 = Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|)(reddit\.com|redd\.it)/"#)
+		.map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string())
+		// Remove (html-encoded) "\" from URLs.
+		.replace("%5C", "")
+		.replace('\\', "");
+
+	// Rewrite external media previews to Libreddit
+	Regex::new(r"https://external-preview\.redd\.it(.*)[^?]").map_or(String::new(), |re| {
+		if re.is_match(&text1) {
+			re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
+		} else {
+			text1
+		}
+	})
 }
 
-// Append `m` and `k` for millions and thousands respectively
+// Format vote count to a string that will be displayed.
+// Append `m` and `k` for millions and thousands respectively, and
+// round to the nearest tenth.
 pub fn format_num(num: i64) -> (String, String) {
 	let truncated = if num >= 1_000_000 || num <= -1_000_000 {
-		format!("{}m", num / 1_000_000)
+		format!("{:.1}m", num as f64 / 1_000_000.0)
 	} else if num >= 1000 || num <= -1000 {
-		format!("{}k", num / 1_000)
+		format!("{:.1}k", num as f64 / 1_000.0)
 	} else {
 		num.to_string()
 	};
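Inside the rewritten `format_url`, the local `chain!` macro evaluates its arguments in order and keeps the first non-empty string, which is how `v.redd.it` links try the DASH pattern before the HLS one. A standalone sketch of the same first-non-empty chaining, with plain closures standing in for the regex captures:

// Try each candidate in order and keep the first non-empty string,
// mirroring what the chain! macro does with capture() results.
fn first_non_empty(candidates: &[&dyn Fn() -> String]) -> String {
	for f in candidates {
		let result = f();
		if !result.is_empty() {
			return result;
		}
	}
	String::new()
}

fn main() {
	let dash = || String::new();                          // e.g. the DASH pattern did not match
	let hls = || "/hls/abc/HLSPlaylist.m3u8".to_string(); // the HLS pattern did
	let candidates: Vec<&dyn Fn() -> String> = vec![&dash, &hls];
	println!("{}", first_non_empty(&candidates));
}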
@@ -466,12 +667,12 @@ pub fn format_num(num: i64) -> (String, String) {
 
 // Parse a relative and absolute time from a UNIX timestamp
 pub fn time(created: f64) -> (String, String) {
-	let time = OffsetDateTime::from_unix_timestamp(created.round() as i64);
+	let time = OffsetDateTime::from_unix_timestamp(created.round() as i64).unwrap_or(OffsetDateTime::UNIX_EPOCH);
 	let time_delta = OffsetDateTime::now_utc() - time;
 
 	// If the time difference is more than a month, show full date
 	let rel_time = if time_delta > Duration::days(30) {
-		time.format("%b %d '%y")
+		time.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default()
 	// Otherwise, show relative date/time
 	} else if time_delta.whole_days() > 0 {
 		format!("{}d ago", time_delta.whole_days())
@@ -481,7 +682,12 @@ pub fn time(created: f64) -> (String, String) {
 		format!("{}m ago", time_delta.whole_minutes())
 	};
 
-	(rel_time, time.format("%b %d %Y, %H:%M:%S UTC"))
+	(
+		rel_time,
+		time
+			.format(format_description!("[month repr:short] [day] [year], [hour]:[minute]:[second] UTC"))
+			.unwrap_or_default(),
+	)
 }
 
 // val() function used to parse JSON from Reddit APIs
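With the time 0.3 API, `format()` takes a `format_description!` and returns a `Result`, which is why both the relative and the absolute strings now end in `unwrap_or_default()`. A small sketch converting one UNIX timestamp with the two format strings from this diff (requires the `time` crate with the `macros` and `formatting` features):

use time::{macros::format_description, OffsetDateTime};

fn main() {
	// 2021-01-01 00:00:00 UTC
	let created = OffsetDateTime::from_unix_timestamp(1_609_459_200).unwrap_or(OffsetDateTime::UNIX_EPOCH);

	let short = created
		.format(format_description!("[month repr:short] [day] '[year repr:last_two]"))
		.unwrap_or_default();
	let long = created
		.format(format_description!("[month repr:short] [day] [year], [hour]:[minute]:[second] UTC"))
		.unwrap_or_default();

	// Prints: Jan 01 '21 / Jan 01 2021, 00:00:00 UTC
	println!("{} / {}", short, long);
}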
@@ -489,27 +695,6 @@ pub fn val(j: &Value, k: &str) -> String {
 	j["data"][k].as_str().unwrap_or_default().to_string()
 }
 
-#[macro_export]
-macro_rules! esc {
-	($f:expr) => {
-		$f.replace('<', "&lt;").replace('>', "&gt;")
-	};
-	($j:expr, $k:expr) => {
-		$j["data"][$k].as_str().unwrap_or_default().to_string().replace('<', "&lt;").replace('>', "&gt;")
-	};
-}
-
-// Escape < and > to accurately render HTML
-// pub fn esc(j: &Value, k: &str) -> String {
-// 	val(j,k)
-// 		// .replace('&', "&amp;")
-// 		.replace('<', "&lt;")
-// 		.replace('>', "&gt;")
-// 		// .replace('"', "&quot;")
-// 		// .replace('\'', "&#x27;")
-// 		// .replace('/', "&#x2F;")
-// }
-
 //
 // NETWORKING
 //
@@ -533,13 +718,69 @@ pub fn redirect(path: String) -> Response<Body> {
 		.unwrap_or_default()
 }
 
-pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
+/// Renders a generic error landing page.
+pub async fn error(req: Request<Body>, msg: impl ToString) -> Result<Response<Body>, String> {
+	let url = req.uri().to_string();
 	let body = ErrorTemplate {
-		msg,
+		msg: msg.to_string(),
 		prefs: Preferences::new(req),
+		url,
 	}
 	.render()
 	.unwrap_or_default();
 
 	Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
 }
+
+#[cfg(test)]
+mod tests {
+	use super::{format_num, format_url, rewrite_urls};
+
+	#[test]
+	fn format_num_works() {
+		assert_eq!(format_num(567), ("567".to_string(), "567".to_string()));
+		assert_eq!(format_num(1234), ("1.2k".to_string(), "1234".to_string()));
+		assert_eq!(format_num(1999), ("2.0k".to_string(), "1999".to_string()));
+		assert_eq!(format_num(1001), ("1.0k".to_string(), "1001".to_string()));
+		assert_eq!(format_num(1_999_999), ("2.0m".to_string(), "1999999".to_string()));
+	}
+
+	#[test]
+	fn rewrite_urls_removes_backslashes() {
+		let comment_body_html =
+			r#"<a href=\"https://www.reddit.com/r/linux%5C_gaming/comments/x/just%5C_a%5C_test%5C/\">https://www.reddit.com/r/linux\\_gaming/comments/x/just\\_a\\_test/</a>"#;
+		assert_eq!(
+			rewrite_urls(comment_body_html),
+			r#"<a href="https://www.reddit.com/r/linux_gaming/comments/x/just_a_test/">https://www.reddit.com/r/linux_gaming/comments/x/just_a_test/</a>"#
+		)
+	}
+
+	#[test]
+	fn test_format_url() {
+		assert_eq!(format_url("https://a.thumbs.redditmedia.com/XYZ.jpg"), "/thumb/a/XYZ.jpg");
+		assert_eq!(format_url("https://emoji.redditmedia.com/a/b"), "/emoji/a/b");
+
+		assert_eq!(
+			format_url("https://external-preview.redd.it/foo.jpg?auto=webp&s=bar"),
+			"/preview/external-pre/foo.jpg?auto=webp&s=bar"
+		);
+
+		assert_eq!(format_url("https://i.redd.it/foobar.jpg"), "/img/foobar.jpg");
+		assert_eq!(
+			format_url("https://preview.redd.it/qwerty.jpg?auto=webp&s=asdf"),
+			"/preview/pre/qwerty.jpg?auto=webp&s=asdf"
+		);
+		assert_eq!(format_url("https://v.redd.it/foo/DASH_360.mp4?source=fallback"), "/vid/foo/360.mp4");
+		assert_eq!(
+			format_url("https://v.redd.it/foo/HLSPlaylist.m3u8?a=bar&v=1&f=sd"),
+			"/hls/foo/HLSPlaylist.m3u8?a=bar&v=1&f=sd"
+		);
+		assert_eq!(format_url("https://www.redditstatic.com/gold/awards/icon/icon.png"), "/static/gold/awards/icon/icon.png");
+
+		assert_eq!(format_url(""), "");
+		assert_eq!(format_url("self"), "");
+		assert_eq!(format_url("default"), "");
+		assert_eq!(format_url("nsfw"), "");
+		assert_eq!(format_url("spoiler"), "");
+	}
+}
BIN
static/Inter.var.woff2
Normal file
BIN
static/Inter.var.woff2
Normal file
Binary file not shown.
5
static/hls.min.js
vendored
Normal file
5
static/hls.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
77
static/playHLSVideo.js
Normal file
77
static/playHLSVideo.js
Normal file
@@ -0,0 +1,77 @@
+// @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0
+(function () {
+	if (Hls.isSupported()) {
+		var videoSources = document.querySelectorAll("video source[type='application/vnd.apple.mpegurl']");
+		videoSources.forEach(function (source) {
+			var playlist = source.src;
+
+			var oldVideo = source.parentNode;
+			var autoplay = oldVideo.classList.contains("hls_autoplay");
+
+			// If HLS is supported natively then don't use hls.js
+			if (oldVideo.canPlayType(source.type)) {
+				if (autoplay) {
+					oldVideo.play();
+				}
+				return;
+			}
+
+			// Replace video with copy that will have all "source" elements removed
+			var newVideo = oldVideo.cloneNode(true);
+			var allSources = newVideo.querySelectorAll("source");
+			allSources.forEach(function (source) {
+				source.remove();
+			});
+
+			// Empty source to enable play event
+			newVideo.src = "about:blank";
+
+			oldVideo.parentNode.replaceChild(newVideo, oldVideo);
+
+			function initializeHls() {
+				newVideo.removeEventListener('play', initializeHls);
+
+				var hls = new Hls({ autoStartLoad: false });
+				hls.loadSource(playlist);
+				hls.attachMedia(newVideo);
+				hls.on(Hls.Events.MANIFEST_PARSED, function () {
+					hls.loadLevel = hls.levels.length - 1;
+					hls.startLoad();
+					newVideo.play();
+				});
+
+				hls.on(Hls.Events.ERROR, function (event, data) {
+					var errorType = data.type;
+					var errorFatal = data.fatal;
+					if (errorFatal) {
+						switch (errorType) {
+							case Hls.ErrorType.NETWORK_ERROR:
+								hls.startLoad();
+								break;
+							case Hls.ErrorType.MEDIA_ERROR:
+								hls.recoverMediaError();
+								break;
+							default:
+								hls.destroy();
+								break;
+						}
+					}
+
+					console.error("HLS error", data);
+				});
+			}
+
+			newVideo.addEventListener('play', initializeHls);
+
+			if (autoplay) {
+				newVideo.play();
+			}
+		});
+	} else {
+		var videos = document.querySelectorAll("video.hls_autoplay");
+		videos.forEach(function (video) {
+			video.setAttribute("autoplay", "");
+		});
+	}
+})();
+
+// @license-end
282	static/style.css
@@ -6,6 +6,12 @@
 	--admin: #ea0027;
 }
 
+@font-face {
+	font-family: 'Inter';
+	src: url('/Inter.var.woff2') format('woff2-variations');
+	font-style: normal;
+}
+
 /* Automatic theme selection */
 :root, .dark{
 	/* Default & fallback theme (dark) */
@@ -18,6 +24,7 @@
 	--post: #161616;
 	--panel-border: 1px solid #333;
 	--highlighted: #333;
+	--visited: #aaa;
 	--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
 }
 
@@ -33,38 +40,12 @@
 	--post: #eee;
 	--panel-border: 1px solid #ccc;
 	--highlighted: white;
+	--visited: #555;
 	--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
 	}
 }
 
-/* Light theme setting */
-.light {
-	--accent: #009a9a;
-	--green: #00a229;
-	--text: black;
-	--foreground: #f5f5f5;
-	--background: #ddd;
-	--outside: #ececec;
-	--post: #eee;
-	--panel-border: 1px solid #ccc;
-	--highlighted: white;
-	--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
-}
-
-/* Black theme setting */
-.black {
-	--accent: #009a9a;
-	--green: #00a229;
-	--text: white;
-	--foreground: #0f0f0f;
-	--background: black;
-	--outside: black;
-	--post: black;
-	--panel-border: 2px solid #0f0f0f;
-	--highlighted: #0f0f0f;
-	--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
-}
-
+/* Other themes are located in the "themes" folder */
 
 /* General */
 
@@ -79,9 +60,10 @@
 
 html, body, div, h1, h2, h3, h4, h5, h6, ul, ol, dl, li, dt, dd, p, blockquote,
 pre, form, fieldset, table, th, td, select, input {
+	accent-color: var(--accent);
 	margin: 0;
 	color: var(--text);
-	font-family: sans-serif;
+	font-family: "Inter", sans-serif;
 }
 
 body {
@@ -143,10 +125,15 @@ nav #libreddit {
 
 #settings_link {
 	opacity: 0.8;
+	margin-left: 10px;
+}
+
+#reddit_link {
+	opacity: 0.8;
 }
 
 #code {
-	margin-left: 5px;
+	margin-left: 10px;
 }
 
 main {
@@ -167,9 +154,10 @@ main {
 }
 
 #column_one {
+	width: 100%;
 	max-width: 750px;
 	border-radius: 5px;
-	overflow: hidden;
+	overflow: inherit;
 }
 
 footer {
@@ -262,6 +250,7 @@ aside {
 #user_description, #sub_description {
 	margin: 0 15px;
 	text-align: left;
+	overflow-wrap: anywhere;
 }
 
 #user_name, #user_description:not(:empty), #user_icon,
@@ -269,7 +258,7 @@ aside {
 	margin-bottom: 20px;
 }
 
-#user_details, #sub_details {
+#user_details, #sub_details, #sub_actions, #user_actions {
 	display: grid;
 	grid-template-columns: repeat(2, 1fr);
 	grid-column-gap: 20px;
@@ -281,7 +270,7 @@ aside {
 
 /* Subscriptions */
 
-#sub_subscription {
+#sub_subscription, #user_subscription, #user_filter, #sub_filter {
 	margin-top: 20px;
 }
 
@@ -289,18 +278,18 @@ aside {
 	margin-bottom: 20px;
 }
 
-.subscribe, .unsubscribe {
+.subscribe, .unsubscribe, .filter, .unfilter {
 	padding: 10px 20px;
 	border-radius: 5px;
 	cursor: pointer;
 }
 
-.subscribe {
+.subscribe, .filter {
 	color: var(--foreground);
 	background-color: var(--accent);
 }
 
-.unsubscribe {
+.unsubscribe, .unfilter {
 	color: var(--text);
 	background-color: var(--highlighted);
 }
@@ -361,6 +350,7 @@ aside {
 #wiki {
 	background: var(--foreground);
 	padding: 35px;
+	overflow-wrap: anywhere;
 }
 
 #top {
@@ -382,8 +372,8 @@ aside {
 
 /* Sorting and Search */
 
-select, #search, #sort_options, #inside, #searchbox > *, #sort_submit {
-	height: 40px;
+select, #search, #sort_options, #listing_options, #inside, #searchbox > *, #sort_submit {
+	height: 38px;
 }
 
 .search_label {
@@ -400,7 +390,7 @@ select {
 
 select, #search {
 	border: none;
-	padding: 0 15px;
+	padding: 0 10px;
 
 	appearance: none;
 	-webkit-appearance: none;
@@ -458,6 +448,11 @@ button.submit:hover > svg { stroke: var(--accent); }
 	border-radius: 5px 0px 0px 5px;
 }
 
+#listing_options + #sort_select {
+	margin-left: 10px;
+	border-radius: 5px 0px 0px 5px;
+}
+
 #search_sort {
 	background: var(--highlighted);
 	border-radius: 5px;
@@ -486,15 +481,20 @@ button.submit:hover > svg { stroke: var(--accent); }
 	margin-bottom: 20px;
 }
 
-#sort_options, footer > a {
+#listing_options {
+	overflow-x: auto;
+}
+
+#sort_options, #listing_options, footer > a {
 	border-radius: 5px;
+	align-items: center;
 	box-shadow: var(--shadow);
 	background: var(--outside);
 	display: flex;
-	overflow: auto;
+	overflow: hidden;
 }
 
-#sort_options > a, footer > a {
+#sort_options > a, #listing_options > a, footer > a {
 	color: var(--text);
 	padding: 10px 20px;
 	text-align: center;
@@ -502,12 +502,12 @@ button.submit:hover > svg { stroke: var(--accent); }
 	transition: 0.2s background;
 }
 
-#sort_options > a.selected {
+#sort_options > a.selected, #listing_options > a.selected {
 	background: var(--accent);
 	color: var(--foreground);
 }
 
-#sort_options > a:not(.selected):hover {
+#sort_options > a:not(.selected):hover, #listing_options > a:not(.selected):hover {
 	background: var(--foreground);
 }
 
@@ -569,6 +569,13 @@ a.search_subreddit:hover {
 	opacity: 0.5;
 }
 
+#more_subreddits {
+	justify-content: center;
+	color: var(--accent);
+	font-weight: 600;
+	text-align: center;
+}
+
 /* Post */
 
 .sep {
@@ -595,6 +602,7 @@ a.search_subreddit:hover {
 		"post_score post_title post_thumbnail" 1fr
 		"post_score post_media post_thumbnail" auto
 		"post_score post_body post_thumbnail" auto
+		"post_score post_notification post_thumbnail" auto
 		"post_score post_footer post_thumbnail" auto
 		/ minmax(40px, auto) minmax(0, 1fr) fit-content(min(20%, 152px));
 }
@@ -606,13 +614,13 @@ a.search_subreddit:hover {
 }
 
 .post_score {
-	padding-top: 16px;
+	padding-top: 19px;
+	padding-left: 12px;
 	font-size: 13px;
 	font-weight: bold;
-	text-align: end;
 	color: var(--accent);
 	grid-area: post_score;
-	text-align: end;
+	text-align: center;
 	border-radius: 5px 0 0 5px;
 	transition: 0.2s background;
 }
@@ -622,8 +630,9 @@ a.search_subreddit:hover {
 }
 
 .post_header {
-	margin: 15px 20px 5px 15px;
+	margin: 15px 20px 5px 12px;
 	grid-area: post_header;
+	line-height: 25px;
 }
 
 .post_subreddit {
@@ -632,11 +641,28 @@ a.search_subreddit:hover {
 
 .post_title {
 	font-size: 16px;
+	font-weight: 500;
 	line-height: 1.5;
-	margin: 5px 15px;
+	overflow-wrap: anywhere;
+	margin: 5px 15px 5px 12px;
 	grid-area: post_title;
 }
 
+.post:not(.highlighted) .post_title a:visited {
+	color: var(--visited);
+}
+
+.post_notification {
+	grid-area: post_notification;
+	margin: 5px 15px;
+	text-align: center;
+	font-size: 12px;
+}
+
+.post_notification a {
+	text-decoration: underline;
+}
+
 .post_flair {
 	background: var(--accent);
 	color: var(--background);
@ -647,6 +673,26 @@ a.search_subreddit:hover {
|
|||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.awards {
|
||||||
|
background-color: var(--foreground);
|
||||||
|
border-radius: 5px;
|
||||||
|
margin: auto;
|
||||||
|
padding: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.awards .award {
|
||||||
|
margin-right: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.award {
|
||||||
|
position: relative;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.award > img {
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
.author_flair:empty, .post_flair:empty {
|
.author_flair:empty, .post_flair:empty {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
@ -671,22 +717,39 @@ a.search_subreddit:hover {
|
|||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_media_image, .post .__NoScript_PlaceHolder__, .post_media_video, .gallery {
|
.post_media_content, .post .__NoScript_PlaceHolder__, .gallery {
|
||||||
max-width: calc(100% - 40px);
|
max-width: calc(100% - 40px);
|
||||||
grid-area: post_media;
|
grid-area: post_media;
|
||||||
margin: 15px auto 5px auto;
|
margin: 15px auto 5px auto;
|
||||||
|
width: auto;
|
||||||
height: auto;
|
height: auto;
|
||||||
|
overflow: hidden;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.post_media_video {
|
||||||
.post_media_video.short {
|
|
||||||
max-height: 512px;
|
|
||||||
width: auto;
|
width: auto;
|
||||||
|
height: auto;
|
||||||
|
max-width: 100%;
|
||||||
|
max-height: 512px;
|
||||||
|
display: block;
|
||||||
|
margin: auto;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_media_image.short svg, .post_media_image.short img{
|
.post_media_image.short svg, .post_media_image.short img{
|
||||||
max-height: 512px;
|
|
||||||
width: auto;
|
width: auto;
|
||||||
|
height: auto;
|
||||||
|
max-width: 100%;
|
||||||
|
max-height: 512px;
|
||||||
|
display: block;
|
||||||
|
margin: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.post_nsfw_blur {
|
||||||
|
filter: blur(1.5rem);
|
||||||
|
}
|
||||||
|
|
||||||
|
.post_nsfw_blur:hover {
|
||||||
|
filter: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_media_image svg{
|
.post_media_image svg{
|
||||||
@ -725,16 +788,27 @@ a.search_subreddit:hover {
|
|||||||
|
|
||||||
#post_url {
|
#post_url {
|
||||||
color: var(--accent);
|
color: var(--accent);
|
||||||
margin: 5px 15px;
|
margin: 5px 12px;
|
||||||
grid-area: post_media;
|
grid-area: post_media;
|
||||||
|
overflow-wrap: anywhere;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_body {
|
.post_body {
|
||||||
opacity: 0.9;
|
opacity: 0.9;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
margin: 5px 15px;
|
padding: 5px 15px 5px 12px;
|
||||||
grid-area: post_body;
|
grid-area: post_body;
|
||||||
width: calc(100% - 30px);
|
width: calc(100% - 30px);
|
||||||
|
overflow-wrap: anywhere;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Used only for text post preview */
|
||||||
|
.post_preview {
|
||||||
|
-webkit-mask-image: linear-gradient(180deg,#000 60%,transparent);;
|
||||||
|
mask-image: linear-gradient(180deg,#000 60%,transparent);
|
||||||
|
opacity: 0.8;
|
||||||
|
max-height: 250px;
|
||||||
|
overflow: hidden;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_footer {
|
.post_footer {
|
||||||
@ -743,7 +817,7 @@ a.search_subreddit:hover {
|
|||||||
opacity: 0.5;
|
opacity: 0.5;
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
grid-area: post_footer;
|
grid-area: post_footer;
|
||||||
margin: 5px 20px 15px 15px;
|
margin: 5px 20px 15px 12px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_comments {
|
.post_comments {
|
||||||
@ -771,13 +845,25 @@ a.search_subreddit:hover {
|
|||||||
margin: 5px;
|
margin: 5px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_thumbnail svg {
|
.post_thumbnail div {
|
||||||
grid-area: 1 / 1 / 2 / 2;
|
grid-area: 1 / 1 / 2 / 2;
|
||||||
width: 100%;
|
|
||||||
height: auto;
|
|
||||||
object-fit: cover;
|
object-fit: cover;
|
||||||
align-self: center;
|
align-self: center;
|
||||||
justify-self: center;
|
justify-self: center;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.post_thumbnail div svg {
|
||||||
|
width: 100%;
|
||||||
|
height: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.post_thumbnail span {
|
||||||
|
z-index: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.thumb_nsfw_blur {
|
||||||
|
filter: blur(0.3rem)
|
||||||
}
|
}
|
||||||
|
|
||||||
.post_thumbnail.no_thumbnail {
|
.post_thumbnail.no_thumbnail {
|
||||||
@ -856,7 +942,8 @@ a.search_subreddit:hover {
|
|||||||
min-width: 40px;
|
min-width: 40px;
|
||||||
border-radius: 5px;
|
border-radius: 5px;
|
||||||
padding: 10px 0;
|
padding: 10px 0;
|
||||||
font-size: 16px;
|
font-size: 14px;
|
||||||
|
font-weight: 600;
|
||||||
}
|
}
|
||||||
|
|
||||||
.comment_right {
|
.comment_right {
|
||||||
@ -881,9 +968,10 @@ a.search_subreddit:hover {
|
|||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
padding: 5px 5px;
|
padding: 5px 5px;
|
||||||
margin: 5px 0;
|
margin: 5px 0;
|
||||||
|
overflow: auto;
|
||||||
}
|
}
|
||||||
|
|
||||||
.comment_body.highlighted {
|
.comment_body.highlighted, .comment_body_filtered.highlighted {
|
||||||
background: var(--highlighted);
|
background: var(--highlighted);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -896,6 +984,15 @@ a.search_subreddit:hover {
|
|||||||
color: var(--accent);
|
color: var(--accent);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.comment_body_filtered {
|
||||||
|
opacity: 0.4;
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: italic;
|
||||||
|
padding: 5px 5px;
|
||||||
|
margin: 5px 0;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
|
||||||
.deeper_replies {
|
.deeper_replies {
|
||||||
color: var(--accent);
|
color: var(--accent);
|
||||||
margin-left: 15px;
|
margin-left: 15px;
|
||||||
@ -926,6 +1023,10 @@ a.search_subreddit:hover {
|
|||||||
background: var(--foreground);
|
background: var(--foreground);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
summary.comment_data {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
.moderator, .admin { opacity: 1; }
|
.moderator, .admin { opacity: 1; }
|
||||||
.op, .moderator, .admin { font-weight: bold; }
|
.op, .moderator, .admin { font-weight: bold; }
|
||||||
|
|
||||||
@ -960,7 +1061,7 @@ a.search_subreddit:hover {
|
|||||||
}
|
}
|
||||||
|
|
||||||
.compact .post_header {
|
.compact .post_header {
|
||||||
margin: 15px 15px 2.5px 15px;
|
margin: 11px 15px 2.5px 12px;
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -968,6 +1069,10 @@ a.search_subreddit:hover {
|
|||||||
margin: 2.5px 15px;
|
margin: 2.5px 15px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.compact .post_preview {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
.compact .post_media {
|
.compact .post_media {
|
||||||
max-width: calc(100% - 30px);
|
max-width: calc(100% - 30px);
|
||||||
margin: 2.5px auto;
|
margin: 2.5px auto;
|
||||||
@ -1002,12 +1107,10 @@ a.search_subreddit:hover {
|
|||||||
color: var(--accent);
|
color: var(--accent);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
.prefs {
|
.prefs {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
align-items: center;
|
|
||||||
padding: 20px;
|
padding: 20px;
|
||||||
background: var(--post);
|
background: var(--post);
|
||||||
border-radius: 5px;
|
border-radius: 5px;
|
||||||
@ -1020,7 +1123,19 @@ a.search_subreddit:hover {
|
|||||||
width: 100%;
|
width: 100%;
|
||||||
height: 35px;
|
height: 35px;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
margin-top: 10px;
|
margin-top: 7px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prefs legend {
|
||||||
|
font-weight: 500;
|
||||||
|
border-bottom: 1px solid var(--highlighted);
|
||||||
|
font-size: 18px;
|
||||||
|
padding-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prefs legend:not(:first-child) {
|
||||||
|
padding-top: 10px;
|
||||||
|
margin-top: 15px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.prefs select {
|
.prefs select {
|
||||||
@ -1051,6 +1166,24 @@ input[type="submit"] {
|
|||||||
margin-left: 30px;
|
margin-left: 30px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#settings_subs a {
|
||||||
|
color: var(--accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
#settings_filters .unsubscribe {
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
#settings_filters a {
|
||||||
|
color: var(--accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.helper {
|
||||||
|
padding: 10px;
|
||||||
|
width: 250px;
|
||||||
|
background: var(--highlighted) !important;
|
||||||
|
}
|
||||||
|
|
||||||
/* Markdown */
|
/* Markdown */
|
||||||
|
|
||||||
.md {
|
.md {
|
||||||
@ -1079,16 +1212,21 @@ input[type="submit"] {
|
|||||||
color: var(--accent);
|
color: var(--accent);
|
||||||
}
|
}
|
||||||
|
|
||||||
.md .md-spoiler-text {
|
.md .md-spoiler-text, .md-spoiler-text a {
|
||||||
background: var(--highlighted);
|
background: var(--highlighted);
|
||||||
color: transparent;
|
color: transparent;
|
||||||
}
|
}
|
||||||
|
|
||||||
.md .md-spoiler-text:hover {
|
.md-spoiler-text:hover {
|
||||||
background: var(--foreground);
|
background: var(--foreground);
|
||||||
color: var(--text);
|
color: var(--text);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.md-spoiler-text:hover a {
|
||||||
|
background: var(--foreground);
|
||||||
|
color: var(--accent);
|
||||||
|
}
|
||||||
|
|
||||||
.md li { margin: 10px 0; }
|
.md li { margin: 10px 0; }
|
||||||
.toc_child { list-style: none; }
|
.toc_child { list-style: none; }
|
||||||
|
|
||||||
@ -1103,10 +1241,13 @@ input[type="submit"] {
|
|||||||
|
|
||||||
.md table {
|
.md table {
|
||||||
margin: 5px;
|
margin: 5px;
|
||||||
|
overflow-x: auto;
|
||||||
|
display: block;
|
||||||
|
max-width: fit-content;
|
||||||
}
|
}
|
||||||
|
|
||||||
.md code {
|
.md code {
|
||||||
font-family: monospace;
|
font-family: monospace, sans-serif;
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1185,6 +1326,7 @@ td, th {
|
|||||||
"post_title post_title post_thumbnail" 1fr
|
"post_title post_title post_thumbnail" 1fr
|
||||||
"post_media post_media post_thumbnail" auto
|
"post_media post_media post_thumbnail" auto
|
||||||
"post_body post_body post_thumbnail" auto
|
"post_body post_body post_thumbnail" auto
|
||||||
|
"post_notification post_notification post_thumbnail" auto
|
||||||
"post_score post_footer post_thumbnail" auto
|
"post_score post_footer post_thumbnail" auto
|
||||||
/ auto 1fr fit-content(min(20%, 152px));
|
/ auto 1fr fit-content(min(20%, 152px));
|
||||||
}
|
}
|
||||||
@ -1231,4 +1373,4 @@ td, th {
|
|||||||
padding: 7px 0px;
|
padding: 7px 0px;
|
||||||
margin-right: -5px;
|
margin-right: -5px;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
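Because the .post_score rule above is edited across several scattered +/- lines, it can help to read it out in its final form. This is only a reference sketch assembled from the @@ -606,13 +614,13 @@ hunk (indentation assumed); it is not an additional change:

/* .post_score as it stands after this diff, assembled for reference */
.post_score {
  padding-top: 19px;
  padding-left: 12px;
  font-size: 13px;
  font-weight: bold;
  color: var(--accent);
  grid-area: post_score;
  text-align: center;
  border-radius: 5px 0 0 5px;
  transition: 0.2s background;
}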
static/themes/black.css (new file)
@@ -0,0 +1,14 @@
+/* Black theme setting */
+.black {
+--accent: #009a9a;
+--green: #00a229;
+--text: white;
+--foreground: #0f0f0f;
+--background: black;
+--outside: black;
+--post: black;
+--panel-border: 2px solid #0f0f0f;
+--highlighted: #0f0f0f;
+--visited: #aaa;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}

static/themes/dark.css (new file)
@@ -0,0 +1,14 @@
+/* Dark theme setting */
+.dark{
+--accent: aqua;
+--green: #5cff85;
+--text: white;
+--foreground: #222;
+--background: #0f0f0f;
+--outside: #1f1f1f;
+--post: #161616;
+--panel-border: 1px solid #333;
+--highlighted: #333;
+--visited: #aaa;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
+}

static/themes/doomone.css (new file)
@@ -0,0 +1,13 @@
+.doomone {
+--accent: #51afef;
+--green: #00a229;
+--text: #bbc2cf;
+--foreground: #3d4148;
+--background: #282c34;
+--outside: #52565c;
+--post: #24272e;
+--panel-border: 2px solid #52565c;
+--highlighted: #686b70;
+--visited: #969692;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}

static/themes/dracula.css (new file)
@@ -0,0 +1,14 @@
+/* Dracula theme setting */
+.dracula {
+--accent: #bd93f9;
+--green: #50fa7b;
+--text: #f8f8f2;
+--foreground: #3d4051;
+--background: #282a36;
+--outside: #393c4d;
+--post: #333544;
+--panel-border: 2px solid #44475a;
+--highlighted: #4e5267;
+--visited: #969692;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}

static/themes/gold.css (new file)
@@ -0,0 +1,14 @@
+/* Gold theme setting */
+.gold {
+--accent: #f2aa4c;
+--green: #5cff85;
+--text: white;
+--foreground: #234;
+--background: #101820;
+--outside: #1b2936;
+--post: #1b2936;
+--panel-border: 0px solid black;
+--highlighted: #234;
+--visited: #aaa;
+--shadow: 0 2px 5px rgba(0, 0, 0, 0.5);
+}

static/themes/gruvboxdark.css (new file)
@@ -0,0 +1,13 @@
+/* Gruvbox-Dark theme setting */
+.gruvboxdark {
+--accent: #8ec07c;
+--green: #b8bb26;
+--text: #ebdbb2;
+--foreground: #3c3836;
+--background: #282828;
+--outside: #3c3836;
+--post: #3c3836;
+--panel-border: 1px solid #504945;
+--highlighted: #282828;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
+}

static/themes/gruvboxlight.css (new file)
@@ -0,0 +1,13 @@
+/* Gruvbox-Light theme setting */
+.gruvboxlight {
+--accent: #427b58;
+--green: #79740e;
+--text: #3c3836;
+--foreground: #ebdbb2;
+--background: #fbf1c7;
+--outside: #ebdbb2;
+--post: #ebdbb2;
+--panel-border: 1px solid #d5c4a1;
+--highlighted: #fbf1c7;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.25);
+}

static/themes/laserwave.css (new file)
@@ -0,0 +1,14 @@
+/* Laserwave theme setting */
+.laserwave {
+--accent: #eb64b9;
+--green: #74dfc4;
+--text: #e0dfe1;
+--foreground: #302a36;
+--background: #27212e;
+--outside: #3e3647;
+--post: #3e3647;
+--panel-border: 2px solid #2f2738;
+--highlighted: #302a36;
+--visited: #91889b;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}

static/themes/light.css (new file)
@@ -0,0 +1,14 @@
+/* Light theme setting */
+.light {
+--accent: #009a9a;
+--green: #00a229;
+--text: black;
+--foreground: #f5f5f5;
+--background: #ddd;
+--outside: #ececec;
+--post: #eee;
+--panel-border: 1px solid #ccc;
+--highlighted: white;
+--visited: #555;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}

static/themes/nord.css (new file)
@@ -0,0 +1,14 @@
+/* Nord theme setting */
+.nord {
+--accent: #8fbcbb;
+--green: #a3be8c;
+--text: #eceff4;
+--foreground: #3b4252;
+--background: #2e3440;
+--outside: #434c5e;
+--post: #434c5e;
+--panel-border: 2px solid #4c566a;
+--highlighted: #3b4252;
+--visited: #a3a5aa;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}

static/themes/rosebox.css (new file)
@@ -0,0 +1,13 @@
+/* Rosebox theme setting */
+.rosebox {
+--accent: #a57562;
+--green: #a3be8c;
+--text: white;
+--foreground: #222;
+--background: #262626;
+--outside: #222;
+--post: #222;
+--panel-border: 1px solid #222;
+--highlighted: #262626;
+--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
+}

static/themes/violet.css (new file)
@@ -0,0 +1,14 @@
+/* Violet theme setting */
+.violet {
+--accent: #7c71dd;
+--green: #5cff85;
+--text: white;
+--foreground: #1F2347;
+--background: #12152b;
+--outside: #181c3a;
+--post: #181c3a;
+--panel-border: 1px solid #1F2347;
+--highlighted: #1F2347;
+--visited: #aaa;
+--shadow: 0 2px 5px rgba(0, 0, 0, 0.5);
+}
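Each bundled theme above is a single class that defines the same family of CSS custom properties (--accent, --green, --text, --foreground, --background, --outside, --post, --panel-border, --highlighted, --shadow, and usually --visited), which the main stylesheet consumes through var(). A minimal sketch of an additional theme following the same pattern — the class name and colors here are hypothetical and not part of this diff, and it assumes a theme is activated by placing its class on the page body as the templates suggest:

/* Hypothetical example theme, not part of this diff */
.example {
  --accent: #d08770;
  --green: #a3be8c;
  --text: #e5e9f0;
  --foreground: #2b303b;
  --background: #1c2023;
  --outside: #232830;
  --post: #232830;
  --panel-border: 1px solid #2b303b;
  --highlighted: #2b303b;
  --visited: #9099ab;
  --shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
}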
@@ -12,14 +12,14 @@
 <meta name="apple-mobile-web-app-title" content="Libreddit">
 <meta name="apple-mobile-web-app-capable" content="yes">
 <meta name="apple-mobile-web-app-status-bar-style" content="default">
 <!-- Android -->
 <meta name="mobile-web-app-capable" content="yes">
 <!-- iOS Logo -->
 <link href="/touch-icon-iphone.png" rel="apple-touch-icon">
 <!-- PWA Manifest -->
 <link rel="manifest" type="application/json" href="/manifest.json">
 <link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
-<link rel="stylesheet" type="text/css" href="/style.css">
+<link rel="stylesheet" type="text/css" href="/style.css?v={{ env!("CARGO_PKG_VERSION") }}">
 {% endblock %}
 </head>
 <body class="
@@ -35,6 +35,12 @@
 </div>
 {% block search %}{% endblock %}
 <div id="links">
+<a id="reddit_link" href="https://www.reddit.com{{ url }}" rel="nofollow">
+<span>reddit</span>
+<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+<path d="M23 12.0737C23 10.7308 21.9222 9.64226 20.5926 9.64226C19.9435 9.64226 19.3557 9.90274 18.923 10.3244C17.2772 9.12492 15.0099 8.35046 12.4849 8.26135L13.5814 3.05002L17.1643 3.8195C17.2081 4.73947 17.9539 5.47368 18.8757 5.47368C19.8254 5.47368 20.5951 4.69626 20.5951 3.73684C20.5951 2.77769 19.8254 2 18.8758 2C18.2001 2 17.6214 2.39712 17.3404 2.96952L13.3393 2.11066C13.2279 2.08679 13.1116 2.10858 13.016 2.17125C12.9204 2.23393 12.8533 2.33235 12.8295 2.44491L11.6051 8.25987C9.04278 8.33175 6.73904 9.10729 5.07224 10.3201C4.63988 9.90099 4.05398 9.64226 3.40757 9.64226C2.0781 9.64226 1 10.7308 1 12.0737C1 13.0618 1.58457 13.9105 2.4225 14.2909C2.38466 14.5342 2.36545 14.78 2.36505 15.0263C2.36505 18.7673 6.67626 21.8 11.9945 21.8C17.3131 21.8 21.6243 18.7673 21.6243 15.0263C21.6243 14.7794 21.6043 14.5359 21.5678 14.2957C22.4109 13.9175 23 13.0657 23 12.0737Z"/>
+</svg>
+</a>
 <a id="settings_link" href="/settings">
 <span>settings</span>
 <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
@@ -2,23 +2,39 @@

 {% if kind == "more" && parent_kind == "t1" %}
 <a class="deeper_replies" href="{{ post_link }}{{ parent_id }}">→ More replies</a>
 {% else if kind == "t1" %}
 <div id="{{ id }}" class="comment">
 <div class="comment_left">
 <p class="comment_score" title="{{ score.1 }}">{{ score.0 }}</p>
 <div class="line"></div>
 </div>
-<details class="comment_right" open>
+<details class="comment_right" {% if !collapsed || highlighted %}open{% endif %}>
 <summary class="comment_data">
-<a class="comment_author {{ author.distinguished }} {% if author.name == post_author %}op{% endif %}" href="/user/{{ author.name }}">u/{{ author.name }}</a>
+{% if author.name != "[deleted]" %}
+<a class="comment_author {{ author.distinguished }} {% if author.name == post_author %}op{% endif %}" href="/user/{{ author.name }}">u/{{ author.name }}</a>
+{% else %}
+<span class="comment_author {{ author.distinguished }}">u/[deleted]</span>
+{% endif %}
 {% if author.flair.flair_parts.len() > 0 %}
 <small class="author_flair">{% call utils::render_flair(author.flair.flair_parts) %}</small>
 {% endif %}
 <a href="{{ post_link }}{{ id }}/?context=3" class="created" title="{{ created }}">{{ rel_time }}</a>
 {% if edited.0 != "".to_string() %}<span class="edited" title="{{ edited.1 }}">edited {{ edited.0 }}</span>{% endif %}
+{% if !awards.is_empty() %}
+<span class="dot">•</span>
+{% for award in awards.clone() %}
+<span class="award" title="{{ award.name }}">
+<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
+</span>
+{% endfor %}
+{% endif %}
 </summary>
-<div class="comment_body {% if highlighted %}highlighted{% endif %}">{{ body }}</div>
-<blockquote class="replies">{% for c in replies -%}{{ c.render().unwrap() }}{%- endfor %}
+{% if is_filtered %}
+<div class="comment_body_filtered {% if highlighted %}highlighted{% endif %}">(Filtered content)</div>
+{% else %}
+<div class="comment_body {% if highlighted %}highlighted{% endif %}">{{ body|safe }}</div>
+{% endif %}
+<blockquote class="replies">{% for c in replies -%}{{ c.render().unwrap()|safe }}{%- endfor %}
 </blockquote>
 </details>
 </div>
@@ -13,16 +13,25 @@
 <!-- Meta Tags -->
 <meta name="author" content="u/{{ post.author.name }}">
 <meta name="title" content="{{ post.title }} - r/{{ post.community }}">
-<meta property="og:type" content="website">
-<meta property="og:url" content="{{ post.permalink }}">
 <meta property="og:title" content="{{ post.title }} - r/{{ post.community }}">
 <meta property="og:description" content="View on Libreddit, an alternative private front-end to Reddit.">
-<meta property="og:image" content="{{ post.thumbnail.url }}">
-<meta property="twitter:card" content="summary_large_image">
+<meta property="og:url" content="{{ post.permalink }}">
 <meta property="twitter:url" content="{{ post.permalink }}">
 <meta property="twitter:title" content="{{ post.title }} - r/{{ post.community }}">
 <meta property="twitter:description" content="View on Libreddit, an alternative private front-end to Reddit.">
+{% if post.post_type == "image" %}
+<meta property="og:type" content="image">
+<meta property="og:image" content="{{ post.thumbnail.url }}">
+<meta property="twitter:card" content="summary_large_image">
 <meta property="twitter:image" content="{{ post.thumbnail.url }}">
+{% else if post.post_type == "video" || post.post_type == "gif" %}
+<meta property="twitter:card" content="video">
+<meta property="og:type" content="video">
+<meta property="og:video" content="{{ post.media.url }}">
+<meta property="og:video:type" content="video/mp4">
+{% else %}
+<meta property="og:type" content="website">
+{% endif %}
 {% endblock %}

 {% block subscriptions %}
@@ -37,63 +46,91 @@
 <p class="post_header">
 <a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
 <span class="dot">•</span>
-<a class="post_author" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
+<a class="post_author {{ post.author.distinguished }}" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
 {% if post.author.flair.flair_parts.len() > 0 %}
 <small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
 {% endif %}
 <span class="dot">•</span>
 <span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
+{% if !post.awards.is_empty() %}
+<span class="dot">•</span>
+<span class="awards">
+{% for award in post.awards.clone() %}
+<span class="award" title="{{ award.name }}">
+<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
+{{ award.count }}
+</span>
+{% endfor %}
+</span>
+{% endif %}
 </p>
-<p class="post_title">
-<a href="{{ post.permalink }}">{{ post.title }}</a>
+<h1 class="post_title">
+{{ post.title }}
 {% if post.flair.flair_parts.len() > 0 %}
 <a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
 class="post_flair"
 style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call utils::render_flair(post.flair.flair_parts) %}</a>
 {% endif %}
 {% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
-</p>
+</h1>

 <!-- POST MEDIA -->
+<!-- post_type: {{ post.post_type }} -->
 {% if post.post_type == "image" %}
-<a href="{{ post.media.url }}" class="post_media_image" >
-<svg
-width="{{ post.media.width }}px"
-height="{{ post.media.height }}px"
-xmlns="http://www.w3.org/2000/svg">
-<image width="100%" height="100%" href="{{ post.media.url }}"/>
-<desc>
-<img alt="Post image" src="{{ post.media.url }}"/>
-</desc>
-</svg>
-</a>
+<div class="post_media_content">
+<a href="{{ post.media.url }}" class="post_media_image" >
+<svg
+width="{{ post.media.width }}px"
+height="{{ post.media.height }}px"
+xmlns="http://www.w3.org/2000/svg">
+<image width="100%" height="100%" href="{{ post.media.url }}"/>
+<desc>
+<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
+</desc>
+</svg>
+</a>
+</div>
 {% else if post.post_type == "video" || post.post_type == "gif" %}
-<video class="post_media_video" src="{{ post.media.url }}" controls autoplay loop><a href={{ post.media.url }}>Video</a></video>
+{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
+<script src="/hls.min.js"></script>
+<div class="post_media_content">
+<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls>
+<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
+<source src="{{ post.media.url }}" type="video/mp4" />
+</video>
+</div>
+<script src="/playHLSVideo.js"></script>
+{% else %}
+<div class="post_media_content">
+<video class="post_media_video" src="{{ post.media.url }}" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %} loop><a href={{ post.media.url }}>Video</a></video>
+</div>
+{% call utils::render_hls_notification(post.permalink[1..]) %}
+{% endif %}
 {% else if post.post_type == "gallery" %}
 <div class="gallery">
 {% for image in post.gallery -%}
 <figure>
-<a href="{{ image.url }}" ><img alt="Gallery image" src="{{ image.url }}"/></a>
+<a href="{{ image.url }}" ><img loading="lazy" alt="Gallery image" src="{{ image.url }}"/></a>
 <figcaption>
 <p>{{ image.caption }}</p>
 {% if image.outbound_url.len() > 0 %}
-<p><a class="outbound_url" href="{{ image.outbound_url }}">{{ image.outbound_url }}</a>
+<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
 {% endif %}
 </figcaption>
 </figure>
 {%- endfor %}
 </div>
 {% else if post.post_type == "link" %}
-<a id="post_url" href="{{ post.media.url }}">{{ post.media.url }}</a>
+<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
 {% endif %}

 <!-- POST BODY -->
-<div class="post_body">{{ post.body }}</div>
+<div class="post_body">{{ post.body|safe }}</div>
 <div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
 <div class="post_footer">
 <ul id="post_links">
-<li><a href="/{{ post.id }}">permalink</a></li>
-<li><a href="https://reddit.com/{{ post.id }}">reddit</a></li>
+<li><a href="{{ post.permalink }}">permalink</a></li>
+<li><a href="https://reddit.com{{ post.permalink }}" rel="nofollow">reddit</a></li>
 </ul>
 <p>{{ post.upvote_ratio }}% Upvoted</p>
 </div>
@@ -116,13 +153,13 @@
 {% for c in comments -%}
 <div class="thread">
 {% if single_thread %}
-<p class="thread_nav"><a href="/{{ post.id }}">View all comments</a></p>
+<p class="thread_nav"><a href="{{ post.permalink }}">View all comments</a></p>
 {% if c.parent_kind == "t1" %}
 <p class="thread_nav"><a href="?context=9999">Show parent comments</a></p>
 {% endif %}
 {% endif %}

-{{ c.render().unwrap() }}
+{{ c.render().unwrap()|safe }}
 </div>
 {%- endfor %}
@@ -17,6 +17,7 @@
 <label for="restrict_sr" class="search_label">in r/{{ sub }}</label>
 </div>
 {% endif %}
+{% if params.typed == "sr_user" %}<input type="hidden" name="type" value="sr_user">{% endif %}
 <select id="sort_options" name="sort" title="Sort results by">
 {% call utils::options(params.sort, ["relevance", "hot", "top", "new", "comments"], "") %}
 </select>{% if params.sort != "new" %}<select id="timeframe" name="t" title="Timeframe">
@@ -29,15 +30,19 @@
 </svg>
 </button>
 </form>

-{% if subreddits.len() > 0 %}
+{% if !is_filtered %}
+{% if subreddits.len() > 0 || params.typed == "sr_user" %}
 <div id="search_subreddits">
+{% if params.typed == "sr_user" %}
+<a href="?q={{ params.q }}&sort={{ params.sort }}&t={{ params.t }}" class="search_subreddit" id="more_subreddits">← Back to post/comment results</a>
+{% endif %}
 {% for subreddit in subreddits %}
 <a href="{{ subreddit.url }}" class="search_subreddit">
-<div class="search_subreddit_left">{% if subreddit.icon != "" %}<img src="{{ subreddit.icon }}" alt="r/{{ subreddit.name }} icon">{% endif %}</div>
+<div class="search_subreddit_left">{% if subreddit.icon != "" %}<img loading="lazy" src="{{ subreddit.icon|safe }}" alt="r/{{ subreddit.name }} icon">{% endif %}</div>
 <div class="search_subreddit_right">
 <p class="search_subreddit_header">
-<span class="search_subreddit_name">{{ subreddit.name }}</span>
+<span class="search_subreddit_name">r/{{ subreddit.name }}</span>
 <span class="dot">•</span>
 <span class="search_subreddit_members" title="{{ subreddit.subscribers.1 }} Members">{{ subreddit.subscribers.0 }} Members</span>
 </p>
@@ -45,42 +50,62 @@
 </div>
 </a>
 {% endfor %}
+{% if params.typed != "sr_user" %}
+<a href="?q={{ params.q }}&sort={{ params.sort }}&t={{ params.t }}&type=sr_user" class="search_subreddit" id="more_subreddits">More subreddit results →</a>
+{% endif %}
 </div>
-{% endif %}
-{% for post in posts %}
-
-{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
-{% else if post.title != "Comment" %}
-{% call utils::post_in_list(post) %}
-{% else %}
-<div class="comment">
-<div class="comment_left">
-<p class="comment_score" title="{{ post.score.1 }}">{{ post.score.0 }}</p>
-<div class="line"></div>
-</div>
-<details class="comment_right" open>
-<summary class="comment_data">
-<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
-<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
-</summary>
-<p class="comment_body">{{ post.body }}</p>
-</details>
-</div>
 {% endif %}
-{% endfor %}
+{% endif %}

+{% if all_posts_hidden_nsfw %}
+<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
+{% endif %}
+
+{% if all_posts_filtered %}
+<center>(All content on this page has been filtered)</center>
+{% else if is_filtered %}
+<center>(Content from r/{{ sub }} has been filtered)</center>
+{% else if params.typed != "sr_user" %}
+{% for post in posts %}
+{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
+{% else if !post.title.is_empty() %}
+{% call utils::post_in_list(post) %}
+{% else %}
+<div class="comment">
+<div class="comment_left">
+<p class="comment_score" title="{{ post.score.1 }}">{{ post.score.0 }}</p>
+<div class="line"></div>
+</div>
+<details class="comment_right" open>
+<summary class="comment_data">
+<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
+<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
+</summary>
+<p class="comment_body">{{ post.body }}</p>
+</details>
+</div>
+{% endif %}
+{% endfor %}
+{% endif %}
+{% if prefs.use_hls == "on" %}
+<script src="/hls.min.js"></script>
+<script src="/playHLSVideo.js"></script>
+{% endif %}
+
+{% if params.typed != "sr_user" %}
 <footer>
 {% if params.before != "" %}
 <a href="?q={{ params.q }}&restrict_sr={{ params.restrict_sr }}
 &sort={{ params.sort }}&t={{ params.t }}
-&before={{ params.before }}">PREV</a>
+&before={{ params.before }}" accesskey="P">PREV</a>
 {% endif %}

 {% if params.after != "" %}
 <a href="?q={{ params.q }}&restrict_sr={{ params.restrict_sr }}
 &sort={{ params.sort }}&t={{ params.t }}
-&after={{ params.after }}">NEXT</a>
+&after={{ params.after }}" accesskey="N">NEXT</a>
 {% endif %}
 </footer>
+{% endif %}
 </div>
 {% endblock %}
|
|||||||
<div id="settings">
|
<div id="settings">
|
||||||
<form action="/settings" method="POST">
|
<form action="/settings" method="POST">
|
||||||
<div class="prefs">
|
<div class="prefs">
|
||||||
<p>Appearance</p>
|
<legend>Appearance</legend>
|
||||||
<div id="theme">
|
<div id="theme">
|
||||||
<label for="theme">Theme:</label>
|
<label for="theme">Theme:</label>
|
||||||
<select name="theme">
|
<select name="theme">
|
||||||
{% call utils::options(prefs.theme, ["system", "light", "dark", "black"], "system") %}
|
{% call utils::options(prefs.theme, prefs.available_themes, "system") %}
|
||||||
</select>
|
</select>
|
||||||
</div>
|
</div>
|
||||||
<p>Interface</p>
|
<legend>Interface</legend>
|
||||||
<div id="front_page">
|
<div id="front_page">
|
||||||
<label for="front_page">Front page:</label>
|
<label for="front_page">Front page:</label>
|
||||||
<select name="front_page">
|
<select name="front_page">
|
||||||
@ -33,9 +33,10 @@
|
|||||||
</div>
|
</div>
|
||||||
<div id="wide">
|
<div id="wide">
|
||||||
<label for="wide">Wide UI:</label>
|
<label for="wide">Wide UI:</label>
|
||||||
|
<input type="hidden" value="off" name="wide">
|
||||||
<input type="checkbox" name="wide" {% if prefs.wide == "on" %}checked{% endif %}>
|
<input type="checkbox" name="wide" {% if prefs.wide == "on" %}checked{% endif %}>
|
||||||
</div>
|
</div>
|
||||||
<p>Content</p>
|
<legend>Content</legend>
|
||||||
<div id="post_sort">
|
<div id="post_sort">
|
||||||
<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
|
<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
|
||||||
<select name="post_sort">
|
<select name="post_sort">
|
||||||
@ -50,17 +51,45 @@
|
|||||||
</div>
|
</div>
|
||||||
<div id="show_nsfw">
|
<div id="show_nsfw">
|
||||||
<label for="show_nsfw">Show NSFW posts:</label>
|
<label for="show_nsfw">Show NSFW posts:</label>
|
||||||
|
<input type="hidden" value="off" name="show_nsfw">
|
||||||
<input type="checkbox" name="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
|
<input type="checkbox" name="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
|
||||||
</div>
|
</div>
|
||||||
|
<div id="blur_nsfw">
|
||||||
|
<label for="blur_nsfw">Blur NSFW previews:</label>
|
||||||
|
<input type="hidden" value="off" name="blur_nsfw">
|
||||||
|
<input type="checkbox" name="blur_nsfw" {% if prefs.blur_nsfw == "on" %}checked{% endif %}>
|
||||||
|
</div>
|
||||||
|
<div id="autoplay_videos">
|
||||||
|
<label for="autoplay_videos">Autoplay videos</label>
|
||||||
|
<input type="hidden" value="off" name="autoplay_videos">
|
||||||
|
<input type="checkbox" name="autoplay_videos" {% if prefs.autoplay_videos == "on" %}checked{% endif %}>
|
||||||
|
</div>
|
||||||
|
<div id="use_hls">
|
||||||
|
<label for="use_hls">Use HLS for videos
|
||||||
|
<details id="feeds">
|
||||||
|
<summary>Why?</summary>
|
||||||
|
<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled to be played with audio. Therefore, this toggle lets you either use Libreddit JS-free or utilize this feature.</div>
|
||||||
|
</details>
|
||||||
|
</label>
|
||||||
|
<input type="hidden" value="off" name="use_hls">
|
||||||
|
<input type="checkbox" name="use_hls" {% if prefs.use_hls == "on" %}checked{% endif %}>
|
||||||
|
</div>
|
||||||
|
<div id="hide_hls_notification">
|
||||||
|
<label for="hide_hls_notification">Hide notification about possible HLS usage</label>
|
||||||
|
<input type="hidden" value="off" name="hide_hls_notification">
|
||||||
|
<input type="checkbox" name="hide_hls_notification" {% if prefs.hide_hls_notification == "on" %}checked{% endif %}>
|
||||||
|
</div>
|
||||||
<input id="save" type="submit" value="Save">
|
<input id="save" type="submit" value="Save">
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
{% if prefs.subscriptions.len() > 0 %}
|
{% if prefs.subscriptions.len() > 0 %}
|
||||||
<div class="prefs" id="settings_subs">
|
<div class="prefs" id="settings_subs">
|
||||||
<p>Subscribed Subreddits</p>
|
<legend>Subscribed Feeds</legend>
|
||||||
{% for sub in prefs.subscriptions %}
|
{% for sub in prefs.subscriptions %}
|
||||||
<div>
|
<div>
|
||||||
<span>{{ sub }}</span>
|
{% let feed -%}
|
||||||
|
{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = format!("r/{}", sub) -%}{% endif -%}
|
||||||
|
<a href="/{{ feed }}">{{ feed }}</a>
|
||||||
<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
|
<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
|
||||||
<button class="unsubscribe">Unsubscribe</button>
|
<button class="unsubscribe">Unsubscribe</button>
|
||||||
</form>
|
</form>
|
||||||
@ -68,10 +97,25 @@
|
|||||||
{% endfor %}
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
{% if !prefs.filters.is_empty() %}
|
||||||
|
<div class="prefs" id="settings_filters">
|
||||||
|
<legend>Filtered Feeds</legend>
|
||||||
|
{% for sub in prefs.filters %}
|
||||||
|
<div>
|
||||||
|
{% let feed -%}
|
||||||
|
{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = format!("r/{}", sub) -%}{% endif -%}
|
||||||
|
<a href="/{{ feed }}">{{ feed }}</a>
|
||||||
|
<form action="/r/{{ sub }}/unfilter/?redirect=settings" method="POST">
|
||||||
|
<button class="unfilter">Unfilter</button>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
<div id="settings_note">
|
<div id="settings_note">
|
||||||
<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
|
<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
|
||||||
<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&subscriptions={{ prefs.subscriptions.join("%2B") }}">this link</a>.</p>
|
<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&blur_nsfw={{ prefs.blur_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -17,6 +17,7 @@
|
|||||||
|
|
||||||
{% block body %}
|
{% block body %}
|
||||||
<main>
|
<main>
|
||||||
|
{% if !is_filtered %}
|
||||||
<div id="column_one">
|
<div id="column_one">
|
||||||
<form id="sort">
|
<form id="sort">
|
||||||
<div id="sort_options">
|
<div id="sort_options">
|
||||||
@ -40,11 +41,18 @@
|
|||||||
</form>
|
</form>
|
||||||
|
|
||||||
{% if sub.name.contains("+") %}
|
{% if sub.name.contains("+") %}
|
||||||
<form action="/r/{{ sub.name }}/subscribe" method="POST">
|
<form action="/r/{{ sub.name }}/subscribe?redirect={{ redirect_url }}" method="POST">
|
||||||
<button id="multisub" class="subscribe" title="Subscribe to each sub in this multireddit">Subscribe to Multireddit</button>
|
<button id="multisub" class="subscribe" title="Subscribe to each sub in this multireddit">Subscribe to Multireddit</button>
|
||||||
</form>
|
</form>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
{% if all_posts_hidden_nsfw %}
|
||||||
|
<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if all_posts_filtered %}
|
||||||
|
<center>(All content on this page has been filtered)</center>
|
||||||
|
{% else %}
|
||||||
<div id="posts">
|
<div id="posts">
|
||||||
{% for post in posts %}
|
{% for post in posts %}
|
||||||
{% if !(post.flags.nsfw && prefs.show_nsfw != "on") %}
|
{% if !(post.flags.nsfw && prefs.show_nsfw != "on") %}
|
||||||
@ -52,20 +60,30 @@
|
|||||||
{% call utils::post_in_list(post) %}
|
{% call utils::post_in_list(post) %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
{% if prefs.use_hls == "on" %}
|
||||||
|
<script src="/hls.min.js"></script>
|
||||||
|
<script src="/playHLSVideo.js"></script>
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
<footer>
|
<footer>
|
||||||
{% if ends.0 != "" %}
|
{% if !ends.0.is_empty() %}
|
||||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
|
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}" accesskey="P">PREV</a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if ends.1 != "" %}
|
{% if !ends.1.is_empty() %}
|
||||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
|
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}" accesskey="N">NEXT</a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</footer>
|
</footer>
|
||||||
</div>
|
</div>
|
||||||
{% if sub.name != "" && !sub.name.contains("+") %}
|
{% endif %}
|
||||||
|
{% if is_filtered || (!sub.name.is_empty() && sub.name != "all" && sub.name != "popular" && !sub.name.contains("+")) %}
|
||||||
<aside>
|
<aside>
|
||||||
|
{% if is_filtered %}
|
||||||
|
<center>(Content from r/{{ sub.name }} has been filtered)</center>
|
||||||
|
{% endif %}
|
||||||
|
{% if !sub.name.is_empty() && sub.name != "all" && sub.name != "popular" && !sub.name.contains("+") %}
|
||||||
<div class="panel" id="subreddit">
|
<div class="panel" id="subreddit">
|
||||||
{% if sub.wiki %}
|
{% if sub.wiki %}
|
||||||
<div id="top">
|
<div id="top">
|
||||||
@ -74,8 +92,8 @@
|
|||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<div id="sub_meta">
|
<div id="sub_meta">
|
||||||
<img id="sub_icon" src="{{ sub.icon }}" alt="Icon for r/{{ sub.name }}">
|
<img loading="lazy" id="sub_icon" src="{{ sub.icon }}" alt="Icon for r/{{ sub.name }}">
|
||||||
<p id="sub_title">{{ sub.title }}</p>
|
<h1 id="sub_title">{{ sub.title }}</h1>
|
||||||
<p id="sub_name">r/{{ sub.name }}</p>
|
<p id="sub_name">r/{{ sub.name }}</p>
|
||||||
<p id="sub_description">{{ sub.description }}</p>
|
<p id="sub_description">{{ sub.description }}</p>
|
||||||
<div id="sub_details">
|
<div id="sub_details">
|
||||||
@ -84,23 +102,47 @@
|
|||||||
<div title="{{ sub.members.1 }}">{{ sub.members.0 }}</div>
|
<div title="{{ sub.members.1 }}">{{ sub.members.0 }}</div>
|
||||||
<div title="{{ sub.active.1 }}">{{ sub.active.0 }}</div>
|
<div title="{{ sub.active.1 }}">{{ sub.active.0 }}</div>
|
||||||
</div>
|
</div>
|
||||||
<div id="sub_subscription">
|
<div id="sub_actions">
|
||||||
{% if prefs.subscriptions.contains(sub.name) %}
|
<div id="sub_subscription">
|
||||||
<form action="/r/{{ sub.name }}/unsubscribe" method="POST">
|
{% if prefs.subscriptions.contains(sub.name) %}
|
||||||
<button class="unsubscribe">Unsubscribe</button>
|
<form action="/r/{{ sub.name }}/unsubscribe?redirect={{ redirect_url }}" method="POST">
|
||||||
|
<button class="unsubscribe">Unsubscribe</button>
|
||||||
|
</form>
|
||||||
|
{% else %}
|
||||||
|
<form action="/r/{{ sub.name }}/subscribe?redirect={{ redirect_url }}" method="POST">
|
||||||
|
<button class="subscribe">Subscribe</button>
|
||||||
|
</form>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div id="sub_filter">
|
||||||
|
{% if prefs.filters.contains(sub.name) %}
|
||||||
|
<form action="/r/{{ sub.name }}/unfilter?redirect={{ redirect_url }}" method="POST">
|
||||||
|
<button class="unfilter">Unfilter</button>
|
||||||
|
</form>
|
||||||
|
{% else %}
|
||||||
|
<form action="/r/{{ sub.name }}/filter?redirect={{ redirect_url }}" method="POST">
|
||||||
|
<button class="filter">Filter</button>
|
||||||
</form>
|
</form>
|
||||||
{% else %}
|
{% endif %}
|
||||||
<form action="/r/{{ sub.name }}/subscribe" method="POST">
|
</div>
|
||||||
<button class="subscribe">Subscribe</button>
|
|
||||||
</form>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<details class="panel" id="sidebar">
|
<details class="panel" id="sidebar">
|
||||||
<summary id="sidebar_label">Sidebar</summary>
|
<summary id="sidebar_label">Sidebar</summary>
|
||||||
<div id="sidebar_contents">{{ sub.info }}</div>
|
<div id="sidebar_contents">
|
||||||
|
{{ sub.info|safe }}
|
||||||
|
{# <hr>
|
||||||
|
<h2>Moderators</h2>
|
||||||
|
<br>
|
||||||
|
<ul>
|
||||||
|
{% for moderator in sub.moderators %}
|
||||||
|
<li><a style="color: var(--accent)" href="/u/{{ moderator }}">{{ moderator }}</a></li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul> #}
|
||||||
|
</div>
|
||||||
</details>
|
</details>
|
||||||
|
{% endif %}
|
||||||
</aside>
|
</aside>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</main>
|
</main>
|
||||||
|
@ -13,11 +13,15 @@
|
|||||||
|
|
||||||
{% block body %}
|
{% block body %}
|
||||||
<main>
|
<main>
|
||||||
|
{% if !is_filtered %}
|
||||||
<div id="column_one">
|
<div id="column_one">
|
||||||
<form id="sort">
|
<form id="sort">
|
||||||
<select name="sort">
|
<div id="listing_options">
|
||||||
{% call utils::options(sort.0, ["hot", "new", "top"], "") %}
|
{% call utils::sort(["/user/", user.name.as_str()].concat(), ["overview", "comments", "submitted"], listing) %}
|
||||||
</select>{% if sort.0 == "top" %}<select id="timeframe" name="t">
|
</div>
|
||||||
|
<select id="sort_select" name="sort">
|
||||||
|
{% call utils::options(sort.0, ["hot", "new", "top", "controversial"], "") %}
|
||||||
|
</select>{% if sort.0 == "top" || sort.0 == "controversial" %}<select id="timeframe" name="t">
|
||||||
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
||||||
</select>{% endif %}<button id="sort_submit" class="submit">
|
</select>{% endif %}<button id="sort_submit" class="submit">
|
||||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||||
@@ -28,11 +32,18 @@
       </button>
     </form>

+    {% if all_posts_hidden_nsfw %}
+      <center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
+    {% endif %}
+
+    {% if all_posts_filtered %}
+      <center>(All content on this page has been filtered)</center>
+    {% else %}
     <div id="posts">
       {% for post in posts %}

       {% if post.flags.nsfw && prefs.show_nsfw != "on" %}
-      {% else if post.title != "Comment" %}
+      {% else if !post.title.is_empty() %}
         {% call utils::post_in_list(post) %}
       {% else %}
         <div class="comment">
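`all_posts_hidden_nsfw` and `all_posts_filtered` are page-level flags computed before rendering; the template only prints the matching notice. One plausible (unverified) derivation of the NSFW flag, with an assumed `Post` shape:

    struct Post {
        nsfw: bool,
    }

    // Illustrative only: the kind of check that could set `all_posts_hidden_nsfw` before rendering.
    fn all_posts_hidden_nsfw(posts: &[Post], show_nsfw: bool) -> bool {
        !show_nsfw && !posts.is_empty() && posts.iter().all(|p| p.nsfw)
    }

    fn main() {
        let posts = vec![Post { nsfw: true }, Post { nsfw: true }];
        assert!(all_posts_hidden_nsfw(&posts, false));
        assert!(!all_posts_hidden_nsfw(&posts, true));
    }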
@@ -45,28 +56,36 @@
           <a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
           <span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
         </summary>
-        <p class="comment_body">{{ post.body }}</p>
+        <p class="comment_body">{{ post.body|safe }}</p>
       </details>
       </div>
       {% endif %}

       {% endfor %}
+      {% if prefs.use_hls == "on" %}
+        <script src="/hls.min.js"></script>
+        <script src="/playHLSVideo.js"></script>
+      {% endif %}
     </div>
+    {% endif %}

     <footer>
       {% if ends.0 != "" %}
-        <a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
+        <a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}" accesskey="P">PREV</a>
       {% endif %}

       {% if ends.1 != "" %}
-        <a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
+        <a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}" accesskey="N">NEXT</a>
       {% endif %}
     </footer>
   </div>
+  {% endif %}
   <aside>
+    {% if is_filtered %}
+      <center>(Content from u/{{ user.name }} has been filtered)</center>
+    {% endif %}
     <div class="panel" id="user">
-      <img id="user_icon" src="{{ user.icon }}" alt="User icon">
-      <p id="user_title">{{ user.title }}</p>
+      <img loading="lazy" id="user_icon" src="{{ user.icon }}" alt="User icon">
+      <h1 id="user_title">{{ user.title }}</h1>
       <p id="user_name">u/{{ user.name }}</p>
       <div id="user_description">{{ user.description }}</div>
       <div id="user_details">
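`ends` holds the pagination cursors the footer echoes back as `before`/`after`; the new `accesskey` attributes only add keyboard shortcuts for PREV/NEXT. A sketch of how the NEXT href is assembled, using made-up cursor values:

    fn main() {
        // Illustrative cursors: ends.0 points at the first item on the page, ends.1 at the last.
        let sort = ("new".to_string(), "all".to_string());
        let ends = ("t3_abc123".to_string(), "t3_xyz789".to_string());
        let next = format!("?sort={}&t={}&after={}", sort.0, sort.1, ends.1);
        assert_eq!(next, "?sort=new&t=all&after=t3_xyz789");
    }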
@@ -75,6 +94,31 @@
         <div>{{ user.karma }}</div>
         <div>{{ user.created }}</div>
       </div>
+      <div id="user_actions">
+        {% let name = ["u_", user.name.as_str()].join("") %}
+        <div id="user_subscription">
+          {% if prefs.subscriptions.contains(name) %}
+            <form action="/r/{{ name }}/unsubscribe?redirect={{ redirect_url }}" method="POST">
+              <button class="unsubscribe">Unfollow</button>
+            </form>
+          {% else %}
+            <form action="/r/{{ name }}/subscribe?redirect={{ redirect_url }}" method="POST">
+              <button class="subscribe">Follow</button>
+            </form>
+          {% endif %}
+        </div>
+        <div id="user_filter">
+          {% if prefs.filters.contains(name) %}
+            <form action="/r/{{ name }}/unfilter?redirect={{ redirect_url }}" method="POST">
+              <button class="unfilter">Unfilter</button>
+            </form>
+          {% else %}
+            <form action="/r/{{ name }}/filter?redirect={{ redirect_url }}" method="POST">
+              <button class="filter">Filter</button>
+            </form>
+          {% endif %}
+        </div>
+      </div>
     </div>
   </aside>
 </main>
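Following or filtering a user reuses the subreddit endpoints by addressing the profile as a `u_<name>` community, the same convention Reddit uses for profile subreddits. A small sketch with an illustrative user name:

    fn main() {
        let user_name = "example_user"; // illustrative
        let name = ["u_", user_name].join("");
        assert_eq!(name, "u_example_user");

        // The Follow button then posts to the ordinary subreddit subscribe endpoint:
        let action = format!("/r/{}/subscribe", name);
        assert_eq!(action, "/r/u_example_user/subscribe");
    }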
@@ -1,6 +1,6 @@
 {% macro options(current, values, default) -%}
   {% for value in values %}
-    <option value="{{ value }}" {% if current == value || (current == "" && value == default) %}selected{% endif %}>
+    <option value="{{ value }}" {% if current == value.to_string() || (current == "" && value.to_string() == default.to_string()) %}selected{% endif %}>
       {{ format!("{}{}", value.get(0..1).unwrap_or_default().to_uppercase(), value.get(1..).unwrap_or_default()) }}
     </option>
   {% endfor %}
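The added `.to_string()` calls normalise both sides of the comparison to `String`. The likely motivation (an assumption, but easy to reproduce) is that iterating over a literal array yields a doubly-borrowed `&&str`, which a `String` cannot be compared with directly:

    fn main() {
        let current = String::from("top");
        let values = ["hot", "new", "top", "controversial"];
        for value in &values {
            // `value` is `&&str`; `current == value` would not compile, but comparing
            // against `value.to_string()` (as the macro now does) works in every case.
            let selected = current == value.to_string();
            println!("{value}: {selected}");
        }
    }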
@@ -8,7 +8,7 @@

 {% macro sort(root, methods, selected) -%}
   {% for method in methods %}
-    <a {% if method == selected %}class="selected"{% endif %} href="{{ root }}/{{ method }}">
+    <a {% if method.to_string() == selected.to_string() %}class="selected"{% endif %} href="{{ root }}/{{ method }}">
       {{ format!("{}{}", method.get(0..1).unwrap_or_default().to_uppercase(), method.get(1..).unwrap_or_default()) }}
     </a>
   {% endfor %}
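Both macros build the visible label by uppercasing the first character of the value; the expression is ordinary Rust and can be tried standalone:

    fn main() {
        let method = "comments";
        // Same expression the macros use to turn "comments" into "Comments".
        let label = format!(
            "{}{}",
            method.get(0..1).unwrap_or_default().to_uppercase(),
            method.get(1..).unwrap_or_default()
        );
        assert_eq!(label, "Comments");
    }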
@@ -19,7 +19,7 @@
   <input id="search" type="text" name="q" placeholder="Search" title="Search libreddit" value="{{ search }}">
   {% if root != "/r/" && !root.is_empty() %}
     <div id="inside">
-      <input type="checkbox" name="restrict_sr" id="restrict_sr">
+      <input type="checkbox" name="restrict_sr" id="restrict_sr" checked>
       <label for="restrict_sr" class="search_label" title="Restrict search to this subreddit">in {{ root }}</label>
     </div>
   {% endif %}
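The `restrict_sr` box is now pre-checked, and it is only rendered when the search form sits inside a concrete subreddit; the guard is the `root` check above, mirrored here as a plain function for clarity (illustrative only):

    fn main() {
        // Mirrors `root != "/r/" && !root.is_empty()` from the template.
        fn show_restrict_sr(root: &str) -> bool {
            root != "/r/" && !root.is_empty()
        }
        assert!(show_restrict_sr("/r/rust"));
        assert!(!show_restrict_sr("/r/"));
        assert!(!show_restrict_sr(""));
    }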
@@ -34,11 +34,10 @@
 {%- endmacro %}

 {% macro render_flair(flair_parts) -%}
-  {% for flair_part in flair_parts %}{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}');"></span>{% else if flair_part.flair_part_type == "text" && !flair_part.value.is_empty() %}<span>{{ flair_part.value }}</span>{% endif %}{% endfor %}
+  {% for flair_part in flair_parts.clone() %}{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}');"></span>{% else if flair_part.flair_part_type == "text" && !flair_part.value.is_empty() %}<span>{{ flair_part.value }}</span>{% endif %}{% endfor %}
 {%- endmacro %}

 {% macro sub_list(current) -%}
-  {% if prefs.subscriptions.len() > 0 %}
   <details id="feeds">
     <summary>Feeds</summary>
     <div id="feed_list">
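`flair_parts.clone()` requires the flair-part type to implement `Clone`. A sketch of the shape the macro relies on; the field names come from the template, the struct itself is an assumption:

    // Assumed shape; the template reads `flair_part_type` ("emoji" or "text") and `value`
    // (an image URL for emoji parts, display text otherwise).
    #[derive(Clone)]
    struct FlairPart {
        flair_part_type: String,
        value: String,
    }

    fn main() {
        let parts = vec![FlairPart { flair_part_type: "text".into(), value: "Discussion".into() }];
        let rendered_twice = (parts.clone(), parts); // cloning lets the same flair render in several places
        assert_eq!(rendered_twice.0.len(), 1);
    }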
@@ -46,71 +45,114 @@
       <a href="/">Home</a>
       <a href="/r/popular">Popular</a>
       <a href="/r/all">All</a>
-      <p>REDDIT FEEDS</p>
-      {% for sub in prefs.subscriptions %}
-        <a href="/r/{{ sub }}" {% if sub == current %}class="selected"{% endif %}>{{ sub }}</a>
-      {% endfor %}
+      {% if prefs.subscriptions.len() > 0 %}
+        <p>REDDIT FEEDS</p>
+        {% for sub in prefs.subscriptions %}
+          <a href="/r/{{ sub }}" {% if sub == current %}class="selected"{% endif %}>{{ sub }}</a>
+        {% endfor %}
+      {% endif %}
     </div>
   </details>
+{%- endmacro %}
+
+{% macro render_hls_notification(redirect_url) -%}
+  {% if post.post_type == "video" && !post.media.alt_url.is_empty() && prefs.hide_hls_notification != "on" %}
+    <div class="post_notification"><p><a href="/settings/update/?use_hls=on&redirect={{ redirect_url }}">Enable HLS</a> to view with audio, or <a href="/settings/update/?hide_hls_notification=on&redirect={{ redirect_url }}">disable this notification</a></p></div>
   {% endif %}
 {%- endmacro %}

 {% macro post_in_list(post) -%}
-  <div class="post {% if post.flags.stickied %}stickied{% endif %}">
+  <div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
     <p class="post_header">
-      <a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
+      {% let community -%}
+      {% if post.community.starts_with("u_") -%}
+        {% let community = format!("u/{}", &post.community[2..]) -%}
+      {% else -%}
+        {% let community = format!("r/{}", post.community) -%}
+      {% endif -%}
+      <a class="post_subreddit" href="/{{ community }}">{{ community }}</a>
       <span class="dot">•</span>
-      <a class="post_author" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
+      <a class="post_author {{ post.author.distinguished }}" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
       <span class="dot">•</span>
       <span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
+      {% if !post.awards.is_empty() %}
+        {% for award in post.awards.clone() %}
+          <span class="award" title="{{ award.name }}">
+            <img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
+          </span>
+        {% endfor %}
+      {% endif %}
     </p>
-    <p class="post_title">
+    <h2 class="post_title">
       {% if post.flair.flair_parts.len() > 0 %}
         <a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
           class="post_flair"
-          style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call render_flair(post.flair.flair_parts) %}</a>
+          style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};"
+          dir="ltr">{% call render_flair(post.flair.flair_parts) %}</a>
       {% endif %}
       <a href="{{ post.permalink }}">{{ post.title }}</a>{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
-    </p>
+    </h2>
     <!-- POST MEDIA/THUMBNAIL -->
     {% if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "image" %}
-      <a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
-        <svg
-          width="{{ post.media.width }}px"
-          height="{{ post.media.height }}px"
-          xmlns="http://www.w3.org/2000/svg">
-          <image width="100%" height="100%" href="{{ post.media.url }}"/>
-          <desc>
-            <img alt="Post image" src="{{ post.media.url }}"/>
-          </desc>
-        </svg>
-      </a>
+      <div class="post_media_content">
+        <a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
+          <svg
+            {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}class="post_nsfw_blur"{% endif %}
+            width="{{ post.media.width }}px"
+            height="{{ post.media.height }}px"
+            xmlns="http://www.w3.org/2000/svg">
+            <image width="100%" height="100%" href="{{ post.media.url }}"/>
+            <desc>
+              <img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
+            </desc>
+          </svg>
+        </a>
+      </div>
     {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
-      <video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" controls loop autoplay><a href={{ post.media.url }}>Video</a></video>
+      <div class="post_media_content">
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+      </div>
     {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
-      <video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls autoplay><a href={{ post.media.url }}>Video</a></video>
+      {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
+        <div class="post_media_content">
+          <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
+            <source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
+            <source src="{{ post.media.url }}" type="video/mp4" />
+          </video>
+        </div>
+      {% else %}
+        <div class="post_media_content">
+          <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+        </div>
+        {% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26").replace("+", "%2B"), post.id)) %}
+      {% endif %}
    {% else if post.post_type != "self" %}
-      <a class="post_thumbnail {% if post.thumbnail.url.is_empty() %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media.url }}{% else %}{{ post.permalink }}{% endif %}">
+      <a class="post_thumbnail {% if post.thumbnail.url.is_empty() %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media.url }}{% else %}{{ post.permalink }}{% endif %}" rel="nofollow">
        {% if post.thumbnail.url.is_empty() %}
          <svg viewBox="0 0 100 106" width="140" height="53" xmlns="http://www.w3.org/2000/svg">
            <title>Thumbnail</title>
            <path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
          </svg>
        {% else %}
-          <svg width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
-            <image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
-            <desc>
-              <img alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
-            </desc>
-          </svg>
+          <div style="max-width:{{ post.thumbnail.width }}px;max-height:{{ post.thumbnail.height }}px;">
+            <svg {% if post.flags.nsfw && prefs.blur_nsfw=="on" %} class="thumb_nsfw_blur" {% endif %} width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
+              <image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
+              <desc>
+                <img loading="lazy" alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
+              </desc>
+            </svg>
+          </div>
        {% endif %}
        <span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
      </a>
    {% endif %}

    <div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
+    <div class="post_body post_preview">
+      {{ post.body|safe }}
+    </div>
    <div class="post_footer">
-      <a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} comments">{{ post.comments.0 }} comments</a>
+      <a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} {% if post.comments.1 == "1" %}comment{% else %}comments{% endif %}">{{ post.comments.0 }} {% if post.comments.1 == "1" %}comment{% else %}comments{% endif %}</a>
    </div>
  </div>
 {%- endmacro %}
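The new `{% let community %}` block labels profile posts (community names starting with `u_`) as `u/<name>` and everything else as `r/<name>`. The same logic in plain Rust, as a sketch rather than project code:

    fn community_label(community: &str) -> String {
        match community.strip_prefix("u_") {
            // post on a profile, e.g. "u_example_user" -> "u/example_user"
            Some(user) => format!("u/{}", user),
            None => format!("r/{}", community),
        }
    }

    fn main() {
        assert_eq!(community_label("u_example_user"), "u/example_user");
        assert_eq!(community_label("rust"), "r/rust");
    }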
templates/wall.html (new file, 13 lines)
@@ -0,0 +1,13 @@
+{% extends "base.html" %}
+{% block title %}{{ msg }}{% endblock %}
+{% block sortstyle %}{% endblock %}
+{% block content %}
+  <div id="wall">
+    <h1>{{ title }}</h1>
+    <br>
+    <p>{{ msg }}</p>
+    <form action="/r/{{ sub }}?redir={{ url }}" method="POST">
+      <input id="save" type="submit" value="Continue">
+    </form>
+  </div>
+{% endblock %}
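templates/wall.html is a new interstitial page, presumably shown before gated content, with a single Continue form that posts back to `/r/{{ sub }}?redir={{ url }}`. The variables it references suggest a backing Askama struct roughly like the one below; the struct name and field types are inferred, not taken from the source:

    use askama::Template; // the project's template engine; dependency and template file assumed present

    // Fields inferred from the variables wall.html uses ({{ title }}, {{ msg }}, {{ sub }}, {{ url }});
    // the real struct may well carry more, e.g. shared preferences for base.html.
    #[derive(Template)]
    #[template(path = "wall.html")]
    struct WallTemplate {
        title: String,
        msg: String,
        sub: String,
        url: String,
    }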
@@ -22,8 +22,8 @@
       <div>Wiki</div>
     </div>
     <div id="wiki">
-      {{ wiki }}
+      {{ wiki|safe }}
     </div>
   </div>
 </main>
 {% endblock %}
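`|safe` here (and on `sub.info` and `post.body` above) matters because Askama HTML-escapes interpolations by default, so without it the pre-rendered wiki HTML would show up as literal markup. A minimal, self-contained illustration; the inline template is an assumption for demonstration only:

    use askama::Template;

    #[derive(Template)]
    #[template(source = "{{ wiki }} | {{ wiki|safe }}", ext = "html")]
    struct Demo<'a> {
        wiki: &'a str,
    }

    fn main() {
        let rendered = Demo { wiki: "<p>hello</p>" }.render().unwrap();
        // The plain interpolation is escaped; the |safe one is passed through verbatim.
        assert_eq!(rendered, "&lt;p&gt;hello&lt;/p&gt; | <p>hello</p>");
    }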