Compare commits
451 Commits
SHA1 | Author | Date | |
---|---|---|---|
ca3f6c0579 | |||
decc9e5139 | |||
d27bd782ce | |||
4defb58f2a | |||
ba42fc066f | |||
2cd35fb3b6 | |||
b9af6f47f3 | |||
73732a2a44 | |||
43ed9756dc | |||
8bb247af3b | |||
ed05f5a092 | |||
4f09333cd7 | |||
31bf8c802e | |||
e4f9bd7b8d | |||
83a667347d | |||
499a56aed4 | |||
928907086c | |||
dc9fbc1a05 | |||
7ae7a88eed | |||
536a766960 | |||
e34329cfee | |||
97a0680bd0 | |||
c1560f4eba | |||
242ffab0da | |||
1211d781d0 | |||
9e4066658c | |||
560de4e91f | |||
bd1c890961 | |||
6f799b2617 | |||
38e176f59f | |||
8248eca95c | |||
ffc3bfe72d | |||
d713746407 | |||
21b45760eb | |||
e3fb93946a | |||
b6134a39d0 | |||
c844655c98 | |||
cac83493da | |||
b47cfd1ba5 | |||
28ca3589ed | |||
3cf787cf98 | |||
46e22cf74e | |||
5c2e134924 | |||
c6244585fa | |||
9f1ba274eb | |||
93ed1c6f0c | |||
6ce82c36fb | |||
2974d92e30 | |||
34dfcb2512 | |||
6b42e97bda | |||
49bfe4d27c | |||
c8965ae51b | |||
0b64a52a63 | |||
a18db1e2b7 | |||
3b53e5be4c | |||
42e8351285 | |||
b3e4b7bfae | |||
4a42a25ed3 | |||
2bacaa163f | |||
48c3a8c0d0 | |||
c23d2dc50b | |||
46dbd88d91 | |||
f0f484288e | |||
90d39b121f | |||
44dee302c9 | |||
c7f9386c01 | |||
66ac72beab | |||
14f9ac4ca7 | |||
6a7f725c12 | |||
2533e8cef5 | |||
772d20615b | |||
0bb1677520 | |||
da4883db29 | |||
d50b6ca4b3 | |||
4c66e75f6b | |||
966e0ce921 | |||
ab886d1e67 | |||
dc7e087ed0 | |||
0d6e18d97d | |||
f872baa1fe | |||
9b5176f7b9 | |||
60c89197e5 | |||
7d94876d90 | |||
467342edf4 | |||
3c5b4037e2 | |||
a81502dde1 | |||
0ce2d9054e | |||
a5203fe8dd | |||
038fafa378 | |||
e15c15c390 | |||
07363e47a9 | |||
fb7faf6477 | |||
b14b4ff551 | |||
4b1195f221 | |||
a472461ee8 | |||
baf5e3d7ee | |||
f209757ed6 | |||
4173362ce1 | |||
b2ae5e486f | |||
cda19a1912 | |||
f0b69f8a4a | |||
118ff9485c | |||
4a51b7cfb0 | |||
f877face80 | |||
f0e8deb000 | |||
e70dfe2c0b | |||
2e89a85858 | |||
e59b2b1346 | |||
1c36549134 | |||
5fb88d4744 | |||
6c7188a1b9 | |||
84009fbb8e | |||
bf783c2f3a | |||
213babb057 | |||
7dbc02d930 | |||
10873dd0c6 | |||
c0d1519341 | |||
8709c49f39 | |||
56cfeba9e5 | |||
890d5ae625 | |||
caa8f1d49e | |||
dd51b23dc4 | |||
52d9698879 | |||
20f6945160 | |||
10c73fad7f | |||
2bddc952cb | |||
1de01d7283 | |||
9183ce1921 | |||
a197df89ff | |||
be2a1d876b | |||
686d61801f | |||
5d643277bc | |||
a3ec44149c | |||
83ba0fb913 | |||
55e9915bb0 | |||
5cd5b553b0 | |||
2b2bd8421b | |||
47d01a0dca | |||
0a69937238 | |||
6d08f2dd24 | |||
4a06882dc8 | |||
3e567d9acf | |||
8034594006 | |||
2f3315dcfc | |||
df118764df | |||
d78f82649e | |||
80fb3a5c18 | |||
518d5753a7 | |||
de38f7ef18 | |||
dd67b52199 | |||
9cfab348eb | |||
e1f7b6d0c0 | |||
a606e48435 | |||
2091f26bda | |||
b3341b49c0 | |||
65e4ceff7b | |||
bacb22f7f9 | |||
902c9a6e42 | |||
c586de66ba | |||
e466be8946 | |||
bed3465475 | |||
8560e8a37a | |||
3652342f46 | |||
58127b17d8 | |||
2f4deb221a | |||
38230ed473 | |||
71501b064c | |||
47a58ea05c | |||
14ecf3cf60 | |||
aa7c8c85df | |||
0cb7031c36 | |||
93cfc713c6 | |||
ff8685ae4c | |||
f06320a4ae | |||
809be42e01 | |||
58ca085521 | |||
4a40e16277 | |||
fee2cb1b56 | |||
8785bc95f5 | |||
16454213cf | |||
6feb347c27 | |||
e731cfbac4 | |||
008924fff8 | |||
ebbdd7185f | |||
402b3149e1 | |||
ac5ef89dff | |||
7edca18f8d | |||
cf45d53fdd | |||
2a475d127a | |||
3fa523e67b | |||
3fbb433e37 | |||
5fbcfd850f | |||
c758db84ec | |||
90d3063f93 | |||
82a601d534 | |||
12a1b3f459 | |||
e23eaf0be0 | |||
821709c8d2 | |||
653b0e7024 | |||
c7a2c43287 | |||
9824370771 | |||
d87b96d0ea | |||
6eae4bc47a | |||
1bcb070fbb | |||
24bc758090 | |||
ffbb1cf7cd | |||
cbf1f540d6 | |||
f8e0d2d4b9 | |||
8a27b2bac8 | |||
69941d9efd | |||
956de50419 | |||
d790264a62 | |||
f4f2d8a377 | |||
dd908c9f68 | |||
9e1948733d | |||
9df1dfae32 | |||
cfbee1bb81 | |||
8430cbc6f3 | |||
a9dd2e6f2c | |||
36964982fb | |||
0742a33304 | |||
7f320b3143 | |||
58f4fc4e77 | |||
7d8faefad0 | |||
ba9b5afd4e | |||
ae09f77bf6 | |||
5030c418de | |||
4ccd6b1751 | |||
7d17aa0627 | |||
4b73e2d914 | |||
0a140a6ffc | |||
e837d84105 | |||
f6d791ccd9 | |||
effaeb7508 | |||
6257faf9dc | |||
ee0da63862 | |||
971f14bb55 | |||
9a1733ac99 | |||
c32d62fbd5 | |||
1a0d12d2ff | |||
2a27850914 | |||
bfcc4c985d | |||
1653d4fb4c | |||
79027c4c75 | |||
269bb0bfb6 | |||
7933d840b3 | |||
b875e9377e | |||
8c80946121 | |||
21d96e261f | |||
9c58d23b41 | |||
4ae2191392 | |||
d62a3ab86b | |||
9b7cd1da5a | |||
a301f1ecb6 | |||
f14639ee00 | |||
b527735f6f | |||
8cc01c58f3 | |||
a1d800a0f0 | |||
449899962a | |||
dc2030e6f3 | |||
ef5a1cd66e | |||
11e4ff42ed | |||
c71df35b22 | |||
345308a9ac | |||
75bbcefbec | |||
49a6168607 | |||
f55ea5a353 | |||
30c33d91e1 | |||
00b135fb0f | |||
5fe9ce8d7b | |||
8c04365049 | |||
d5b1c3a5bb | |||
f038aa61f4 | |||
f72c9d39be | |||
e6c2d08425 | |||
e901e99278 | |||
acd2cff747 | |||
8f913e696c | |||
226d39328c | |||
b2ad2f636c | |||
18fe7ff8cf | |||
077c222a4e | |||
2270b6cf95 | |||
758b627660 | |||
baf7272cfd | |||
6641e242af | |||
610fcfbf87 | |||
dea7f33910 | |||
c299e128ab | |||
53fa946c75 | |||
5d44a071f9 | |||
e29e203188 | |||
6ead6e08dc | |||
7360503234 | |||
140c1b1bfa | |||
040982f1fd | |||
4b0677d10e | |||
616751e054 | |||
5df957f193 | |||
7f9cb1b35a | |||
c030771d36 | |||
a562395c26 | |||
2bcdf68e40 | |||
72eaa685d0 | |||
899a414cf6 | |||
524538eeb8 | |||
a184559c21 | |||
1c9fd46e98 | |||
738941d830 | |||
06ab7a4181 | |||
6981d94417 | |||
dd60cb5b2b | |||
1d57e29d56 | |||
2d973707f3 | |||
cbb937b494 | |||
d45ee03122 | |||
162e00b243 | |||
7a32ba087e | |||
801216dfe9 | |||
21763c51cd | |||
138f8320e9 | |||
571ba3392c | |||
090ca1a140 | |||
6127f2a90c | |||
ef9bc791e1 | |||
894323becf | |||
4c89d31948 | |||
471d181284 | |||
0e48c66b8c | |||
a0bc1732cf | |||
6d5fd1dbf6 | |||
0f6e73dd87 | |||
151490faf0 | |||
fdf60e7255 | |||
ab102ca32c | |||
998b301229 | |||
d7839899e6 | |||
2385fa33ec | |||
1fd688eeed | |||
65543a43b2 | |||
0099021478 | |||
3a9b2dba32 | |||
59021b9331 | |||
078d6fe25b | |||
373ce55203 | |||
aef0442e9d | |||
21ff8d7b6f | |||
bca2a7e540 | |||
0c014ad41b | |||
32b8637c7e | |||
5ed122d92c | |||
45660816ce | |||
d19e73f059 | |||
18684c934b | |||
cf4c5e1fe8 | |||
7ef4a20aff | |||
292f8fbbb7 | |||
735f79d80b | |||
a85a4278f6 | |||
dbe617d7eb | |||
842d97e9fa | |||
0bf5576427 | |||
dd027bff4b | |||
f95ef51017 | |||
740641cb4e | |||
09c98c8da6 | |||
33c8bdffb9 | |||
5ab88567de | |||
c6627ceece | |||
d9affcdefc | |||
96607256fc | |||
eb9a0dcb4a | |||
89fa0d5489 | |||
22589c8296 | |||
b0540d2c57 | |||
41c4661bbb | |||
d2314580a9 | |||
a4d77926b6 | |||
bbe7024323 | |||
32e1469e11 | |||
2d4ca2379f | |||
374f53eb32 | |||
add7efea3c | |||
065d82a5f5 | |||
1895bbc025 | |||
65f1a2afb2 | |||
6eb9e6f0c0 | |||
eb735a42fe | |||
541c741bde | |||
7a33ed3434 | |||
48d2943f72 | |||
6bbc90bc0d | |||
4d18dc0bb8 | |||
6dbd002acd | |||
bf6245a505 | |||
91746908a1 | |||
bb8273bab4 | |||
62bcc31305 | |||
08683fa5a6 | |||
c58b077330 | |||
f445c42f55 | |||
a0866b251e | |||
aa819544f6 | |||
fac56d7f87 | |||
ef1ad17234 | |||
b8cdc605a2 | |||
ef2f9ad12b | |||
b13874d0db | |||
3d142afd03 | |||
7fcb7fcfed | |||
747d5a7c67 | |||
770c4d3630 | |||
e7b448a282 | |||
c7c787dff1 | |||
59a34a0e85 | |||
6e8cf69227 | |||
3444989f9a | |||
7e96bb3d80 | |||
0adbb1556e | |||
710eecdb9d | |||
8a57fa8a1d | |||
b33d79ed9b | |||
0f506fc41b | |||
c9cd825d55 | |||
e63384e6a6 | |||
3260a4d596 | |||
da5c4603d9 | |||
b50fa6f3ae | |||
aa7b4b2af7 | |||
2b0193f5ea | |||
2185d895c0 | |||
9c1a932214 | |||
8c0269af1c | |||
df89c5076e | |||
f819ad2bc6 | |||
f5884a5270 | |||
c046d00060 | |||
5934e34ea0 | |||
463b44ac52 | |||
b40d21e559 | |||
a422a74747 | |||
4124fa87d3 | |||
1dd0c4ee20 | |||
0dd114c166 | |||
67090e9b08 | |||
d97fb49fde | |||
9263b0657f | |||
a3384cbaa6 | |||
5d26b5c764 | |||
516403ee47 | |||
5ea504e6e8 |
33 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@@ -0,0 +1,33 @@
---
name: 🐛 Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

## Describe the bug
<!--
A clear and concise description of what the bug is.
-->

## To reproduce

<!--
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
-->

## Expected behavior
<!--
A clear and concise description of what you expected to happen.
-->

## Additional context
<!--
Add any other context about the problem here.
-->

28 .github/ISSUE_TEMPLATE/feature_request.md vendored Normal file

@@ -0,0 +1,28 @@
---
name: 💡 Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

## Is your feature request related to a problem? Please describe.
<!--
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-->

## Describe the solution you'd like
<!--
A clear and concise description of what you want to happen.
-->

## Describe alternatives you've considered
<!--
A clear and concise description of any alternative solutions or features you've considered.
-->

## Additional context
<!--
Add any other context or screenshots about the feature request here.
-->

36 .github/workflows/docker-arm.yml vendored Normal file

@@ -0,0 +1,36 @@
name: Docker ARM Build

on:
  push:
    paths-ignore:
      - "**.md"
    branches:
      - master

jobs:
  build-docker:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          platforms: all
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          version: latest
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile.arm
          platforms: linux/arm64
          push: true
          tags: spikecodes/libreddit:arm

30 .github/workflows/rust.yml vendored

@@ -11,7 +11,7 @@ env:

jobs:
  build:
    runs-on: ubuntu-latest
    runs-on: ubuntu-18.04

    steps:
      - uses: actions/checkout@v2
@@ -21,9 +21,35 @@ jobs:

      - name: Build
        run: cargo build --release


      - uses: actions/upload-artifact@v2.2.1
        name: Upload a Build Artifact
        with:
          name: libreddit
          path: target/release/libreddit

      - name: Versions
        id: version
        run: |
          echo "::set-output name=version::$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')"
          echo "::set-output name=tag::$(git describe --tags)"

      - name: Calculate SHA512 checksum
        run: sha512sum target/release/libreddit > libreddit.sha512

      - name: Release
        uses: softprops/action-gh-release@v1
        if: github.base_ref != 'master'
        with:
          tag_name: ${{ steps.version.outputs.version }}
          name: ${{ steps.version.outputs.version }} - NAME
          draft: true
          files: |
            target/release/libreddit
            libreddit.sha512
          body: |
            - ${{ github.event.head_commit.message }} ${{ github.sha }}

            See full list of changes [here](https://github.com/spikecodes/libreddit/compare/${{ steps.version.outputs.tag }}...${{ steps.version.outputs.version }}).
        env:
          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}

2 .gitignore vendored

@@ -1 +1 @@
/target
/target

1 CODEOWNERS Normal file

@@ -0,0 +1 @@
* @spikecodes

1797 Cargo.lock generated

File diff suppressed because it is too large.

31 Cargo.toml

@@ -3,22 +3,23 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
version = "0.2.5"
version = "0.14.6"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"

[features]
default = ["proxy"]
proxy = ["actix-web/rustls", "base64"]

[dependencies]
base64 = { version = "0.13.0", optional = true }
actix-web = "3.2.0"
reqwest = { version = "0.10", default_features = false, features = ["rustls-tls"] }
askama = "0.8.0"
serde = "1.0.117"
serde_json = "1.0"
chrono = "0.4.19"
async-recursion = "0.3.1"
url = "2.2.0"
regex = "1"
askama = { version = "0.10.5", default-features = false }
async-recursion = "0.3.2"
cached = "0.23.0"
clap = { version = "2.33.3", default-features = false }
regex = "1.5.4"
serde = { version = "1.0.126", features = ["derive"] }
cookie = "0.15.0"
futures-lite = "1.11.3"
hyper = { version = "0.14.8", features = ["full"] }
hyper-rustls = "0.22.1"
route-recognizer = "0.3.0"
serde_json = "1.0.64"
tokio = { version = "1.6.0", features = ["full"] }
time = "0.2.26"
url = "2.2.2"

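The manifest above swaps the old Actix Web and reqwest stack for Hyper, hyper-rustls, and Tokio. As a rough, hedged sketch of what the new stack looks like in use (assuming hyper 0.14 and hyper-rustls 0.22 as pinned above; it mirrors the patterns in `src/client.rs` further down and is illustrative only, not code from this diff — the URL is just an example):

```rust
use hyper::{body, Body, Client, Request};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // TLS via rustls, using the platform's native root certificates.
    let https = hyper_rustls::HttpsConnector::with_native_roots();
    let client: Client<_, Body> = Client::builder().build(https);

    // Build and send a plain GET request.
    let req = Request::get("https://www.reddit.com/r/rust/about.json")
        .header("User-Agent", "web:libreddit:example")
        .body(Body::empty())?;
    let res = client.request(req).await?;
    let status = res.status();

    // Aggregate the response body into a single byte buffer.
    let bytes = body::to_bytes(res.into_body()).await?;
    println!("{} -> {} bytes", status, bytes.len());
    Ok(())
}
```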
39 Dockerfile

@@ -1,9 +1,36 @@
FROM rust:alpine as builder
WORKDIR /usr/src/libreddit
COPY . .
RUN apk add --no-cache g++ openssl-dev
RUN cargo install --path .
####################################################################################################
## Builder
####################################################################################################
FROM rust:alpine AS builder

RUN apk add --no-cache musl-dev

WORKDIR /libreddit

COPY . .

RUN cargo build --target x86_64-unknown-linux-musl --release

####################################################################################################
## Final image
####################################################################################################
FROM alpine:latest
COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit

# Import ca-certificates from builder
COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
COPY --from=builder /etc/ssl/certs /etc/ssl/certs

# Copy our build
COPY --from=builder /libreddit/target/x86_64-unknown-linux-musl/release/libreddit /usr/local/bin/libreddit

# Use an unprivileged user.
RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
USER libreddit

# Tell Docker to expose port 8080
EXPOSE 8080

# Run a healthcheck every minute to make sure Libreddit is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

CMD ["libreddit"]

36 Dockerfile.arm Normal file

@@ -0,0 +1,36 @@
####################################################################################################
## Builder
####################################################################################################
FROM rust:alpine AS builder

RUN apk add --no-cache g++

WORKDIR /usr/src/libreddit

COPY . .

RUN cargo install --path .

####################################################################################################
## Final image
####################################################################################################
FROM alpine:latest

# Import ca-certificates from builder
COPY --from=builder /usr/share/ca-certificates /usr/share/ca-certificates
COPY --from=builder /etc/ssl/certs /etc/ssl/certs

# Copy our build
COPY --from=builder /usr/local/cargo/bin/libreddit /usr/local/bin/libreddit

# Use an unprivileged user.
RUN adduser --home /nonexistent --no-create-home --disabled-password libreddit
USER libreddit

# Tell Docker to expose port 8080
EXPOSE 8080

# Run a healthcheck every minute to make sure Libreddit is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1

CMD ["libreddit"]

205 README.md

@@ -2,96 +2,98 @@

> An alternative private front-end to Reddit

Libre + Reddit = [Libreddit](https://libredd.it)


- 🚀 Fast: written in Rust for blazing fast speeds and safety
- ☁️ Light: no JavaScript, no ads, no tracking
---

**10 second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit).

- 🚀 Fast: written in Rust for blazing fast speeds and memory safety
- ☁️ Light: no JavaScript, no ads, no tracking, no bloat
- 🕵 Private: all requests are proxied through the server, including media
- 🦺 Safe: does not rely on Reddit OAuth or require a Reddit API Key
- 🔒 Secure: strong [Content Security Policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) prevents browser requests to Reddit

Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libredd.it/r/unpopularopinion) without being [tracked](#reddit).
---

## Contents
- [Screenshot](#screenshot)
- [Instances](#instances)
- [About](#about)
- [Elsewhere](#elsewhere)
- [Info](#info)
- [Teddit Comparison](#how-does-it-compare-to-teddit)
- [Comparison](#comparison)
- [Speed](#speed)
- [Privacy](#privacy)
- [Installation](#installation)
- [Cargo](#a-cargo)
- [Docker](#b-docker)
- [AUR](#c-aur)
- [GitHub Releases](#d-github-releases)
- [Repl.it](#e-replit)
- Developing
- [Deployment](#deployment)
- [Building](#building)
**BTC:** bc1qwyxjnafpu3gypcpgs025cw9wa7ryudtecmwa6y

## Screenshot
**XMR:** 45FJrEuFPtG2o7QZz2Nps77TbHD4sPqxViwbdyV9A6ktfHiWs47UngG5zXPcLoDXAc8taeuBgeNjfeprwgeXYXhN3C9tVSR


---

## Instances
# Instances

Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new) to have your [selfhosted instance](#deployment) listed here!

| Website | Country | Cloudflare |
|-|-|-|
| [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | ✅ |
| [libreddit.insanity.wtf](https://libreddit.insanity.wtf) | 🇺🇸 US | ✅ |
| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | ✅ |
| [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | |
| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | ✅ |
| [libreddit.bcow.xyz](https://libreddit.bcow.xyz) | 🇺🇸 US | |
| [libreddit.40two.app](https://libreddit.40two.app) | 🇳🇱 NL | |
| [reddit.invak.id](https://reddit.invak.id) | 🇧🇬 BG | |
| [reddit.phii.me](https://reddit.phii.me) | 🇺🇸 US | |
| [lr.riverside.rocks](https://lr.riverside.rocks) | 🇺🇸 US | |
| [libreddit.silkky.cloud](https://libreddit.silkky.cloud) | 🇫🇮 FI | |
| [libreddit.database.red](https://libreddit.database.red) | 🇺🇸 US | ✅ |
| [libreddit.exonip.de](https://libreddit.exonip.de) | 🇩🇪 DE | |
| [libreddit.domain.glass](https://libreddit.domain.glass) | 🇺🇸 US | ✅ |
| [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |
| [fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion](http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion) | 🇩🇪 DE | |
| [dflv6yjt7il3n3tggf4qhcmkzbti2ppytqx3o7pjrzwgntutpewscyid.onion](http://dflv6yjt7il3n3tggf4qhcmkzbti2ppytqx3o7pjrzwgntutpewscyid.onion/) | 🇺🇸 US | |
| [kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion](http://kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion/) | 🇳🇱 NL | |

A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare). The checkmark will not be listed for a site which uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.

## About
---

### Elsewhere
Find Libreddit on...
- 💬 Matrix: [#libreddit:matrix.org](https://matrix.to/#/#libreddit:matrix.org)
- 🐋 Docker: [spikecodes/libreddit](https://hub.docker.com/r/spikecodes/libreddit)
- :octocat: GitHub: [spikecodes/libreddit](https://github.com/spikecodes/libreddit)
- 🦊 GitLab: [spikecodes/libreddit](https://gitlab.com/spikecodes/libreddit)
# About

### Info
Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/spikecodes/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).

## Built with

- [Rust](https://www.rust-lang.org/) - Programming language
- [Hyper](https://github.com/hyperium/hyper) - HTTP server and client
- [Askama](https://github.com/djc/askama) - Templating engine
- [Rustls](https://github.com/ctz/rustls) - TLS library

## Info
Libreddit hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Libreddit was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram.

Libreddit currently implements most of Reddit's functionalities but still lacks a few features that are being worked on below.
Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/spikecodes/libreddit/issues).

### How does it compare to Teddit?
## How does it compare to Teddit?

Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Libreddit into an even more polished product.

If you are looking to compare, the biggest differences I have noticed are:
- Libreddit is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective.
- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Actix Web](https://actix.rs), which was [benchmarked as the fastest web server for single queries](https://www.techempower.com/benchmarks/#hw=ph&test=db).
- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Hyper](https://hyper.rs), a speedy and lightweight HTTP server/client implementation.

## Comparison
---

# Comparison

This section outlines how Libreddit compares to Reddit.

### Speed
## Speed

Lasted tested December 21, 2020.
Lasted tested Jan 17, 2021.

Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Flibredd.it), [Reddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Fwww.reddit.com%2F)).

| | Libreddit | Reddit |
|---------------------|---------------|-----------|
| Requests | 22 | 70 |
| Resource Size | 135 KiB | 2,222 KiB |
| Time to Interactive | **1.7 s** | **11.5 s**|
| | Libreddit | Reddit |
|------------------------|---------------|------------|
| Requests | 20 | 70 |
| Resource Size (card ui)| 1,224 KiB | 1,690 KiB |
| Time to Interactive | **1.5 s** | **11.2 s** |

### Privacy
## Privacy

#### Reddit
### Reddit

**Logging:** According to Reddit's [privacy policy](https://www.redditinc.com/policies/privacy-policy), they "may [automatically] log information" including:
- IP address
@@ -120,21 +122,23 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot
- Third-Party Cookies
- Third-Party Site

#### Libreddit
### Libreddit

For transparency, I hope to describe all the ways Libreddit handles user privacy.

**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid troubleshooting but nothing else.
**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs when Reddit is ratelimiting Libreddit and when Reddit's JSON responses can't be parsed. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid with troubleshooting.

**DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare meaning Cloudflare doesn't have access to user traffic.

**Cookies:** Libreddit uses no cookies currently but eventually, I plan to add a configuration page where users can store an optional cookie to save their preferred theme, default sorting algorithm, or default layout.
**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.

**Hosting:** The official instances (`libredd.it` and `libreddit.spike.codes`) are hosted on [Repl.it](https://repl.it/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.
**Hosting:** The official instances are hosted on [Replit](https://replit.com/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.

## Installation
---

### A) Cargo
# Installation

## 1) Cargo

Make sure Rust stable is installed along with `cargo`, Rust's package manager.

@@ -142,58 +146,105 @@ Make sure Rust stable is installed along with `cargo`, Rust's package manager.
```
cargo install libreddit
```

### B) Docker
## 2) Docker

Deploy the Docker image of Libreddit:
Deploy the [Docker image](https://hub.docker.com/r/spikecodes/libreddit) of Libreddit:
```
docker pull spikecodes/libreddit
docker run -d --name libreddit -p 8080:8080 spikecodes/libreddit
```

Deploy using a different port (in this case, port 80):
```
docker pull spikecodes/libreddit
docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
```

### C) AUR
To deploy on `arm64` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:arm`.

## 3) AUR

For ArchLinux users, Libreddit is available from the AUR as [`libreddit-git`](https://aur.archlinux.org/packages/libreddit-git).

Install:
```
yay -S libreddit-git
```

### D) GitHub Releases
## 4) GitHub Releases

If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/spikecodes/libreddit/releases/latest).
Currently, Libreddit does not have Windows or macOS binaries but those will be available soon.

### E) Repl.it
## 5) Replit

**Note:** Repl.it is a free option but they are *not* private and are monitor server usage to prevent abuse. If you really need a free and easy setup, this method may work best for you.
**Note:** Replit is a free option but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.

1. Create a Repl.it account (see note above)
2. Visit [the official Repl](https://repl.it/@spikethecoder/libreddit) and fork it
1. Create a Replit account (see note above)
2. Visit [the official Repl](https://replit.com/@spikethecoder/libreddit) and fork it
3. Hit the run button to download the latest Libreddit version and start it

In the web preview (defaults to top right), you should see your instance hosted where you can assign a [custom domain](https://docs.repl.it/repls/web-hosting#custom-domains).
In the web preview (defaults to top right), you should see your instance hosted where you can assign a [custom domain](https://docs.replit.com/repls/web-hosting#custom-domains).

## Deployment
---

Once installed, deploy Libreddit (unless you're using Docker) by running:
# Deployment

Once installed, deploy Libreddit to `0.0.0.0:8080` by running:

```
libreddit
```

Specify a custom address for the server by passing the `-a` or `--address` argument:
```
libreddit --address=0.0.0.0:8111
```
## Change Default Settings

Assign a default value for each setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.

| Name | Possible values | Default value |
|-------------------------|------------------------------------------------------------------------------------------|---------------|
| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold"]` | `system` |
| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` |
| `LAYOUT` | `["card", "clean", "compact"]` | `card` |
| `WIDE` | `["on", "off"]` | `off` |
| `COMMENT_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
| `POST_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
| `SHOW_NSFW` | `["on", "off"]` | `off` |
| `USE_HLS` | `["on", "off"]` | `off` |
| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` |

### Examples

```bash
LIBREDDIT_DEFAULT_SHOW_NSFW=on libreddit
```

To disable the media proxy built into Libreddit, run:
```bash
LIBREDDIT_DEFAULT_WIDE=on LIBREDDIT_DEFAULT_THEME=dark libreddit -r
```
libreddit --no-default-features

## Proxying using NGINX

**NOTE** If you're [proxying Libreddit through a NGINX Reverse Proxy](https://github.com/spikecodes/libreddit/issues/122#issuecomment-782226853), add
```nginx
proxy_http_version 1.1;
```
to your NGINX configuration file above your `proxy_pass` line.

## SystemD

You can use the SystemD service available in `contrib/libreddit.service`
(install it on `/etc/systemd/system/libreddit.service`).

That service can be optionally configured in terms of environment variables by
creating a file in `/etc/libreddit.conf`. Use the `contrib/libreddit.conf` as a
template. You can also add the `LIBREDDIT_DEFAULT__{X}` settings explained
above.

When "Proxying using NGINX" where the proxy is on the same machine, you should
guarantee nginx waits for this service to start. Edit
`/etc/systemd/system/libreddit.service.d/reverse-proxy.conf`:

```conf
[Unit]
Before=nginx.service
```

## Building

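The settings table in the README hunk above maps one-to-one onto environment variables of the form `LIBREDDIT_DEFAULT_{X}`. As a hedged illustration of how such defaults can be resolved (a minimal sketch under that naming convention, not Libreddit's actual implementation):

```rust
use std::env;

/// Prefer `LIBREDDIT_DEFAULT_<NAME>` from the environment,
/// otherwise fall back to the documented default value.
fn default_setting(name: &str, fallback: &str) -> String {
    env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())).unwrap_or_else(|_| fallback.to_string())
}

fn main() {
    // Fallbacks mirror the defaults documented in the table above.
    let theme = default_setting("theme", "system");
    let layout = default_setting("layout", "card");
    let show_nsfw = default_setting("show_nsfw", "off");
    println!("theme={}, layout={}, show_nsfw={}", theme, layout, show_nsfw);
}
```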
2 contrib/libreddit.conf Normal file

@@ -0,0 +1,2 @@
ADDRESS=localhost
PORT=12345

15 contrib/libreddit.service Normal file

@@ -0,0 +1,15 @@
[Unit]
Description=libreddit daemon
After=network.service

[Service]
DynamicUser=yes
# Default Values
Environment=ADDRESS=0.0.0.0
Environment=PORT=8080
# Optional Override
EnvironmentFile=-/etc/libreddit.conf
ExecStart=/usr/bin/libreddit -a ${ADDRESS} -p ${PORT}

[Install]
WantedBy=default.target

13 docker-compose.yml Normal file

@@ -0,0 +1,13 @@
version: "3.8"

services:
  web:
    build: .
    restart: always
    container_name: "libreddit"
    ports:
      - 8080:8080
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/settings"]
      interval: 5m
      timeout: 3s

157 src/client.rs Normal file

@@ -0,0 +1,157 @@
use cached::proc_macro::cached;
use futures_lite::{future::Boxed, FutureExt};
use hyper::{body::Buf, client, Body, Request, Response, Uri};
use serde_json::Value;
use std::{result::Result, str::FromStr};

use crate::server::RequestExt;

pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());

// For each parameter in request
for (name, value) in req.params().iter() {
// Fill the parameter value in the url
url = url.replace(&format!("{{{}}}", name), value);
}

stream(&url, &req).await
}

async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
// First parameter is target URL (mandatory).
let url = Uri::from_str(url).map_err(|_| "Couldn't parse URL".to_string())?;

// Prepare the HTTPS connector.
let https = hyper_rustls::HttpsConnector::with_native_roots();

// Build the hyper client from the HTTPS connector.
let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);

let mut builder = Request::get(url);

// Copy useful headers from original request
for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
if let Some(value) = req.headers().get(key) {
builder = builder.header(key, value);
}
}

let stream_request = builder.body(Body::empty()).map_err(|_| "Couldn't build empty body in stream".to_string())?;

client
.request(stream_request)
.await
.map(|mut res| {
let mut rm = |key: &str| res.headers_mut().remove(key);

rm("access-control-expose-headers");
rm("server");
rm("vary");
rm("etag");
rm("x-cdn");
rm("x-cdn-client-region");
rm("x-cdn-name");
rm("x-cdn-server-region");
rm("x-reddit-cdn");
rm("x-reddit-video-features");

res
})
.map_err(|e| e.to_string())
}

fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
// Prepare the HTTPS connector.
let https = hyper_rustls::HttpsConnector::with_native_roots();

// Construct the hyper client from the HTTPS connector.
let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);

// Build request
let builder = Request::builder()
.method("GET")
.uri(&url)
.header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION")))
.header("Host", "www.reddit.com")
.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
.header("Accept-Language", "en-US,en;q=0.5")
.header("Connection", "keep-alive")
.header("Cookie", if quarantine { "_options=%7B%22pref_quarantine_optin%22%3A%20true%7D" } else { "" })
.body(Body::empty());

async move {
match builder {
Ok(req) => match client.request(req).await {
Ok(response) => {
if response.status().to_string().starts_with('3') {
request(
response
.headers()
.get("Location")
.map(|val| val.to_str().unwrap_or_default())
.unwrap_or_default()
.to_string(),
quarantine,
)
.await
} else {
Ok(response)
}
}
Err(e) => Err(e.to_string()),
},
Err(_) => Err("Post url contains non-ASCII characters".to_string()),
}
}
.boxed()
}

// Make a request to a Reddit API and parse the JSON response
#[cached(size = 100, time = 30, result = true)]
pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
// Build Reddit url from path
let url = format!("https://www.reddit.com{}", path);

// Closure to quickly build errors
let err = |msg: &str, e: String| -> Result<Value, String> {
// eprintln!("{} - {}: {}", url, msg, e);
Err(format!("{}: {}", msg, e))
};

// Fetch the url...
match request(url.clone(), quarantine).await {
Ok(response) => {
// asynchronously aggregate the chunks of the body
match hyper::body::aggregate(response).await {
Ok(body) => {
// Parse the response from Reddit as JSON
match serde_json::from_reader(body.reader()) {
Ok(value) => {
let json: Value = value;
// If Reddit returned an error
if json["error"].is_i64() {
Err(
json["reason"]
.as_str()
.unwrap_or_else(|| {
json["message"].as_str().unwrap_or_else(|| {
eprintln!("{} - Error parsing reddit error", url);
"Error parsing reddit error"
})
})
.to_string(),
)
} else {
Ok(json)
}
}
Err(e) => err("Failed to parse page JSON data", e.to_string()),
}
}
Err(e) => err("Failed receiving body from Reddit", e.to_string()),
}
}
Err(e) => err("Couldn't send request to Reddit", e),
}
}

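For orientation, this is roughly how the rest of the codebase would consume `client::json` given the signature shown above: it returns a `serde_json::Value` or an error string, and results are memoized for 30 seconds by the `#[cached]` attribute. The caller below is hypothetical (a sketch assuming it lives in the same crate), not code from this diff:

```rust
use serde_json::Value;

// Hypothetical caller: fetch a subreddit's about.json and pull one field out.
async fn subscriber_count(sub: &str) -> Result<u64, String> {
    let path = format!("/r/{}/about.json?raw_json=1", sub);
    // `false` = not quarantined, so no opt-in cookie is sent with the request.
    let about: Value = crate::client::json(path, false).await?;
    Ok(about["data"]["subscribers"].as_u64().unwrap_or_default())
}
```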
313 src/main.rs

@@ -1,81 +1,272 @@
// Import Crates
use actix_web::{get, middleware::NormalizePath, web, App, HttpResponse, HttpServer};
// Global specifiers
#![forbid(unsafe_code)]
#![warn(clippy::pedantic, clippy::all)]
#![allow(clippy::needless_pass_by_value, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::find_map)]

// Reference local files
mod post;
mod proxy;
mod search;
mod settings;
mod subreddit;
mod user;
mod utils;

// Import Crates
use clap::{App as cli, Arg};

use futures_lite::FutureExt;
use hyper::{header::HeaderValue, Body, Request, Response};

mod client;
use client::proxy;
use server::RequestExt;
use utils::{error, redirect};

mod server;

// Create Services
async fn style() -> HttpResponse {
HttpResponse::Ok().content_type("text/css").body(include_str!("../static/style.css"))

// Required for the manifest to be valid
async fn pwa_logo() -> Result<Response<Body>, String> {
Ok(
Response::builder()
.status(200)
.header("content-type", "image/png")
.body(include_bytes!("../static/logo.png").as_ref().into())
.unwrap_or_default(),
)
}

async fn robots() -> HttpResponse {
HttpResponse::Ok().body(include_str!("../static/robots.txt"))
// Required for iOS App Icons
async fn iphone_logo() -> Result<Response<Body>, String> {
Ok(
Response::builder()
.status(200)
.header("content-type", "image/png")
.body(include_bytes!("../static/apple-touch-icon.png").as_ref().into())
.unwrap_or_default(),
)
}

#[get("/favicon.ico")]
async fn favicon() -> HttpResponse {
HttpResponse::Ok().body("")
async fn favicon() -> Result<Response<Body>, String> {
Ok(
Response::builder()
.status(200)
.header("content-type", "image/vnd.microsoft.icon")
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
.body(include_bytes!("../static/favicon.ico").as_ref().into())
.unwrap_or_default(),
)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
let args: Vec<String> = std::env::args().collect();
let mut address = "0.0.0.0:8080".to_string();
async fn font() -> Result<Response<Body>, String> {
Ok(
Response::builder()
.status(200)
.header("content-type", "font/woff2")
.body(include_bytes!("../static/Inter.var.woff2").as_ref().into())
.unwrap_or_default(),
)
}

if args.len() > 1 {
for arg in args {
if arg.starts_with("--address=") || arg.starts_with("-a=") {
let split: Vec<&str> = arg.split('=').collect();
address = split[1].to_string();
}
async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Response<Body>, String> {
let mut res = Response::builder()
.status(200)
.header("content-type", content_type)
.body(body.to_string().into())
.unwrap_or_default();

if cache {
if let Ok(val) = HeaderValue::from_str("public, max-age=1209600, s-maxage=86400") {
res.headers_mut().insert("Cache-Control", val);
}
}

// start http server
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), &address);

HttpServer::new(|| {
App::new()
// TRAILING SLASH MIDDLEWARE
.wrap(NormalizePath::default())
// DEFAULT SERVICE
.default_service(web::get().to(utils::error))
// GENERAL SERVICES
.route("/style.css/", web::get().to(style))
.route("/favicon.ico/", web::get().to(HttpResponse::Ok))
.route("/robots.txt/", web::get().to(robots))
// PROXY SERVICE
.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
// SEARCH SERVICES
.route("/search/", web::get().to(search::find))
.route("r/{sub}/search/", web::get().to(search::find))
// USER SERVICES
.route("/u/{username}/", web::get().to(user::profile))
.route("/user/{username}/", web::get().to(user::profile))
// WIKI SERVICES
.route("/wiki/", web::get().to(subreddit::wiki))
.route("/wiki/{page}/", web::get().to(subreddit::wiki))
.route("/r/{sub}/wiki/", web::get().to(subreddit::wiki))
.route("/r/{sub}/wiki/{page}/", web::get().to(subreddit::wiki))
// SUBREDDIT SERVICES
.route("/r/{sub}/", web::get().to(subreddit::page))
.route("/r/{sub}/{sort:hot|new|top|rising}/", web::get().to(subreddit::page))
// POPULAR SERVICES
.route("/", web::get().to(subreddit::page))
.route("/{sort:best|hot|new|top|rising}/", web::get().to(subreddit::page))
// POST SERVICES
.route("/{id:.{5,6}}/", web::get().to(post::item))
.route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::item))
.route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item))
})
.bind(&address)
.unwrap_or_else(|_| panic!("Cannot bind to the address: {}", address))
.run()
.await
Ok(res)
}

#[tokio::main]
async fn main() {
let matches = cli::new("Libreddit")
.version(env!("CARGO_PKG_VERSION"))
.about("Private front-end for Reddit written in Rust ")
.arg(
Arg::with_name("redirect-https")
.short("r")
.long("redirect-https")
.help("Redirect all HTTP requests to HTTPS (no longer functional)")
.takes_value(false),
)
.arg(
Arg::with_name("address")
.short("a")
.long("address")
.value_name("ADDRESS")
.help("Sets address to listen on")
.default_value("0.0.0.0")
.takes_value(true),
)
.arg(
Arg::with_name("port")
.short("p")
.long("port")
.value_name("PORT")
.help("Port to listen on")
.default_value("8080")
.takes_value(true),
)
.arg(
Arg::with_name("hsts")
.short("H")
.long("hsts")
.value_name("EXPIRE_TIME")
.help("HSTS header to tell browsers that this site should only be accessed over HTTPS")
.default_value("604800")
.takes_value(true),
)
.get_matches();

let address = matches.value_of("address").unwrap_or("0.0.0.0");
let port = matches.value_of("port").unwrap_or("8080");
let hsts = matches.value_of("hsts");

let listener = [address, ":", port].concat();

println!("Starting Libreddit...");

// Begin constructing a server
let mut app = server::Server::new();

// Define default headers (added to all responses)
app.default_headers = headers! {
"Referrer-Policy" => "no-referrer",
"X-Content-Type-Options" => "nosniff",
"X-Frame-Options" => "DENY",
"Content-Security-Policy" => "default-src 'none'; font-src 'self'; script-src 'self' blob:; manifest-src 'self'; media-src 'self' data: blob: about:; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none'; connect-src 'self'; worker-src blob:;"
};

if let Some(expire_time) = hsts {
if let Ok(val) = HeaderValue::from_str(&format!("max-age={}", expire_time)) {
app.default_headers.insert("Strict-Transport-Security", val);
}
}

// Read static files
app.at("/style.css").get(|_| resource(include_str!("../static/style.css"), "text/css", false).boxed());
app
.at("/manifest.json")
.get(|_| resource(include_str!("../static/manifest.json"), "application/json", false).boxed());
app.at("/robots.txt").get(|_| resource("User-agent: *\nAllow: /", "text/plain", true).boxed());
app.at("/favicon.ico").get(|_| favicon().boxed());
app.at("/logo.png").get(|_| pwa_logo().boxed());
app.at("/Inter.var.woff2").get(|_| font().boxed());
app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed());
app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed());
app
.at("/playHLSVideo.js")
.get(|_| resource(include_str!("../static/playHLSVideo.js"), "text/javascript", false).boxed());
app
.at("/hls.min.js")
.get(|_| resource(include_str!("../static/hls.min.js"), "text/javascript", false).boxed());

// Proxy media through Libreddit
app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
app.at("/hls/:id/*path").get(|r| proxy(r, "https://v.redd.it/{id}/{path}").boxed());
app.at("/img/:id").get(|r| proxy(r, "https://i.redd.it/{id}").boxed());
app.at("/thumb/:point/:id").get(|r| proxy(r, "https://{point}.thumbs.redditmedia.com/{id}").boxed());
app.at("/emoji/:id/:name").get(|r| proxy(r, "https://emoji.redditmedia.com/{id}/{name}").boxed());
app.at("/preview/:loc/:id").get(|r| proxy(r, "https://{loc}view.redd.it/{id}").boxed());
app.at("/style/*path").get(|r| proxy(r, "https://styles.redditmedia.com/{path}").boxed());
app.at("/static/*path").get(|r| proxy(r, "https://www.redditstatic.com/{path}").boxed());

// Browse user profile
app
.at("/u/:name")
.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed());
app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());

app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account".to_string()).boxed());
app.at("/user/:name").get(|r| user::profile(r).boxed());
app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed());
app.at("/user/:name/comments/:id/:title").get(|r| post::item(r).boxed());
app.at("/user/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());

// Configure settings
app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed());
app.at("/settings/restore").get(|r| settings::restore(r).boxed());
app.at("/settings/update").get(|r| settings::update(r).boxed());

// Subreddit services
app
.at("/r/:sub")
.get(|r| subreddit::community(r).boxed())
.post(|r| subreddit::add_quarantine_exception(r).boxed());

app
.at("/r/u_:name")
.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());

app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions(r).boxed());
app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions(r).boxed());

app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());

app.at("/r/:sub/search").get(|r| search::find(r).boxed());

app
.at("/r/:sub/w")
.get(|r| async move { Ok(redirect(format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed());
app
.at("/r/:sub/w/*page")
.get(|r| async move { Ok(redirect(format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed());
app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed());
app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed());

app.at("/r/:sub/about/sidebar").get(|r| subreddit::sidebar(r).boxed());

app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed());

// Comments handler
app.at("/comments/:id").get(|r| post::item(r).boxed());

// Front page
app.at("/").get(|r| subreddit::community(r).boxed());

// View Reddit wiki
app.at("/w").get(|_| async { Ok(redirect("/wiki".to_string())) }.boxed());
app
.at("/w/*page")
.get(|r| async move { Ok(redirect(format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed());
app.at("/wiki").get(|r| subreddit::wiki(r).boxed());
app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed());

// Search all of Reddit
app.at("/search").get(|r| search::find(r).boxed());

// Handle about pages
app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());

app.at("/:id").get(|req: Request<Body>| match req.param("id").as_deref() {
// Sort front page
Some("best") | Some("hot") | Some("new") | Some("top") | Some("rising") | Some("controversial") => subreddit::community(req).boxed(),
// Short link for post
Some(id) if id.len() > 4 && id.len() < 7 => post::item(req).boxed(),
// Error message for unknown pages
_ => error(req, "Nothing here".to_string()).boxed(),
});

// Default service in case no routes match
app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed());

println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), listener);

let server = app.listen(listener);

// Run this server for... forever!
if let Err(e) = server.await {
eprintln!("Server error: {}", e);
}
}

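The `/:id` catch-all near the end of the new `main.rs` dispatches on the path segment: known sort names go to the front page, anything 5-6 characters long is treated as a post short link, and everything else is an error page. A self-contained sketch of that dispatch rule (illustrative only, not code from the diff):

```rust
/// Mirrors the `/:id` dispatch above: a segment is a front-page sort if it
/// matches a known sort name, a post short link if it is 5-6 characters long,
/// and an unknown page otherwise.
fn classify_segment(id: &str) -> &'static str {
    match id {
        "best" | "hot" | "new" | "top" | "rising" | "controversial" => "front page sort",
        _ if id.len() > 4 && id.len() < 7 => "post short link",
        _ => "unknown page",
    }
}

fn main() {
    assert_eq!(classify_segment("hot"), "front page sort");
    assert_eq!(classify_segment("abc123"), "post short link");
    assert_eq!(classify_segment("foobarbaz"), "unknown page");
}
```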
267 src/post.rs

@@ -1,11 +1,13 @@
// CRATES
use crate::utils::{error, format_num, format_url, param, request, rewrite_url, val, Comment, Flags, Flair, Post};
use actix_web::{HttpRequest, HttpResponse, Result};
use crate::client::json;
use crate::esc;
use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{error, format_num, format_url, param, rewrite_urls, setting, template, time, val, Author, Comment, Flags, Flair, FlairPart, Media, Post, Preferences};

use async_recursion::async_recursion;
use hyper::{Body, Request, Response};

use askama::Template;
use chrono::{TimeZone, Utc};

// STRUCTS
#[derive(Template)]
@@ -14,139 +16,204 @@ struct PostTemplate {
comments: Vec<Comment>,
post: Post,
sort: String,
prefs: Preferences,
single_thread: bool,
}

pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let sort = param(&path, "sort");
let id = req.match_info().get("id").unwrap_or("").to_string();
pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
// Build Reddit API path
let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
let sub = req.param("sub").unwrap_or_default();
let quarantined = can_access_quarantine(&req, &sub);

// Set sort to sort query parameter
let sort = param(&path, "sort").unwrap_or_else(|| {
// Grab default comment sort method from Cookies
let default_sort = setting(&req, "comment_sort");

// If there's no sort query but there's a default sort, set sort to default_sort
if default_sort.is_empty() {
String::new()
} else {
path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), default_sort);
default_sort
}
});

// Log the post ID being fetched in debug mode
#[cfg(debug_assertions)]
dbg!(&id);
dbg!(req.param("id").unwrap_or_default());

let single_thread = req.param("comment_id").is_some();
let highlighted_comment = &req.param("comment_id").unwrap_or_default();

// Send a request to the url, receive JSON in response
match request(&path).await {
match json(path, quarantined).await {
// Otherwise, grab the JSON output from the request
Ok(res) => {
Ok(response) => {
// Parse the JSON into Post and Comment structs
let post = parse_post(&res[0]).await.unwrap();
let comments = parse_comments(&res[1]).await.unwrap();
let post = parse_post(&response[0]).await;
let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment);

// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate { comments, post, sort }.render().unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
template(PostTemplate {
comments,
post,
sort,
prefs: Preferences::new(req),
single_thread,
})
}
// If the Reddit API returns an error, exit and send error page to user
Err(msg) => error(msg.to_string()).await,
Err(msg) => {
if msg == "quarantined" {
let sub = req.param("sub").unwrap_or_default();
quarantine(req, sub)
} else {
error(req, msg).await
}
}
}
}

// UTILITIES
async fn media(data: &serde_json::Value) -> (String, String) {
let post_type: &str;
let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
post_type = "video";
format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string())
} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
post_type = "video";
format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string())
} else if data["post_hint"].as_str().unwrap_or("") == "image" {
post_type = "image";
format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string())
} else {
post_type = "link";
data["url"].as_str().unwrap().to_string()
};

(post_type.to_string(), url)
}

// POSTS
async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
async fn parse_post(json: &serde_json::Value) -> Post {
// Retrieve post (as opposed to comments) from JSON
let post_data: &serde_json::Value = &json["data"]["children"][0];
let post: &serde_json::Value = &json["data"]["children"][0];

// Grab UTC time as unix timestamp
let unix_time: i64 = post_data["data"]["created_utc"].as_f64().unwrap().round() as i64;
// Parse post score
let score = post_data["data"]["score"].as_i64().unwrap();
let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
// Parse post score and upvote ratio
let score = post["data"]["score"].as_i64().unwrap_or_default();
let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;

// Determine the type of media along with the media URL
let media = media(&post_data["data"]).await;
let (post_type, media, gallery) = Media::parse(&post["data"]).await;

// Build a post using data parsed from Reddit post API
let post = Post {
title: val(post_data, "title"),
community: val(post_data, "subreddit"),
body: rewrite_url(&val(post_data, "selftext_html")),
author: val(post_data, "author"),
author_flair: Flair(
val(post_data, "author_flair_text"),
val(post_data, "author_flair_background_color"),
val(post_data, "author_flair_text_color"),
|
||||
),
|
||||
url: val(post_data, "permalink"),
|
||||
Post {
|
||||
id: val(post, "id"),
|
||||
title: esc!(post, "title"),
|
||||
community: val(post, "subreddit"),
|
||||
body: rewrite_urls(&val(post, "selftext_html")).replace("\\", ""),
|
||||
author: Author {
|
||||
name: val(post, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
post["data"]["author_flair_type"].as_str().unwrap_or_default(),
|
||||
post["data"]["author_flair_richtext"].as_array(),
|
||||
post["data"]["author_flair_text"].as_str(),
|
||||
),
|
||||
text: esc!(post, "link_flair_text"),
|
||||
background_color: val(post, "author_flair_background_color"),
|
||||
foreground_color: val(post, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(post, "distinguished"),
|
||||
},
|
||||
permalink: val(post, "permalink"),
|
||||
score: format_num(score),
|
||||
post_type: media.0,
|
||||
flair: Flair(
|
||||
val(post_data, "link_flair_text"),
|
||||
val(post_data, "link_flair_background_color"),
|
||||
if val(post_data, "link_flair_text_color") == "dark" {
|
||||
upvote_ratio: ratio as i64,
|
||||
post_type,
|
||||
media,
|
||||
thumbnail: Media {
|
||||
url: format_url(val(post, "thumbnail").as_str()),
|
||||
alt_url: String::new(),
|
||||
width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
|
||||
height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
|
||||
poster: "".to_string(),
|
||||
},
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
post["data"]["link_flair_type"].as_str().unwrap_or_default(),
|
||||
post["data"]["link_flair_richtext"].as_array(),
|
||||
post["data"]["link_flair_text"].as_str(),
|
||||
),
|
||||
text: esc!(post, "link_flair_text"),
|
||||
background_color: val(post, "link_flair_background_color"),
|
||||
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
||||
"black".to_string()
|
||||
} else {
|
||||
"white".to_string()
|
||||
},
|
||||
),
|
||||
flags: Flags {
|
||||
nsfw: post_data["data"]["over_18"].as_bool().unwrap_or(false),
|
||||
stickied: post_data["data"]["stickied"].as_bool().unwrap_or(false),
|
||||
},
|
||||
media: media.1,
|
||||
time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
|
||||
};
|
||||
|
||||
Ok(post)
|
||||
flags: Flags {
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
|
||||
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
|
||||
},
|
||||
domain: val(post, "domain"),
|
||||
rel_time,
|
||||
created,
|
||||
comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
|
||||
gallery,
|
||||
}
|
||||
}
|
||||
|
||||
// COMMENTS
|
||||
#[async_recursion]
|
||||
async fn parse_comments(json: &serde_json::Value) -> Result<Vec<Comment>, &'static str> {
|
||||
// Separate the comment JSON into a Vector of comments
|
||||
let comment_data = json["data"]["children"].as_array().unwrap();
|
||||
|
||||
let mut comments: Vec<Comment> = Vec::new();
|
||||
fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str) -> Vec<Comment> {
|
||||
// Parse the comment JSON into a Vector of Comments
|
||||
let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
|
||||
|
||||
// For each comment, retrieve the values to build a Comment object
|
||||
for comment in comment_data {
|
||||
let unix_time: i64 = comment["data"]["created_utc"].as_f64().unwrap_or(0.0).round() as i64;
|
||||
if unix_time == 0 {
|
||||
continue;
|
||||
}
|
||||
comments
|
||||
.into_iter()
|
||||
.map(|comment| {
|
||||
let kind = comment["kind"].as_str().unwrap_or_default().to_string();
|
||||
let data = &comment["data"];
|
||||
|
||||
let score = comment["data"]["score"].as_i64().unwrap_or(0);
|
||||
let body = rewrite_url(&val(comment, "body_html"));
|
||||
let unix_time = data["created_utc"].as_f64().unwrap_or_default();
|
||||
let (rel_time, created) = time(unix_time);
|
||||
|
||||
let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
|
||||
parse_comments(&comment["data"]["replies"]).await.unwrap_or_default()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time);
|
||||
|
||||
comments.push(Comment {
|
||||
id: val(comment, "id"),
|
||||
body,
|
||||
author: val(comment, "author"),
|
||||
score: format_num(score),
|
||||
time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
|
||||
replies,
|
||||
flair: Flair(
|
||||
val(comment, "author_flair_text"),
|
||||
val(comment, "author_flair_background_color"),
|
||||
val(comment, "author_flair_text_color"),
|
||||
),
|
||||
});
|
||||
}
|
||||
let score = data["score"].as_i64().unwrap_or(0);
|
||||
let body = rewrite_urls(&val(&comment, "body_html"));
|
||||
|
||||
Ok(comments)
|
||||
// If this comment contains replies, handle those too
|
||||
let replies: Vec<Comment> = if data["replies"].is_object() {
|
||||
parse_comments(&data["replies"], post_link, post_author, highlighted_comment)
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let parent_kind_and_id = val(&comment, "parent_id");
|
||||
let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
|
||||
|
||||
let id = val(&comment, "id");
|
||||
let highlighted = id == highlighted_comment;
|
||||
|
||||
Comment {
|
||||
id,
|
||||
kind,
|
||||
parent_id: parent_info[1].to_string(),
|
||||
parent_kind: parent_info[0].to_string(),
|
||||
post_link: post_link.to_string(),
|
||||
post_author: post_author.to_string(),
|
||||
body,
|
||||
author: Author {
|
||||
name: val(&comment, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
data["author_flair_type"].as_str().unwrap_or_default(),
|
||||
data["author_flair_richtext"].as_array(),
|
||||
data["author_flair_text"].as_str(),
|
||||
),
|
||||
text: esc!(&comment, "link_flair_text"),
|
||||
background_color: val(&comment, "author_flair_background_color"),
|
||||
foreground_color: val(&comment, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(&comment, "distinguished"),
|
||||
},
|
||||
score: if data["score_hidden"].as_bool().unwrap_or_default() {
|
||||
("\u{2022}".to_string(), "Hidden".to_string())
|
||||
} else {
|
||||
format_num(score)
|
||||
},
|
||||
rel_time,
|
||||
created,
|
||||
edited,
|
||||
replies,
|
||||
highlighted,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
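
Worth noting from the rewritten `item` handler above: the comment sort is resolved as query parameter first, then the `comment_sort` cookie, then Reddit's default. A simplified sketch of that fallback chain (illustrative only; it ignores the path rebuild the real code performs):

let sort = param(&path, "sort")
	.or_else(|| Some(setting(&req, "comment_sort")).filter(|s| !s.is_empty()))
	.unwrap_or_default();
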
src/proxy.rs (deleted, 30 lines)
@@ -1,30 +0,0 @@
use actix_web::{client::Client, web, Error, HttpResponse, Result};

#[cfg(feature = "proxy")]
use base64::decode;

pub async fn handler(web::Path(url): web::Path<String>) -> Result<HttpResponse> {
	if cfg!(feature = "proxy") {
		#[cfg(feature = "proxy")]
		let media: String;

		#[cfg(not(feature = "proxy"))]
		let media = url;

		#[cfg(feature = "proxy")]
		match decode(url) {
			Ok(bytes) => media = String::from_utf8(bytes).unwrap(),
			Err(_e) => return Ok(HttpResponse::Ok().body("")),
		};

		let client = Client::default();
		client
			.get(media.replace("&amp;", "&"))
			.send()
			.await
			.map_err(Error::from)
			.map(|res| HttpResponse::build(res.status()).streaming(res))
	} else {
		Ok(HttpResponse::Ok().body(""))
	}
}
src/search.rs (122 changed lines)
@@ -1,44 +1,112 @@
|
||||
// CRATES
|
||||
use crate::utils::{error, fetch_posts, param, Post};
|
||||
use actix_web::{HttpRequest, HttpResponse, Result};
|
||||
use crate::utils::{catch_random, error, format_num, format_url, param, setting, template, val, Post, Preferences};
|
||||
use crate::{
|
||||
client::json,
|
||||
subreddit::{can_access_quarantine, quarantine},
|
||||
RequestExt,
|
||||
};
|
||||
use askama::Template;
|
||||
use hyper::{Body, Request, Response};
|
||||
|
||||
// STRUCTS
|
||||
struct SearchParams {
|
||||
q: String,
|
||||
sort: String,
|
||||
t: String,
|
||||
before: String,
|
||||
after: String,
|
||||
restrict_sr: String,
|
||||
}
|
||||
|
||||
// STRUCTS
|
||||
struct Subreddit {
|
||||
name: String,
|
||||
url: String,
|
||||
icon: String,
|
||||
description: String,
|
||||
subscribers: (String, String),
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[allow(dead_code)]
|
||||
#[template(path = "search.html", escape = "none")]
|
||||
struct SearchTemplate {
|
||||
posts: Vec<Post>,
|
||||
query: String,
|
||||
subreddits: Vec<Subreddit>,
|
||||
sub: String,
|
||||
sort: (String, String),
|
||||
ends: (String, String),
|
||||
params: SearchParams,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
// SERVICES
|
||||
pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
|
||||
let path = format!("{}.json?{}", req.path(), req.query_string());
|
||||
let q = param(&path, "q");
|
||||
let sort = if param(&path, "sort").is_empty() {
|
||||
"relevance".to_string()
|
||||
} else {
|
||||
param(&path, "sort")
|
||||
};
|
||||
let sub = req.match_info().get("sub").unwrap_or("").to_string();
|
||||
pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
|
||||
let path = format!("{}.json?{}{}", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "/find").await {
|
||||
return Ok(random);
|
||||
}
|
||||
let query = param(&path, "q").unwrap_or_default();
|
||||
|
||||
match fetch_posts(&path, String::new()).await {
|
||||
Ok(posts) => {
|
||||
let s = SearchTemplate {
|
||||
posts: posts.0,
|
||||
query: q,
|
||||
sub,
|
||||
sort: (sort, param(&path, "t")),
|
||||
ends: (param(&path, "after"), posts.1),
|
||||
let sort = param(&path, "sort").unwrap_or_else(|| "relevance".to_string());
|
||||
|
||||
// If search is not restricted to this subreddit, show other subreddits in search results
|
||||
let subreddits = param(&path, "restrict_sr").map_or(search_subreddits(&query).await, |_| Vec::new());
|
||||
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
|
||||
match Post::fetch(&path, String::new(), quarantined).await {
|
||||
Ok((posts, after)) => template(SearchTemplate {
|
||||
posts,
|
||||
subreddits,
|
||||
sub,
|
||||
params: SearchParams {
|
||||
q: query.replace('"', "&quot;"),
|
||||
sort,
|
||||
t: param(&path, "t").unwrap_or_default(),
|
||||
before: param(&path, "after").unwrap_or_default(),
|
||||
after,
|
||||
restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
|
||||
},
|
||||
prefs: Preferences::new(req),
|
||||
url,
|
||||
}),
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
quarantine(req, sub)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
Ok(HttpResponse::Ok().content_type("text/html").body(s))
|
||||
}
|
||||
Err(msg) => error(msg.to_string()).await,
|
||||
}
|
||||
}
|
||||
|
||||
async fn search_subreddits(q: &str) -> Vec<Subreddit> {
|
||||
let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
|
||||
|
||||
// Send a request to the url
|
||||
json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"]
|
||||
.as_array()
|
||||
.map(ToOwned::to_owned)
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(|subreddit| {
|
||||
// For each subreddit from subreddit list
|
||||
// Fetch subreddit icon either from the community_icon or icon_img value
|
||||
let icon = subreddit["data"]["community_icon"]
|
||||
.as_str()
|
||||
.map_or_else(|| val(&subreddit, "icon_img"), ToString::to_string);
|
||||
|
||||
Subreddit {
|
||||
name: val(subreddit, "display_name_prefixed"),
|
||||
url: val(subreddit, "url"),
|
||||
icon: format_url(&icon),
|
||||
description: val(subreddit, "public_description"),
|
||||
subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<Subreddit>>()
|
||||
}
|
||||
|
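
The `search_subreddits` helper above also shows the JSON access convention used across this changeset: `client::json` returns the parsed `serde_json::Value` (or an error String), missing keys index to `Null`, and every leaf read goes through `as_str`/`as_i64`/`as_array`/`as_u64` with a default. A tiny sketch of the same pattern, inside any function returning `Result<_, String>` (the subreddit name is just an example):

let res = json("/r/rust/about.json?raw_json=1".to_string(), false).await?;
let subscribers = res["data"]["subscribers"].as_u64().unwrap_or_default();
let description = res["data"]["public_description"].as_str().unwrap_or_default();
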
src/server.rs (new file, 215 lines)
@@ -0,0 +1,215 @@
use cookie::Cookie;
use futures_lite::{future::Boxed, Future, FutureExt};
use hyper::{
	header::HeaderValue,
	service::{make_service_fn, service_fn},
	HeaderMap,
};
use hyper::{Body, Method, Request, Response, Server as HyperServer};
use route_recognizer::{Params, Router};
use std::{pin::Pin, result::Result};
use time::Duration;

type BoxResponse = Pin<Box<dyn Future<Output = Result<Response<Body>, String>> + Send>>;

pub struct Route<'a> {
	router: &'a mut Router<fn(Request<Body>) -> BoxResponse>,
	path: String,
}

pub struct Server {
	pub default_headers: HeaderMap,
	router: Router<fn(Request<Body>) -> BoxResponse>,
}

#[macro_export]
macro_rules! headers(
	{ $($key:expr => $value:expr),+ } => {
		{
			let mut m = hyper::HeaderMap::new();
			$(
				if let Ok(val) = hyper::header::HeaderValue::from_str($value) {
					m.insert($key, val);
				}
			)+
			m
		}
	};
);

pub trait RequestExt {
	fn params(&self) -> Params;
	fn param(&self, name: &str) -> Option<String>;
	fn set_params(&mut self, params: Params) -> Option<Params>;
	fn cookies(&self) -> Vec<Cookie>;
	fn cookie(&self, name: &str) -> Option<Cookie>;
}

pub trait ResponseExt {
	fn cookies(&self) -> Vec<Cookie>;
	fn insert_cookie(&mut self, cookie: Cookie);
	fn remove_cookie(&mut self, name: String);
}

impl RequestExt for Request<Body> {
	fn params(&self) -> Params {
		self.extensions().get::<Params>().unwrap_or(&Params::new()).to_owned()
		// self.extensions()
		// 	.get::<RequestMeta>()
		// 	.and_then(|meta| meta.route_params())
		// 	.expect("Routerify: No RouteParams added while processing request")
	}

	fn param(&self, name: &str) -> Option<String> {
		self.params().find(name).map(std::borrow::ToOwned::to_owned)
	}

	fn set_params(&mut self, params: Params) -> Option<Params> {
		self.extensions_mut().insert(params)
	}

	fn cookies(&self) -> Vec<Cookie> {
		self.headers().get("Cookie").map_or(Vec::new(), |header| {
			header
				.to_str()
				.unwrap_or_default()
				.split("; ")
				.map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
				.collect()
		})
	}

	fn cookie(&self, name: &str) -> Option<Cookie> {
		self.cookies().into_iter().find(|c| c.name() == name)
	}
}

impl ResponseExt for Response<Body> {
	fn cookies(&self) -> Vec<Cookie> {
		self.headers().get("Cookie").map_or(Vec::new(), |header| {
			header
				.to_str()
				.unwrap_or_default()
				.split("; ")
				.map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
				.collect()
		})
	}

	fn insert_cookie(&mut self, cookie: Cookie) {
		if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
			self.headers_mut().append("Set-Cookie", val);
		}
	}

	fn remove_cookie(&mut self, name: String) {
		let mut cookie = Cookie::named(name);
		cookie.set_path("/");
		cookie.set_max_age(Duration::second());
		if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
			self.headers_mut().append("Set-Cookie", val);
		}
	}
}

impl Route<'_> {
	fn method(&mut self, method: Method, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
		self.router.add(&format!("/{}{}", method.as_str(), self.path), dest);
		self
	}

	/// Add an endpoint for `GET` requests
	pub fn get(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
		self.method(Method::GET, dest)
	}

	/// Add an endpoint for `POST` requests
	pub fn post(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
		self.method(Method::POST, dest)
	}
}

impl Server {
	pub fn new() -> Self {
		Server {
			default_headers: HeaderMap::new(),
			router: Router::new(),
		}
	}

	pub fn at(&mut self, path: &str) -> Route {
		Route {
			path: path.to_owned(),
			router: &mut self.router,
		}
	}

	pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
		let make_svc = make_service_fn(move |_conn| {
			// For correct borrowing, these values need to be borrowed
			let router = self.router.clone();
			let default_headers = self.default_headers.clone();

			// This is the `Service` that will handle the connection.
			// `service_fn` is a helper to convert a function that
			// returns a Response into a `Service`.
			// let shared_router = router.clone();
			async move {
				Ok::<_, String>(service_fn(move |req: Request<Body>| {
					let headers = default_headers.clone();

					// Remove double slashes
					let mut path = req.uri().path().replace("//", "/");

					// Remove trailing slashes
					if path != "/" && path.ends_with('/') {
						path.pop();
					}

					// Match the visited path with an added route
					match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
						// If a route was configured for this path
						Ok(found) => {
							let mut parammed = req;
							parammed.set_params(found.params().to_owned());

							// Run the route's function
							let func = (found.handler().to_owned().to_owned())(parammed);
							async move {
								let res: Result<Response<Body>, String> = func.await;
								// Add default headers to response
								res.map(|mut response| {
									response.headers_mut().extend(headers);
									response
								})
							}
							.boxed()
						}
						// If there was a routing error
						Err(e) => async move {
							// Return a 404 error
							let res: Result<Response<Body>, String> = Ok(Response::builder().status(404).body(e.into()).unwrap_or_default());
							// Add default headers to response
							res.map(|mut response| {
								response.headers_mut().extend(headers);
								response
							})
						}
						.boxed(),
					}
				}))
			}
		});

		// Build SocketAddr from provided address
		let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));

		// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
		let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
			// Wait for the CTRL+C signal
			tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler")
		});

		server.boxed()
	}
}
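
One detail of the router above that is easy to miss: routes are stored under a key of the form "/<METHOD><path>", which is how a single route_recognizer::Router serves several HTTP methods. A sketch of what `Server::at(...).get(...)` ends up doing (the example paths are hypothetical; types come from the file above):

let mut router: Router<fn(Request<Body>) -> BoxResponse> = Router::new();
// Route::method prefixes the path with the method name...
router.add(&format!("/{}{}", Method::GET.as_str(), "/r/:sub"), |req| subreddit::community(req).boxed());
// ...and the service later rebuilds the same key from the incoming request:
assert!(router.recognize(&format!("/{}{}", Method::GET.as_str(), "/r/rust")).is_ok());
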
src/settings.rs (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
// CRATES
|
||||
use crate::server::ResponseExt;
|
||||
use crate::utils::{redirect, template, Preferences};
|
||||
use askama::Template;
|
||||
use cookie::Cookie;
|
||||
use futures_lite::StreamExt;
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
// STRUCTS
|
||||
#[derive(Template)]
|
||||
#[template(path = "settings.html")]
|
||||
struct SettingsTemplate {
|
||||
prefs: Preferences,
|
||||
}
|
||||
|
||||
// CONSTANTS
|
||||
|
||||
const PREFS: [&str; 9] = [
|
||||
"theme",
|
||||
"front_page",
|
||||
"layout",
|
||||
"wide",
|
||||
"comment_sort",
|
||||
"post_sort",
|
||||
"show_nsfw",
|
||||
"use_hls",
|
||||
"hide_hls_notification",
|
||||
];
|
||||
|
||||
// FUNCTIONS
|
||||
|
||||
// Retrieve cookies from request "Cookie" header
|
||||
pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
template(SettingsTemplate { prefs: Preferences::new(req) })
|
||||
}
|
||||
|
||||
// Set cookies using response "Set-Cookie" header
|
||||
pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Split the body into parts
|
||||
let (parts, mut body) = req.into_parts();
|
||||
|
||||
// Grab existing cookies
|
||||
let _cookies: Vec<Cookie> = parts
|
||||
.headers
|
||||
.get_all("Cookie")
|
||||
.iter()
|
||||
.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
|
||||
.collect();
|
||||
|
||||
// Aggregate the body...
|
||||
// let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?;
|
||||
let body_bytes = body
|
||||
.try_fold(Vec::new(), |mut data, chunk| {
|
||||
data.extend_from_slice(&chunk);
|
||||
Ok(data)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
|
||||
|
||||
let mut response = redirect("/settings".to_string());
|
||||
|
||||
for &name in &PREFS {
|
||||
match form.get(name) {
|
||||
Some(value) => response.insert_cookie(
|
||||
Cookie::build(name.to_owned(), value.to_owned())
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
),
|
||||
None => response.remove_cookie(name.to_string()),
|
||||
};
|
||||
}
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
|
||||
// Split the body into parts
|
||||
let (parts, _) = req.into_parts();
|
||||
|
||||
// Grab existing cookies
|
||||
let _cookies: Vec<Cookie> = parts
|
||||
.headers
|
||||
.get_all("Cookie")
|
||||
.iter()
|
||||
.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
|
||||
.collect();
|
||||
|
||||
let query = parts.uri.query().unwrap_or_default().as_bytes();
|
||||
|
||||
let form = url::form_urlencoded::parse(query).collect::<HashMap<_, _>>();
|
||||
|
||||
let path = match form.get("redirect") {
|
||||
Some(value) => format!("/{}", value.replace("%26", "&").replace("%23", "#")),
|
||||
None => "/".to_string(),
|
||||
};
|
||||
|
||||
let mut response = redirect(path);
|
||||
|
||||
for name in [PREFS.to_vec(), vec!["subscriptions"]].concat() {
|
||||
match form.get(name) {
|
||||
Some(value) => response.insert_cookie(
|
||||
Cookie::build(name.to_owned(), value.to_owned())
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
),
|
||||
None => {
|
||||
if remove_cookies {
|
||||
response.remove_cookie(name.to_string())
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
response
|
||||
}
|
||||
|
||||
// Set cookies using response "Set-Cookie" header
|
||||
pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
Ok(set_cookies_method(req, true))
|
||||
}
|
||||
|
||||
pub async fn update(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
Ok(set_cookies_method(req, false))
|
||||
}
|
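
Each preference saved above becomes its own cookie, so reading a preference elsewhere is just a cookie lookup through the `setting` helper in src/utils.rs. For example (illustrative):

let layout = setting(&req, "layout");               // empty string if the cookie is unset
let show_nsfw = setting(&req, "show_nsfw") == "on"; // compared against "on", as src/search.rs does
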
src/subreddit.rs (383 changed lines)
@@ -1,7 +1,11 @@
|
||||
// CRATES
|
||||
use crate::utils::{error, fetch_posts, format_num, format_url, param, request, rewrite_url, val, Post, Subreddit};
|
||||
use actix_web::{HttpRequest, HttpResponse, Result};
|
||||
use crate::esc;
|
||||
use crate::utils::{catch_random, error, format_num, format_url, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit};
|
||||
use crate::{client::json, server::ResponseExt, RequestExt};
|
||||
use askama::Template;
|
||||
use cookie::Cookie;
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
// STRUCTS
|
||||
#[derive(Template)]
|
||||
@@ -11,6 +15,8 @@ struct SubredditTemplate {
|
||||
posts: Vec<Post>,
|
||||
sort: (String, String),
|
||||
ends: (String, String),
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
@@ -19,87 +25,332 @@ struct WikiTemplate {
|
||||
sub: String,
|
||||
wiki: String,
|
||||
page: String,
|
||||
prefs: Preferences,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "wall.html", escape = "none")]
|
||||
struct WallTemplate {
|
||||
title: String,
|
||||
sub: String,
|
||||
msg: String,
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
// SERVICES
|
||||
pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
|
||||
let path = format!("{}.json?{}", req.path(), req.query_string());
|
||||
let sub = req.match_info().get("sub").unwrap_or("popular").to_string();
|
||||
let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
|
||||
pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Build Reddit API path
|
||||
let root = req.uri().path() == "/";
|
||||
let subscribed = setting(&req, "subscriptions");
|
||||
let front_page = setting(&req, "front_page");
|
||||
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
|
||||
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
|
||||
|
||||
let sub_result = if !&sub.contains('+') && sub != "popular" {
|
||||
subreddit(&sub).await.unwrap_or_default()
|
||||
} else {
|
||||
Subreddit::default()
|
||||
};
|
||||
|
||||
match fetch_posts(&path, String::new()).await {
|
||||
Ok(items) => {
|
||||
let s = SubredditTemplate {
|
||||
sub: sub_result,
|
||||
posts: items.0,
|
||||
sort: (sort, param(&path, "t")),
|
||||
ends: (param(&path, "after"), items.1),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
Ok(HttpResponse::Ok().content_type("text/html").body(s))
|
||||
let sub = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
|
||||
if subscribed.is_empty() {
|
||||
"popular".to_string()
|
||||
} else {
|
||||
subscribed.to_owned()
|
||||
}
|
||||
Err(msg) => error(msg.to_string()).await,
|
||||
} else {
|
||||
front_page.to_owned()
|
||||
});
|
||||
let quarantined = can_access_quarantine(&req, &sub) || root;
|
||||
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
if req.param("sub").is_some() && sub.starts_with("u_") {
|
||||
return Ok(redirect(["/user/", &sub[2..]].concat()));
|
||||
}
|
||||
|
||||
let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.uri().query().unwrap_or_default());
|
||||
|
||||
match Post::fetch(&path, String::new(), quarantined).await {
|
||||
Ok((posts, after)) => {
|
||||
// If you can get subreddit posts, also request subreddit metadata
|
||||
let sub = if !sub.contains('+') && sub != subscribed && sub != "popular" && sub != "all" {
|
||||
// Regular subreddit
|
||||
subreddit(&sub, quarantined).await.unwrap_or_default()
|
||||
} else if sub == subscribed {
|
||||
// Subscription feed
|
||||
if req.uri().path().starts_with("/r/") {
|
||||
subreddit(&sub, quarantined).await.unwrap_or_default()
|
||||
} else {
|
||||
Subreddit::default()
|
||||
}
|
||||
} else if sub.contains('+') {
|
||||
// Multireddit
|
||||
Subreddit {
|
||||
name: sub,
|
||||
..Subreddit::default()
|
||||
}
|
||||
} else {
|
||||
Subreddit::default()
|
||||
};
|
||||
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
|
||||
template(SubredditTemplate {
|
||||
sub,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||
prefs: Preferences::new(req),
|
||||
url,
|
||||
})
|
||||
}
|
||||
Err(msg) => match msg.as_str() {
|
||||
"quarantined" => quarantine(req, sub),
|
||||
"private" => error(req, format!("r/{} is a private community", sub)).await,
|
||||
"banned" => error(req, format!("r/{} has been banned from Reddit", sub)).await,
|
||||
_ => error(req, msg).await,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn wiki(req: HttpRequest) -> Result<HttpResponse> {
|
||||
let sub = req.match_info().get("sub").unwrap_or("reddit.com");
|
||||
let page = req.match_info().get("page").unwrap_or("index");
|
||||
let path: String = format!("r/{}/wiki/{}.json?raw_json=1", sub, page);
|
||||
pub fn quarantine(req: Request<Body>, sub: String) -> Result<Response<Body>, String> {
|
||||
let wall = WallTemplate {
|
||||
title: format!("r/{} is quarantined", sub),
|
||||
msg: "Please click the button below to continue to this subreddit.".to_string(),
|
||||
url: req.uri().to_string(),
|
||||
sub,
|
||||
prefs: Preferences::new(req),
|
||||
};
|
||||
|
||||
match request(&path).await {
|
||||
Ok(res) => {
|
||||
let s = WikiTemplate {
|
||||
sub: sub.to_string(),
|
||||
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap()),
|
||||
page: page.to_string(),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
Ok(HttpResponse::Ok().content_type("text/html").body(s))
|
||||
}
|
||||
Err(msg) => error(msg.to_string()).await,
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(403)
|
||||
.header("content-type", "text/html")
|
||||
.body(wall.render().unwrap_or_default().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn add_quarantine_exception(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let subreddit = req.param("sub").ok_or("Invalid URL")?;
|
||||
let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?;
|
||||
let mut response = redirect(redir);
|
||||
response.insert_cookie(
|
||||
Cookie::build(&format!("allow_quaran_{}", subreddit.to_lowercase()), "true")
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(cookie::Expiration::Session)
|
||||
.finish(),
|
||||
);
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
|
||||
// Determine if the subreddit can be accessed
|
||||
setting(&req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
|
||||
}
|
||||
|
||||
// Sub or unsub by setting subscription cookie using response "Set-Cookie" header
|
||||
pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_default();
|
||||
// Handle random subreddits
|
||||
if sub == "random" || sub == "randnsfw" {
|
||||
return Err("Can't subscribe to random subreddit!".to_string());
|
||||
}
|
||||
|
||||
let query = req.uri().query().unwrap_or_default().to_string();
|
||||
let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
|
||||
|
||||
let mut sub_list = Preferences::new(req).subscriptions;
|
||||
|
||||
// Retrieve list of posts for these subreddits to extract display names
|
||||
let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await?;
|
||||
let display_lookup: Vec<(String, &str)> = posts["data"]["children"]
|
||||
.as_array()
|
||||
.map(|list| {
|
||||
list
|
||||
.iter()
|
||||
.map(|post| {
|
||||
let display_name = post["data"]["subreddit"].as_str().unwrap_or_default();
|
||||
(display_name.to_lowercase(), display_name)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Find each subreddit name (separated by '+') in sub parameter
|
||||
for part in sub.split('+') {
|
||||
// Retrieve display name for the subreddit
|
||||
let display;
|
||||
let part = if let Some(&(_, display)) = display_lookup.iter().find(|x| x.0 == part.to_lowercase()) {
|
||||
// This is already known, doesn't require a separate request
|
||||
display
|
||||
} else {
|
||||
// This subreddit display name isn't known, retrieve it
|
||||
let path: String = format!("/r/{}/about.json?raw_json=1", part);
|
||||
display = json(path, true).await?;
|
||||
display["data"]["display_name"].as_str().ok_or_else(|| "Failed to query subreddit name".to_string())?
|
||||
};
|
||||
|
||||
// Modify sub list based on action
|
||||
if action.contains(&"subscribe".to_string()) && !sub_list.contains(&part.to_owned()) {
|
||||
// Add each sub name to the subscribed list
|
||||
sub_list.push(part.to_owned());
|
||||
// Reorder sub names alphabetically
|
||||
sub_list.sort_by_key(|a| a.to_lowercase())
|
||||
} else if action.contains(&"unsubscribe".to_string()) {
|
||||
// Remove sub name from subscribed list
|
||||
sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
|
||||
}
|
||||
}
|
||||
|
||||
// Redirect back to subreddit
|
||||
// check for redirect parameter if unsubscribing from outside sidebar
|
||||
let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") {
|
||||
format!("/{}/", redirect_path)
|
||||
} else {
|
||||
format!("/r/{}", sub)
|
||||
};
|
||||
|
||||
let mut response = redirect(path);
|
||||
|
||||
// Delete cookie if empty, else set
|
||||
if sub_list.is_empty() {
|
||||
response.remove_cookie("subscriptions".to_string());
|
||||
} else {
|
||||
response.insert_cookie(
|
||||
Cookie::build("subscriptions", sub_list.join("+"))
|
||||
.path("/")
|
||||
.http_only(true)
|
||||
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
|
||||
.finish(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "/wiki").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
let page = req.param("page").unwrap_or_else(|| "index".to_string());
|
||||
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
|
||||
|
||||
match json(path, quarantined).await {
|
||||
Ok(response) => template(WikiTemplate {
|
||||
sub,
|
||||
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("<h3>Wiki not found</h3>")),
|
||||
page,
|
||||
prefs: Preferences::new(req),
|
||||
}),
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" {
|
||||
quarantine(req, sub)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
|
||||
let quarantined = can_access_quarantine(&req, &sub);
|
||||
|
||||
// Handle random subreddits
|
||||
if let Ok(random) = catch_random(&sub, "/about/sidebar").await {
|
||||
return Ok(random);
|
||||
}
|
||||
|
||||
// Build the Reddit JSON API url
|
||||
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
|
||||
|
||||
// Send a request to the url
|
||||
match json(path, quarantined).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(response) => template(WikiTemplate {
|
||||
wiki: format!(
|
||||
"{}<hr><h1>Moderators</h1><br><ul>{}</ul>",
|
||||
rewrite_urls(&val(&response, "description_html").replace("\\", "")),
|
||||
moderators(&sub, quarantined).await?.join(""),
|
||||
),
|
||||
sub,
|
||||
page: "Sidebar".to_string(),
|
||||
prefs: Preferences::new(req),
|
||||
}),
|
||||
Err(msg) => {
|
||||
if msg == "quarantined" {
|
||||
quarantine(req, sub)
|
||||
} else {
|
||||
error(req, msg).await
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn moderators(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
|
||||
// Retrieve and format the html for the moderators list
|
||||
Ok(
|
||||
moderators_list(sub, quarantined)
|
||||
.await?
|
||||
.iter()
|
||||
.map(|m| format!("<li><a style=\"color: var(--accent)\" href=\"/u/{name}\">{name}</a></li>", name = m))
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
async fn moderators_list(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
|
||||
// Build the moderator list URL
|
||||
let path: String = format!("/r/{}/about/moderators.json?raw_json=1", sub);
|
||||
|
||||
// Retrieve response
|
||||
json(path, quarantined).await.map(|response| {
|
||||
// Traverse json tree and format into list of strings
|
||||
response["data"]["children"]
|
||||
.as_array()
|
||||
.unwrap_or(&Vec::new())
|
||||
.iter()
|
||||
.filter_map(|moderator| {
|
||||
let name = moderator["name"].as_str().unwrap_or_default();
|
||||
if name.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(name.to_string())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
}
|
||||
|
||||
// SUBREDDIT
|
||||
async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
|
||||
async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
|
||||
// Build the Reddit JSON API url
|
||||
let path: String = format!("r/{}/about.json?raw_json=1", sub);
|
||||
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
|
||||
|
||||
// Send a request to the url
|
||||
match request(&path).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(res) => {
|
||||
// Metadata regarding the subreddit
|
||||
let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
|
||||
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
|
||||
let res = json(path, quarantined).await?;
|
||||
|
||||
// Fetch subreddit icon either from the community_icon or icon_img value
|
||||
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or("").split('?').collect::<Vec<&str>>()[0];
|
||||
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
|
||||
// Metadata regarding the subreddit
|
||||
let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
|
||||
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
|
||||
|
||||
let sub = Subreddit {
|
||||
name: val(&res, "display_name"),
|
||||
title: val(&res, "title"),
|
||||
description: val(&res, "public_description"),
|
||||
info: rewrite_url(&val(&res, "description_html").replace("\\", "")),
|
||||
icon: format_url(icon),
|
||||
members: format_num(members),
|
||||
active: format_num(active),
|
||||
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
|
||||
};
|
||||
// Fetch subreddit icon either from the community_icon or icon_img value
|
||||
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or_default();
|
||||
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
|
||||
|
||||
Ok(sub)
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
Err(msg) => return Err(msg),
|
||||
}
|
||||
Ok(Subreddit {
|
||||
name: esc!(&res, "display_name"),
|
||||
title: esc!(&res, "title"),
|
||||
description: esc!(&res, "public_description"),
|
||||
info: rewrite_urls(&val(&res, "description_html").replace("\\", "")),
|
||||
moderators: moderators_list(sub, quarantined).await?,
|
||||
icon: format_url(&icon),
|
||||
members: format_num(members),
|
||||
active: format_num(active),
|
||||
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
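
The subscription handling above keeps the whole list in one "subscriptions" cookie, with subreddit display names joined by '+' and sorted case-insensitively. A small sketch of the value that ends up in the cookie (the subreddit names are examples):

let mut sub_list = vec!["rust".to_string(), "AskReddit".to_string()];
sub_list.sort_by_key(|a| a.to_lowercase());
assert_eq!(sub_list.join("+"), "AskReddit+rust"); // what Cookie::build("subscriptions", ...) receives
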
src/user.rs (96 changed lines)
@@ -1,8 +1,11 @@
|
||||
// CRATES
|
||||
use crate::utils::{error, fetch_posts, format_url, nested_val, param, request, Post, User};
|
||||
use actix_web::{HttpRequest, HttpResponse, Result};
|
||||
use crate::client::json;
|
||||
use crate::esc;
|
||||
use crate::server::RequestExt;
|
||||
use crate::utils::{error, format_url, param, template, Post, Preferences, User};
|
||||
use askama::Template;
|
||||
use chrono::{TimeZone, Utc};
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
// STRUCTS
|
||||
#[derive(Template)]
|
||||
@@ -12,65 +15,68 @@ struct UserTemplate {
|
||||
posts: Vec<Post>,
|
||||
sort: (String, String),
|
||||
ends: (String, String),
|
||||
prefs: Preferences,
|
||||
url: String,
|
||||
}
|
||||
|
||||
pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
|
||||
// FUNCTIONS
|
||||
pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Build the Reddit JSON API path
|
||||
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
|
||||
let path = format!(
|
||||
"/user/{}.json?{}&raw_json=1",
|
||||
req.param("name").unwrap_or_else(|| "reddit".to_string()),
|
||||
req.uri().query().unwrap_or_default()
|
||||
);
|
||||
|
||||
// Retrieve other variables from Libreddit request
|
||||
let sort = param(&path, "sort");
|
||||
let username = req.match_info().get("username").unwrap_or("").to_string();
|
||||
let sort = param(&path, "sort").unwrap_or_default();
|
||||
let username = req.param("name").unwrap_or_default();
|
||||
|
||||
// Request user profile data and user posts/comments from Reddit
|
||||
let user = user(&username).await;
|
||||
let posts = fetch_posts(&path, "Comment".to_string()).await;
|
||||
// Request user posts/comments from Reddit
|
||||
let posts = Post::fetch(&path, "Comment".to_string(), false).await;
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
|
||||
match posts {
|
||||
Ok(items) => {
|
||||
let s = UserTemplate {
|
||||
user: user.unwrap(),
|
||||
posts: items.0,
|
||||
sort: (sort, param(&path, "t")),
|
||||
ends: (param(&path, "after"), items.1),
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
Ok(HttpResponse::Ok().content_type("text/html").body(s))
|
||||
Ok((posts, after)) => {
|
||||
// If you can get user posts, also request user data
|
||||
let user = user(&username).await.unwrap_or_default();
|
||||
|
||||
template(UserTemplate {
|
||||
user,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||
prefs: Preferences::new(req),
|
||||
url,
|
||||
})
|
||||
}
|
||||
// If there is an error show error page
|
||||
Err(msg) => error(msg.to_string()).await,
|
||||
Err(msg) => error(req, msg).await,
|
||||
}
|
||||
}
|
||||
|
||||
// USER
|
||||
async fn user(name: &str) -> Result<User, &'static str> {
|
||||
async fn user(name: &str) -> Result<User, String> {
|
||||
// Build the Reddit JSON API path
|
||||
let path: String = format!("user/{}/about.json", name);
|
||||
|
||||
let res;
|
||||
let path: String = format!("/user/{}/about.json?raw_json=1", name);
|
||||
|
||||
// Send a request to the url
|
||||
match request(&path).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(response) => {
|
||||
res = response;
|
||||
json(path, false).await.map(|res| {
|
||||
// Grab creation date as unix timestamp
|
||||
let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
|
||||
|
||||
// Closure used to parse JSON from Reddit APIs
|
||||
let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
|
||||
|
||||
// Parse the JSON output into a User struct
|
||||
User {
|
||||
name: name.to_string(),
|
||||
title: esc!(about("title")),
|
||||
icon: format_url(&about("icon_img")),
|
||||
karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
|
||||
created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
|
||||
banner: esc!(about("banner_img")),
|
||||
description: about("public_description"),
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
Err(msg) => return Err(msg),
|
||||
}
|
||||
|
||||
// Grab creation date as unix timestamp
|
||||
let created: i64 = res["data"]["created"].as_f64().unwrap().round() as i64;
|
||||
|
||||
// Parse the JSON output into a User struct
|
||||
Ok(User {
|
||||
name: name.to_string(),
|
||||
title: nested_val(&res, "subreddit", "title"),
|
||||
icon: format_url(nested_val(&res, "subreddit", "icon_img")),
|
||||
karma: res["data"]["total_karma"].as_i64().unwrap(),
|
||||
created: Utc.timestamp(created, 0).format("%b %e, %Y").to_string(),
|
||||
banner: nested_val(&res, "subreddit", "banner_img"),
|
||||
description: nested_val(&res, "subreddit", "public_description"),
|
||||
})
|
||||
}
|
||||
|
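
The rewritten `user` function above also illustrates the small-closure trick used when many fields come from one JSON subtree: `about` closes over the parsed response so the `User` literal stays flat. In isolation it looks like this (assuming `res` is the parsed about.json from `client::json`):

let about = |item: &str| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
let title = about("title");
let banner = about("banner_img");
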
src/utils.rs (694 changed lines)
@@ -1,55 +1,349 @@
|
||||
//
|
||||
// CRATES
|
||||
//
|
||||
use actix_web::{http::StatusCode, HttpResponse, Result};
|
||||
use crate::{client::json, esc, server::RequestExt};
|
||||
use askama::Template;
|
||||
use chrono::{TimeZone, Utc};
|
||||
use cookie::Cookie;
|
||||
use hyper::{Body, Request, Response};
|
||||
use regex::Regex;
|
||||
use serde_json::from_str;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use time::{Duration, OffsetDateTime};
|
||||
use url::Url;
|
||||
// use surf::{client, get, middleware::Redirect};
|
||||
|
||||
#[cfg(feature = "proxy")]
|
||||
use base64::encode;
|
||||
// Post flair with content, background color and foreground color
|
||||
pub struct Flair {
|
||||
pub flair_parts: Vec<FlairPart>,
|
||||
pub text: String,
|
||||
pub background_color: String,
|
||||
pub foreground_color: String,
|
||||
}
|
||||
|
||||
// Part of flair, either emoji or text
|
||||
pub struct FlairPart {
|
||||
pub flair_part_type: String,
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
impl FlairPart {
|
||||
pub fn parse(flair_type: &str, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<Self> {
|
||||
// Parse type of flair
|
||||
match flair_type {
|
||||
// If flair contains emojis and text
|
||||
"richtext" => match rich_flair {
|
||||
Some(rich) => rich
|
||||
.iter()
|
||||
// For each part of the flair, extract text and emojis
|
||||
.map(|part| {
|
||||
let value = |name: &str| part[name].as_str().unwrap_or_default();
|
||||
Self {
|
||||
flair_part_type: value("e").to_string(),
|
||||
value: match value("e") {
|
||||
"text" => value("t").to_string(),
|
||||
"emoji" => format_url(value("u")),
|
||||
_ => String::new(),
|
||||
},
|
||||
}
|
||||
})
|
||||
.collect::<Vec<Self>>(),
|
||||
None => Vec::new(),
|
||||
},
|
||||
// If flair contains only text
|
||||
"text" => match text_flair {
|
||||
Some(text) => vec![Self {
|
||||
flair_part_type: "text".to_string(),
|
||||
value: text.to_string(),
|
||||
}],
|
||||
None => Vec::new(),
|
||||
},
|
||||
_ => Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Author {
|
||||
pub name: String,
|
||||
pub flair: Flair,
|
||||
pub distinguished: String,
|
||||
}
|
||||
|
||||
//
|
||||
// STRUCTS
|
||||
//
|
||||
// Post flair with text, background color and foreground color
|
||||
pub struct Flair(pub String, pub String, pub String);
|
||||
// Post flags with nsfw and stickied
|
||||
pub struct Flags {
|
||||
pub nsfw: bool,
|
||||
pub stickied: bool,
|
||||
}
|
||||
|
||||
pub struct Media {
|
||||
pub url: String,
|
||||
pub alt_url: String,
|
||||
pub width: i64,
|
||||
pub height: i64,
|
||||
pub poster: String,
|
||||
}
|
||||
|
||||
impl Media {
|
||||
pub async fn parse(data: &Value) -> (String, Self, Vec<GalleryMedia>) {
|
||||
let mut gallery = Vec::new();
|
||||
|
||||
// If post is a video, return the video
|
||||
let (post_type, url_val, alt_url_val) = if data["preview"]["reddit_video_preview"]["fallback_url"].is_string() {
|
||||
// Return reddit video
|
||||
(
|
||||
if data["preview"]["reddit_video_preview"]["is_gif"].as_bool().unwrap_or(false) {
|
||||
"gif"
|
||||
} else {
|
||||
"video"
|
||||
},
|
||||
&data["preview"]["reddit_video_preview"]["fallback_url"],
|
||||
Some(&data["preview"]["reddit_video_preview"]["hls_url"]),
|
||||
)
|
||||
} else if data["secure_media"]["reddit_video"]["fallback_url"].is_string() {
|
||||
// Return reddit video
|
||||
(
|
||||
if data["preview"]["reddit_video_preview"]["is_gif"].as_bool().unwrap_or(false) {
|
||||
"gif"
|
||||
} else {
|
||||
"video"
|
||||
},
|
||||
&data["secure_media"]["reddit_video"]["fallback_url"],
|
||||
Some(&data["secure_media"]["reddit_video"]["hls_url"]),
|
||||
)
|
||||
} else if data["post_hint"].as_str().unwrap_or("") == "image" {
|
||||
// Handle images, whether GIFs or pics
|
||||
let preview = &data["preview"]["images"][0];
|
||||
let mp4 = &preview["variants"]["mp4"];
|
||||
|
||||
if mp4.is_object() {
|
||||
// Return the mp4 if the media is a gif
|
||||
("gif", &mp4["source"]["url"], None)
|
||||
} else {
|
||||
// Return the picture if the media is an image
|
||||
if data["domain"] == "i.redd.it" {
|
||||
("image", &data["url"], None)
|
||||
} else {
|
||||
("image", &preview["source"]["url"], None)
|
||||
}
|
||||
}
|
||||
} else if data["is_self"].as_bool().unwrap_or_default() {
|
||||
// If type is self, return permalink
|
||||
("self", &data["permalink"], None)
|
||||
} else if data["is_gallery"].as_bool().unwrap_or_default() {
|
||||
// If this post contains a gallery of images
|
||||
gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);
|
||||
|
||||
("gallery", &data["url"], None)
|
||||
} else {
|
||||
// If type can't be determined, return url
|
||||
("link", &data["url"], None)
|
||||
};
|
||||
|
||||
let source = &data["preview"]["images"][0]["source"];
|
||||
|
||||
let url = if post_type == "self" || post_type == "link" {
|
||||
url_val.as_str().unwrap_or_default().to_string()
|
||||
} else {
|
||||
format_url(url_val.as_str().unwrap_or_default())
|
||||
};
|
||||
|
||||
let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));
|
||||
|
||||
(
|
||||
post_type.to_string(),
|
||||
Self {
|
||||
url,
|
||||
alt_url,
|
||||
width: source["width"].as_i64().unwrap_or_default(),
|
||||
height: source["height"].as_i64().unwrap_or_default(),
|
||||
poster: format_url(source["url"].as_str().unwrap_or_default()),
|
||||
},
|
||||
gallery,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GalleryMedia {
|
||||
pub url: String,
|
||||
pub width: i64,
|
||||
pub height: i64,
|
||||
pub caption: String,
|
||||
pub outbound_url: String,
|
||||
}
|
||||
|
||||
impl GalleryMedia {
|
||||
fn parse(items: &Value, metadata: &Value) -> Vec<Self> {
|
||||
items
|
||||
.as_array()
|
||||
.unwrap_or(&Vec::new())
|
||||
.iter()
|
||||
.map(|item| {
|
||||
// For each image in gallery
|
||||
let media_id = item["media_id"].as_str().unwrap_or_default();
|
||||
let image = &metadata[media_id]["s"];
|
||||
|
||||
// Construct gallery items
|
||||
Self {
|
||||
url: format_url(image["u"].as_str().unwrap_or_default()),
|
||||
width: image["x"].as_i64().unwrap_or_default(),
|
||||
height: image["y"].as_i64().unwrap_or_default(),
|
||||
caption: item["caption"].as_str().unwrap_or_default().to_string(),
|
||||
outbound_url: item["outbound_url"].as_str().unwrap_or_default().to_string(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<Self>>()
|
||||
}
|
||||
}
|
||||
|
||||
// Post containing content, metadata and media
|
||||
pub struct Post {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub community: String,
|
||||
pub body: String,
|
||||
pub author: String,
|
||||
pub author_flair: Flair,
|
||||
pub url: String,
|
||||
pub score: String,
|
||||
pub author: Author,
|
||||
pub permalink: String,
|
||||
pub score: (String, String),
|
||||
pub upvote_ratio: i64,
|
||||
pub post_type: String,
|
||||
pub flair: Flair,
|
||||
pub flags: Flags,
|
||||
pub media: String,
|
||||
pub time: String,
|
||||
pub thumbnail: Media,
|
||||
pub media: Media,
|
||||
pub domain: String,
|
||||
pub rel_time: String,
|
||||
pub created: String,
|
||||
pub comments: (String, String),
|
||||
pub gallery: Vec<GalleryMedia>,
|
||||
}
|
||||
|
||||
impl Post {
|
||||
// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
|
||||
pub async fn fetch(path: &str, fallback_title: String, quarantine: bool) -> Result<(Vec<Self>, String), String> {
|
||||
let res;
|
||||
let post_list;
|
||||
|
||||
// Send a request to the url
|
||||
match json(path.to_string(), quarantine).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(response) => {
|
||||
res = response;
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
Err(msg) => return Err(msg),
|
||||
}
|
||||
|
||||
// Fetch the list of posts from the JSON response
|
||||
match res["data"]["children"].as_array() {
|
||||
Some(list) => post_list = list,
|
||||
None => return Err("No posts found".to_string()),
|
||||
}
|
||||
|
||||
let mut posts: Vec<Self> = Vec::new();
|
||||
|
||||
// For each post from posts list
|
||||
for post in post_list {
|
||||
let data = &post["data"];
|
||||
|
||||
let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
|
||||
let score = data["score"].as_i64().unwrap_or_default();
|
||||
let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
|
||||
let title = esc!(post, "title");
|
||||
|
||||
// Determine the type of media along with the media URL
|
||||
let (post_type, media, gallery) = Media::parse(&data).await;
|
||||
|
||||
posts.push(Self {
|
||||
id: val(post, "id"),
|
||||
title: esc!(if title.is_empty() { fallback_title.to_owned() } else { title }),
|
||||
community: val(post, "subreddit"),
|
||||
body: rewrite_urls(&val(post, "body_html")),
|
||||
author: Author {
|
||||
name: val(post, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
data["author_flair_type"].as_str().unwrap_or_default(),
|
||||
data["author_flair_richtext"].as_array(),
|
||||
data["author_flair_text"].as_str(),
|
||||
),
|
||||
text: esc!(post, "link_flair_text"),
|
||||
background_color: val(post, "author_flair_background_color"),
|
||||
foreground_color: val(post, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(post, "distinguished"),
|
||||
},
|
||||
score: if data["hide_score"].as_bool().unwrap_or_default() {
|
||||
("\u{2022}".to_string(), "Hidden".to_string())
|
||||
} else {
|
||||
format_num(score)
|
||||
},
|
||||
upvote_ratio: ratio as i64,
|
||||
post_type,
|
||||
thumbnail: Media {
|
||||
url: format_url(val(post, "thumbnail").as_str()),
|
||||
alt_url: String::new(),
|
||||
width: data["thumbnail_width"].as_i64().unwrap_or_default(),
|
||||
height: data["thumbnail_height"].as_i64().unwrap_or_default(),
|
||||
poster: "".to_string(),
|
||||
},
|
||||
media,
|
||||
domain: val(post, "domain"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
data["link_flair_type"].as_str().unwrap_or_default(),
|
||||
data["link_flair_richtext"].as_array(),
|
||||
data["link_flair_text"].as_str(),
|
||||
),
|
||||
text: esc!(post, "link_flair_text"),
|
||||
background_color: val(post, "link_flair_background_color"),
|
||||
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
||||
"black".to_string()
|
||||
} else {
|
||||
"white".to_string()
|
||||
},
|
||||
},
|
||||
flags: Flags {
|
||||
nsfw: data["over_18"].as_bool().unwrap_or_default(),
|
||||
stickied: data["stickied"].as_bool().unwrap_or_default(),
|
||||
},
|
||||
permalink: val(post, "permalink"),
|
||||
rel_time,
|
||||
created,
|
||||
comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
|
||||
gallery,
|
||||
});
|
||||
}
|
||||
|
||||
Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
|
||||
}
|
||||
}
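As a usage sketch (the exact path string here is an assumption for illustration), a route handler could call `fetch` like this:

// Hypothetical call site inside an async handler
match Post::fetch("/r/rust/hot.json?raw_json=1", String::new(), false).await {
	Ok((posts, after)) => println!("fetched {} posts, next page token: {}", posts.len(), after),
	Err(msg) => eprintln!("Reddit API error: {}", msg),
}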
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "comment.html", escape = "none")]
|
||||
// Comment with content, post, score and date/time that it was posted
pub struct Comment {
	pub id: String,
	pub kind: String,
	pub parent_id: String,
	pub parent_kind: String,
	pub post_link: String,
	pub post_author: String,
	pub body: String,
	pub author: Author,
	pub score: (String, String),
	pub rel_time: String,
	pub created: String,
	pub edited: (String, String),
	pub replies: Vec<Comment>,
	pub highlighted: bool,
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "error.html", escape = "none")]
|
||||
pub struct ErrorTemplate {
|
||||
pub msg: String,
|
||||
pub prefs: Preferences,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
// User struct containing metadata about user
|
||||
pub struct User {
|
||||
pub name: String,
|
||||
@ -68,9 +362,10 @@ pub struct Subreddit {
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub info: String,
|
||||
pub moderators: Vec<String>,
|
||||
pub icon: String,
|
||||
pub members: (String, String),
pub active: (String, String),
|
||||
pub wiki: bool,
|
||||
}
|
||||
|
||||
@ -84,187 +379,246 @@ pub struct Params {
|
||||
pub before: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Preferences {
|
||||
pub theme: String,
|
||||
pub front_page: String,
|
||||
pub layout: String,
|
||||
pub wide: String,
|
||||
pub show_nsfw: String,
|
||||
pub hide_hls_notification: String,
|
||||
pub use_hls: String,
|
||||
pub comment_sort: String,
|
||||
pub post_sort: String,
|
||||
pub subscriptions: Vec<String>,
|
||||
}
|
||||
|
||||
impl Preferences {
|
||||
// Build preferences from cookies
|
||||
pub fn new(req: Request<Body>) -> Self {
|
||||
Self {
|
||||
theme: setting(&req, "theme"),
|
||||
front_page: setting(&req, "front_page"),
|
||||
layout: setting(&req, "layout"),
|
||||
wide: setting(&req, "wide"),
|
||||
show_nsfw: setting(&req, "show_nsfw"),
|
||||
use_hls: setting(&req, "use_hls"),
|
||||
hide_hls_notification: setting(&req, "hide_hls_notification"),
|
||||
comment_sort: setting(&req, "comment_sort"),
|
||||
post_sort: setting(&req, "post_sort"),
|
||||
subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// FORMATTING
|
||||
//
|
||||
|
||||
// Grab a query parameter from a url
pub fn param(path: &str, value: &str) -> Option<String> {
	Some(
		Url::parse(format!("https://libredd.it/{}", path).as_str())
			.ok()?
			.query_pairs()
			.into_owned()
			.collect::<HashMap<_, _>>()
			.get(value)?
			.to_owned(),
	)
}
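A quick sketch of the expected behaviour, assuming the url crate's usual query parsing:

assert_eq!(param("/r/rust/search?q=async&sort=new", "q"), Some("async".to_string()));
assert_eq!(param("/r/rust/search?q=async&sort=new", "missing"), None);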
|
||||
|
||||
// Retrieve the value of a setting by name
|
||||
pub fn setting(req: &Request<Body>, name: &str) -> String {
|
||||
// Parse a cookie value from request
|
||||
req
|
||||
.cookie(name)
|
||||
.unwrap_or_else(|| {
|
||||
// If there is no cookie for this setting, try receiving a default from an environment variable
|
||||
if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
|
||||
Cookie::new(name, default)
|
||||
} else {
|
||||
Cookie::named(name)
|
||||
}
|
||||
})
|
||||
.value()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
// Detect and redirect in the event of a random subreddit
|
||||
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
|
||||
if (sub == "random" || sub == "randnsfw") && !sub.contains('+') {
|
||||
let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
|
||||
.as_str()
|
||||
.unwrap_or_default()
|
||||
.to_string();
|
||||
Ok(redirect(format!("/r/{}{}", new_sub, additional)))
|
||||
} else {
|
||||
Err("No redirect needed".to_string())
|
||||
}
|
||||
}
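Typical usage is at the top of a subreddit route handler; `sub_name` below is a hypothetical variable holding the requested subreddit name:

if let Ok(random_redirect) = catch_random(&sub_name, "/hot").await {
	return Ok(random_redirect); // 302 to the resolved subreddit
}
// otherwise fall through and render the requested subreddit normally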
|
||||
|
||||
// Direct urls to proxy if proxy is enabled
|
||||
pub fn format_url(url: &str) -> String {
	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
		String::new()
	} else {
		Url::parse(url).map_or(String::new(), |parsed| {
			let domain = parsed.domain().unwrap_or_default();

			let capture = |regex: &str, format: &str, segments: i16| {
				Regex::new(regex).map_or(String::new(), |re| {
					re.captures(url).map_or(String::new(), |caps| match segments {
						1 => [format, &caps[1]].join(""),
						2 => [format, &caps[1], "/", &caps[2]].join(""),
						_ => String::new(),
					})
				})
			};
macro_rules! chain {
|
||||
() => {
|
||||
{
|
||||
String::new()
|
||||
}
|
||||
};
|
||||
|
||||
( $first_fn:expr, $($other_fns:expr), *) => {
|
||||
{
|
||||
let result = $first_fn;
|
||||
if result.is_empty() {
|
||||
chain!($($other_fns,)*)
|
||||
}
|
||||
else
|
||||
{
|
||||
result
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
match domain {
|
||||
"v.redd.it" => chain!(
|
||||
capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
|
||||
capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2)
|
||||
),
|
||||
"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
|
||||
"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
|
||||
"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
|
||||
"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
|
||||
"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1),
|
||||
"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1),
|
||||
"styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
|
||||
"www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
|
||||
_ => String::new(),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
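A few expected rewrites, following the match arms above (sketch only):

assert_eq!(format_url("https://i.redd.it/abcdef.jpg"), "/img/abcdef.jpg");
assert_eq!(format_url("https://a.thumbs.redditmedia.com/xyz.jpg"), "/thumb/a/xyz.jpg");
assert_eq!(format_url("self"), "");                         // placeholder values become empty
assert_eq!(format_url("https://example.com/pic.png"), ""); // unknown hosts are dropped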
|
||||
|
||||
// Rewrite Reddit links to Libreddit in body of text
|
||||
pub fn rewrite_urls(input_text: &str) -> String {
|
||||
let text1 = Regex::new(r#"href="(https|http|)://(www.|old.|np.|amp.|)(reddit).(com)/"#).map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string());
|
||||
|
||||
// Rewrite external media previews to Libreddit
|
||||
Regex::new(r"https://external-preview\.redd\.it(.*)[^?]").map_or(String::new(), |re| {
|
||||
if re.is_match(&text1) {
|
||||
re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
|
||||
} else {
|
||||
text1
|
||||
}
|
||||
})
|
||||
}
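For example, a Reddit link inside rendered post HTML is rewritten to a relative Libreddit path (sketch):

let body = r#"<a href="https://www.reddit.com/r/rust/comments/abc123/">discussion</a>"#;
assert_eq!(rewrite_urls(body), r#"<a href="/r/rust/comments/abc123/">discussion</a>"#);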
|
||||
|
||||
// Append `m` and `k` for millions and thousands respectively
|
||||
pub fn format_num(num: i64) -> (String, String) {
	let truncated = if num >= 1_000_000 || num <= -1_000_000 {
		format!("{}m", num / 1_000_000)
	} else if num >= 1000 || num <= -1000 {
		format!("{}k", num / 1_000)
	} else {
		num.to_string()
	};

	(truncated, num.to_string())
}
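The tuple keeps both the truncated label and the exact count, for example:

assert_eq!(format_num(999), ("999".to_string(), "999".to_string()));
assert_eq!(format_num(1_234), ("1k".to_string(), "1234".to_string()));
assert_eq!(format_num(1_800_000), ("1m".to_string(), "1800000".to_string()));
assert_eq!(format_num(-2_500), ("-2k".to_string(), "-2500".to_string()));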
|
||||
|
||||
//
|
||||
// JSON PARSING
|
||||
//
|
||||
// Parse a relative and absolute time from a UNIX timestamp
|
||||
pub fn time(created: f64) -> (String, String) {
|
||||
let time = OffsetDateTime::from_unix_timestamp(created.round() as i64);
|
||||
let time_delta = OffsetDateTime::now_utc() - time;
|
||||
|
||||
// If the time difference is more than a month, show full date
|
||||
let rel_time = if time_delta > Duration::days(30) {
|
||||
time.format("%b %d '%y")
|
||||
// Otherwise, show relative date/time
|
||||
} else if time_delta.whole_days() > 0 {
|
||||
format!("{}d ago", time_delta.whole_days())
|
||||
} else if time_delta.whole_hours() > 0 {
|
||||
format!("{}h ago", time_delta.whole_hours())
|
||||
} else {
|
||||
format!("{}m ago", time_delta.whole_minutes())
|
||||
};
|
||||
|
||||
(rel_time, time.format("%b %d %Y, %H:%M:%S UTC"))
|
||||
}
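For instance (1609459200 is 2021-01-01 00:00:00 UTC; the relative half depends on when the code runs):

let (rel_time, created) = time(1_609_459_200.0);
// created should read "Jan 01 2021, 00:00:00 UTC";
// rel_time is relative ("3h ago", "12d ago") or "Jan 01 '21" once more than a month has passed.
println!("{} ({})", rel_time, created);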
|
||||
|
||||
// val() function used to parse JSON from Reddit APIs
pub fn val(j: &Value, k: &str) -> String {
	j["data"][k].as_str().unwrap_or_default().to_string()
}
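A sketch of how `val` reads string fields out of the wrapped `data` object:

use serde_json::json;

let post = json!({ "data": { "id": "t3_abc123", "title": "Hello" } });
assert_eq!(val(&post, "id"), "t3_abc123");
assert_eq!(val(&post, "missing"), ""); // absent or non-string keys fall back to an empty string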
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! esc {
|
||||
($f:expr) => {
|
||||
$f.replace('<', "&lt;").replace('>', "&gt;")
|
||||
};
|
||||
($j:expr, $k:expr) => {
|
||||
$j["data"][$k].as_str().unwrap_or_default().to_string().replace('<', "<").replace('>', ">")
|
||||
};
|
||||
}
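The macro only neutralises angle brackets, for example:

let escaped = esc!("<script>alert(1)</script>");
assert_eq!(escaped, "&lt;script&gt;alert(1)&lt;/script&gt;");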
|
||||
|
||||
// Fetch posts of a user or subreddit
|
||||
pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
|
||||
let res;
|
||||
let post_list;
|
||||
|
||||
// Send a request to the url
|
||||
match request(&path).await {
|
||||
// If success, receive JSON in response
|
||||
Ok(response) => {
|
||||
res = response;
|
||||
}
|
||||
// If the Reddit API returns an error, exit this function
|
||||
Err(msg) => return Err(msg),
|
||||
}
|
||||
|
||||
// Fetch the list of posts from the JSON response
|
||||
match res["data"]["children"].as_array() {
|
||||
Some(list) => post_list = list,
|
||||
None => return Err("No posts found"),
|
||||
}
|
||||
|
||||
let mut posts: Vec<Post> = Vec::new();
|
||||
|
||||
for post in post_list {
|
||||
let img = if val(post, "thumbnail").starts_with("https:/") {
|
||||
format_url(val(post, "thumbnail"))
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
|
||||
let score = post["data"]["score"].as_i64().unwrap_or_default();
|
||||
let title = val(post, "title");
|
||||
|
||||
posts.push(Post {
|
||||
title: if title.is_empty() { fallback_title.to_owned() } else { title },
|
||||
community: val(post, "subreddit"),
|
||||
body: rewrite_url(&val(post, "body_html")),
|
||||
author: val(post, "author"),
|
||||
author_flair: Flair(
|
||||
val(post, "author_flair_text"),
|
||||
val(post, "author_flair_background_color"),
|
||||
val(post, "author_flair_text_color"),
|
||||
),
|
||||
score: format_num(score),
|
||||
post_type: "link".to_string(),
|
||||
media: img,
|
||||
flair: Flair(
|
||||
val(post, "link_flair_text"),
|
||||
val(post, "link_flair_background_color"),
|
||||
if val(post, "link_flair_text_color") == "dark" {
|
||||
"black".to_string()
|
||||
} else {
|
||||
"white".to_string()
|
||||
},
|
||||
),
|
||||
flags: Flags {
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
|
||||
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
|
||||
},
|
||||
url: val(post, "permalink"),
|
||||
time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok((posts, res["data"]["after"].as_str().unwrap_or("").to_string()))
|
||||
}
|
||||
// Escape < and > to accurately render HTML
// pub fn esc(j: &Value, k: &str) -> String {
// 	val(j,k)
// 		// .replace('&', "&amp;")
// 		.replace('<', "&lt;")
// 		.replace('>', "&gt;")
// 		// .replace('"', "&quot;")
// 		// .replace('\'', "&#x27;")
// 		// .replace('/', "&#x2F;")
// }
|
||||
|
||||
//
|
||||
// NETWORKING
|
||||
//
|
||||
|
||||
pub fn template(t: impl Template) -> Result<Response<Body>, String> {
|
||||
Ok(
|
||||
Response::builder()
|
||||
.status(200)
|
||||
.header("content-type", "text/html")
|
||||
.body(t.render().unwrap_or_default().into())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn redirect(path: String) -> Response<Body> {
|
||||
Response::builder()
|
||||
.status(302)
|
||||
.header("content-type", "text/html")
|
||||
.header("Location", &path)
|
||||
.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path).into())
|
||||
.unwrap_or_default()
|
||||
}
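A sketch of what the builder produces, assuming the hyper Response/Body types used throughout:

let res = redirect("/r/rust".to_string());
assert_eq!(res.status(), 302);
assert_eq!(res.headers()["Location"], "/r/rust");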
|
||||
|
||||
pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
|
||||
let body = ErrorTemplate {
|
||||
msg,
|
||||
prefs: Preferences::new(req),
|
||||
}
|
||||
.render()
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
|
||||
}
|
||||
|
BIN
static/Inter.var.woff2
Normal file
Binary file not shown.
BIN
static/apple-touch-icon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 8.0 KiB |
BIN
static/favicon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.2 KiB |
BIN
static/favicon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 969 B |
5
static/hls.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
BIN
static/logo.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 7.9 KiB |
23
static/manifest.json
Normal file
@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "Libreddit",
|
||||
"short_name": "Libreddit",
|
||||
"display": "standalone",
|
||||
"background_color": "#1f1f1f",
|
||||
"description": "An alternative private front-end to Reddit",
|
||||
"theme_color": "#1f1f1f",
|
||||
"icons": [
|
||||
{
|
||||
"src": "logo.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "apple-touch-icon.png",
|
||||
"sizes": "180x180"
|
||||
},
|
||||
{
|
||||
"src": "favicon.ico",
|
||||
"sizes": "32x32"
|
||||
}
|
||||
]
|
||||
}
|
77
static/playHLSVideo.js
Normal file
@ -0,0 +1,77 @@
|
||||
// @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0
|
||||
(function () {
|
||||
if (Hls.isSupported()) {
|
||||
var videoSources = document.querySelectorAll("video source[type='application/vnd.apple.mpegurl']");
|
||||
videoSources.forEach(function (source) {
|
||||
var playlist = source.src;
|
||||
|
||||
var oldVideo = source.parentNode;
|
||||
var autoplay = oldVideo.classList.contains("hls_autoplay");
|
||||
|
||||
// If HLS is supported natively then don't use hls.js
|
||||
if (oldVideo.canPlayType(source.type)) {
|
||||
if (autoplay) {
|
||||
oldVideo.play();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Replace video with copy that will have all "source" elements removed
|
||||
var newVideo = oldVideo.cloneNode(true);
|
||||
var allSources = newVideo.querySelectorAll("source");
|
||||
allSources.forEach(function (source) {
|
||||
source.remove();
|
||||
});
|
||||
|
||||
// Empty source to enable play event
|
||||
newVideo.src = "about:blank";
|
||||
|
||||
oldVideo.parentNode.replaceChild(newVideo, oldVideo);
|
||||
|
||||
function initializeHls() {
|
||||
newVideo.removeEventListener('play', initializeHls);
|
||||
|
||||
var hls = new Hls({ autoStartLoad: false });
|
||||
hls.loadSource(playlist);
|
||||
hls.attachMedia(newVideo);
|
||||
hls.on(Hls.Events.MANIFEST_PARSED, function () {
|
||||
hls.loadLevel = hls.levels.length - 1;
|
||||
hls.startLoad();
|
||||
newVideo.play();
|
||||
});
|
||||
|
||||
hls.on(Hls.Events.ERROR, function (event, data) {
|
||||
var errorType = data.type;
|
||||
var errorFatal = data.fatal;
|
||||
if (errorFatal) {
|
||||
switch (errorType) {
|
||||
case Hls.ErrorType.NETWORK_ERROR:
|
||||
hls.startLoad();
|
||||
break;
|
||||
case Hls.ErrorType.MEDIA_ERROR:
|
||||
hls.recoverMediaError();
|
||||
break;
|
||||
default:
|
||||
hls.destroy();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
console.error("HLS error", data);
|
||||
});
|
||||
}
|
||||
|
||||
newVideo.addEventListener('play', initializeHls);
|
||||
|
||||
if (autoplay) {
|
||||
newVideo.play();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var videos = document.querySelectorAll("video.hls_autoplay");
|
||||
videos.forEach(function (video) {
|
||||
video.setAttribute("autoplay", "");
|
||||
});
|
||||
}
|
||||
})();
|
||||
// @license-end
|
@ -1,2 +0,0 @@
|
||||
User-agent: *
|
||||
Allow: /
|
1089
static/style.css
File diff suppressed because it is too large
Load Diff
@ -3,22 +3,56 @@
|
||||
<head>
|
||||
{% block head %}
|
||||
<title>{% block title %}Libreddit{% endblock %}</title>
|
||||
<meta http-equiv="Referrer-Policy" content="no-referrer">
|
||||
<meta http-equiv="Content-Security-Policy" content="default-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; form-action 'self';
|
||||
{% if cfg!(not(feature = "proxy")) %}img-src https://*; media-src https://*{% endif %}">
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||
<meta name="description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="stylesheet" href="/style.css">
|
||||
<!-- General PWA -->
|
||||
<meta name="theme-color" content="#1F1F1F">
|
||||
<!-- iOS Application -->
|
||||
<meta name="apple-mobile-web-app-title" content="Libreddit">
|
||||
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||
<meta name="apple-mobile-web-app-status-bar-style" content="default">
|
||||
<!-- Android -->
|
||||
<meta name="mobile-web-app-capable" content="yes">
|
||||
<!-- iOS Logo -->
|
||||
<link href="/touch-icon-iphone.png" rel="apple-touch-icon">
|
||||
<!-- PWA Manifest -->
|
||||
<link rel="manifest" type="application/json" href="/manifest.json">
|
||||
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
|
||||
<link rel="stylesheet" type="text/css" href="/style.css">
|
||||
{% endblock %}
|
||||
</head>
|
||||
<body class="
|
||||
{% if prefs.layout != "" %}{{ prefs.layout }}{% endif %}
|
||||
{% if prefs.wide == "on" %} wide{% endif %}
|
||||
{% if prefs.theme != "system" %} {{ prefs.theme }}{% endif %}">
|
||||
<!-- NAVIGATION BAR -->
|
||||
<nav>
|
||||
<a id="logo" href="/"><span id="lib">lib</span>reddit. <span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span></a>
|
||||
<div id="logo">
|
||||
<a id="libreddit" href="/"><span id="lib">lib</span><span id="reddit">reddit.</span></a>
|
||||
<span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span>
|
||||
{% block subscriptions %}{% endblock %}
|
||||
</div>
|
||||
{% block search %}{% endblock %}
|
||||
<a id="github" href="https://github.com/spikecodes/libreddit">GITHUB</a>
|
||||
<div id="links">
|
||||
<a id="settings_link" href="/settings">
|
||||
<span>settings</span>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title>settings</title>
|
||||
<circle cx="12" cy="12" r="3"/><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
|
||||
</svg>
|
||||
</a>
|
||||
<a id="code" href="https://github.com/spikecodes/libreddit">
|
||||
<span>code</span>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title>code</title>
|
||||
<polyline points="16 18 22 12 16 6"/><polyline points="8 6 2 12 8 18"/>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<!-- MAIN CONTENT -->
|
||||
{% block body %}
|
||||
<main>
|
||||
{% block content %}
|
||||
@ -26,4 +60,4 @@
|
||||
</main>
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
|
25
templates/comment.html
Normal file
@ -0,0 +1,25 @@
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
{% if kind == "more" && parent_kind == "t1" %}
|
||||
<a class="deeper_replies" href="{{ post_link }}{{ parent_id }}">→ More replies</a>
|
||||
{% else if kind == "t1" %}
|
||||
<div id="{{ id }}" class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score" title="{{ score.1 }}">{{ score.0 }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data">
|
||||
<a class="comment_author {{ author.distinguished }} {% if author.name == post_author %}op{% endif %}" href="/user/{{ author.name }}">u/{{ author.name }}</a>
|
||||
{% if author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post_link }}{{ id }}/?context=3" class="created" title="{{ created }}">{{ rel_time }}</a>
|
||||
{% if edited.0 != "".to_string() %}<span class="edited" title="{{ edited.1 }}">edited {{ edited.0 }}</span>{% endif %}
|
||||
</summary>
|
||||
<div class="comment_body {% if highlighted %}highlighted{% endif %}">{{ body }}</div>
|
||||
<blockquote class="replies">{% for c in replies -%}{{ c.render().unwrap() }}{%- endfor %}
|
||||
</blockquote>
|
||||
</details>
|
||||
</div>
|
||||
{% endif %}
|
@ -1,6 +1,9 @@
|
||||
{% extends "base.html" %}
|
||||
{% block title %}Error: {{ msg }}{% endblock %}
|
||||
{% block sortstyle %}{% endblock %}
|
||||
{% block content %}
|
||||
<h1 style="text-align: center; font-size: 50px;">{{ message }}</h1>
|
||||
<div id="error">
|
||||
<h1>{{ msg }}</h1>
|
||||
<h3>Head back <a href="/">home</a>?</h3>
|
||||
</div>
|
||||
{% endblock %}
|
@ -10,89 +10,131 @@
|
||||
{% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %}
|
||||
{% block head %}
|
||||
{% call super() %}
|
||||
<meta name="author" content="u/{{ post.author }}">
|
||||
<!-- Meta Tags -->
|
||||
<meta name="author" content="u/{{ post.author.name }}">
|
||||
<meta name="title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="og:type" content="website">
|
||||
<meta property="og:url" content="{{ post.permalink }}">
|
||||
<meta property="og:title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="og:description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
<meta property="og:image" content="{{ post.thumbnail.url }}">
|
||||
<meta property="twitter:card" content="summary_large_image">
|
||||
<meta property="twitter:url" content="{{ post.permalink }}">
|
||||
<meta property="twitter:title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="twitter:description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
<meta property="twitter:image" content="{{ post.thumbnail.url }}">
|
||||
{% endblock %}
|
||||
|
||||
{% macro comment(item) -%}
|
||||
|
||||
<div id="{{ item.id }}" class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score">{{ item.score }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data"><a class="comment_author {% if item.author == post.author %}op{% endif %}" href="/u/{{ item.author }}">u/{{ item.author }}</a>
|
||||
{% if item.flair.0 != "" %}
|
||||
<small class="author_flair">{{ item.flair.0 }}</small>
|
||||
{% endif %}
|
||||
• <span class="datetime">{{ item.time }}</span>
|
||||
</summary>
|
||||
<p class="comment_body">{{ item.body }}</p>
|
||||
|
||||
{%- endmacro %}
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list(post.community.as_str()) %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
|
||||
<!-- POST CONTENT -->
|
||||
<div class="post highlighted">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<p>
|
||||
<b><a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a></b>
|
||||
•
|
||||
<a class="post_author" href="/u/{{ post.author }}">u/{{ post.author }}</a>
|
||||
{% if post.author_flair.0 != "" %}
|
||||
<small class="author_flair">{{ post.author_flair.0 }}</small>
|
||||
{% endif %}
|
||||
<span class="datetime">{{ post.time }}</span>
|
||||
</p>
|
||||
<a href="{{ post.url }}" class="post_title">
|
||||
{{ post.title }}
|
||||
{% if post.flair.0 != "" %}
|
||||
<small class="post_flair" style="color:{{ post.flair.2 }}; background:{{ post.flair.1 }}">{{ post.flair.0 }}</small>
|
||||
{% endif %}
|
||||
</a>
|
||||
{% if post.post_type == "image" %}
|
||||
<img class="post_media" src="{{ post.media }}"/>
|
||||
{% else if post.post_type == "video" %}
|
||||
<video class="post_media" src="{{ post.media }}" controls autoplay loop>
|
||||
{% else if post.post_type == "link" %}
|
||||
<a id="post_url" href="{{ post.media }}">{{ post.media }}</a>
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<div class="post_body">{{ post.body }}</div>
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call utils::render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</p>
|
||||
|
||||
<!-- POST MEDIA -->
|
||||
{% if post.post_type == "image" %}
|
||||
<a href="{{ post.media.url }}" class="post_media_image" >
|
||||
<svg
|
||||
width="{{ post.media.width }}px"
|
||||
height="{{ post.media.height }}px"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.media.url }}"/>
|
||||
<desc>
|
||||
<img alt="Post image" src="{{ post.media.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</a>
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<video class="post_media_video short hls_autoplay" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
|
||||
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
|
||||
<source src="{{ post.media.url }}" type="video/mp4" />
|
||||
</video>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% else %}
|
||||
<video class="post_media_video" src="{{ post.media.url }}" controls autoplay loop><a href={{ post.media.url }}>Video</a></video>
|
||||
{% call utils::render_hls_notification(post.permalink[1..]) %}
|
||||
{% endif %}
|
||||
{% else if post.post_type == "gallery" %}
|
||||
<div class="gallery">
|
||||
{% for image in post.gallery -%}
|
||||
<figure>
|
||||
<a href="{{ image.url }}" ><img alt="Gallery image" src="{{ image.url }}"/></a>
|
||||
<figcaption>
|
||||
<p>{{ image.caption }}</p>
|
||||
{% if image.outbound_url.len() > 0 %}
|
||||
<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
|
||||
{% endif %}
|
||||
</figcaption>
|
||||
</figure>
|
||||
{%- endfor %}
|
||||
</div>
|
||||
{% else if post.post_type == "link" %}
|
||||
<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
|
||||
{% endif %}
|
||||
|
||||
<!-- POST BODY -->
|
||||
<div class="post_body">{{ post.body }}</div>
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
<div class="post_footer">
|
||||
<ul id="post_links">
|
||||
<li><a href="/{{ post.id }}">permalink</a></li>
|
||||
<li><a href="https://reddit.com/{{ post.id }}" rel="nofollow">reddit</a></li>
|
||||
</ul>
|
||||
<p>{{ post.upvote_ratio }}% Upvoted</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- SORT FORM -->
|
||||
<form id="sort">
|
||||
<select name="sort">
|
||||
{% call utils::options(sort, ["confidence", "top", "new", "controversial", "old"], "") %}
|
||||
</select><input id="sort_submit" type="submit" value="→">
|
||||
<select name="sort" title="Sort comments by">
|
||||
{% call utils::options(sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select><button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
|
||||
<!-- COMMENTS -->
|
||||
{% for c in comments -%}
|
||||
<div class="thread">
|
||||
{% call comment(c) %}
|
||||
<div class="replies">
|
||||
{% for reply1 in c.replies %}
|
||||
{% call comment(reply1) %}
|
||||
<div class="replies">
|
||||
{% for reply2 in reply1.replies %}
|
||||
{% call comment(reply2) %}
|
||||
<div class="replies">
|
||||
{% for reply3 in reply2.replies %}
|
||||
{% call comment(reply3) %}
|
||||
{% if reply3.replies.len() > 0 %}
|
||||
<a class="deeper_replies" href="{{ post.url }}{{ reply3.id }}">→ More replies</a>
|
||||
{% endif %}
|
||||
</details></div>
|
||||
{% endfor %}
|
||||
</div></details></div>
|
||||
{% endfor %}
|
||||
</div></details></div>
|
||||
{% endfor %}
|
||||
</div></details></div>
|
||||
</div>
|
||||
<div class="thread">
|
||||
{% if single_thread %}
|
||||
<p class="thread_nav"><a href="/{{ post.id }}">View all comments</a></p>
|
||||
{% if c.parent_kind == "t1" %}
|
||||
<p class="thread_nav"><a href="?context=9999">Show parent comments</a></p>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{{ c.render().unwrap() }}
|
||||
</div>
|
||||
{%- endfor %}
|
||||
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
@ -1,73 +1,85 @@
|
||||
{% extends "base.html" %}
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
{% block title %}Libreddit: search results - {{ query }}{% endblock %}
|
||||
{% block title %}Libreddit: search results - {{ params.q }}{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list("") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
<form id="search_sort">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ query }}">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q }}" title="Search libreddit">
|
||||
{% if sub != "" %}
|
||||
<div id="inside">
|
||||
<input type="checkbox" name="restrict_sr" id="restrict_sr" checked="checked" data-com.bitwarden.browser.user-edited="yes">
|
||||
<label for="restrict_sr">in r/{{ sub }}</label>
|
||||
<input type="checkbox" name="restrict_sr" id="restrict_sr" {% if params.restrict_sr != "" %}checked{% endif %}>
|
||||
<label for="restrict_sr" class="search_label">in r/{{ sub }}</label>
|
||||
</div>
|
||||
{% endif %}
|
||||
<select id="sort_options" name="sort">
|
||||
{% call utils::options(sort.0, ["relevance", "hot", "top", "new", "comments"], "") %}
|
||||
</select>{% if sort.0 != "new" %}<select id="timeframe" name="t">
|
||||
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
||||
</select>{% endif %}<input id="sort_submit" type="submit" value="→">
|
||||
<select id="sort_options" name="sort" title="Sort results by">
|
||||
{% call utils::options(params.sort, ["relevance", "hot", "top", "new", "comments"], "") %}
|
||||
</select>{% if params.sort != "new" %}<select id="timeframe" name="t" title="Timeframe">
|
||||
{% call utils::options(params.t, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
||||
</select>{% endif %}<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{% if subreddits.len() > 0 %}
|
||||
<div id="search_subreddits">
|
||||
{% for subreddit in subreddits %}
|
||||
<a href="{{ subreddit.url }}" class="search_subreddit">
|
||||
<div class="search_subreddit_left">{% if subreddit.icon != "" %}<img src="{{ subreddit.icon }}" alt="r/{{ subreddit.name }} icon">{% endif %}</div>
|
||||
<div class="search_subreddit_right">
|
||||
<p class="search_subreddit_header">
|
||||
<span class="search_subreddit_name">{{ subreddit.name }}</span>
|
||||
<span class="dot">•</span>
|
||||
<span class="search_subreddit_members" title="{{ subreddit.subscribers.1 }} Members">{{ subreddit.subscribers.0 }} Members</span>
|
||||
</p>
|
||||
<p class="search_subreddit_description">{{ subreddit.description }}</p>
|
||||
</div>
|
||||
</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% for post in posts %}
|
||||
{% if post.title != "Comment" %}
|
||||
<div class="post">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<p>
|
||||
<b><a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a></b>
|
||||
• <a class="post_author" href="/u/{{ post.author }}">u/{{ post.author }}</a>
|
||||
{% if post.author_flair.0 != "" %}
|
||||
<small class="author_flair">{{ post.author_flair.0 }}</small>
|
||||
{% endif %}
|
||||
<span class="datetime" style="float: right;">{{ post.time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
{% if post.flair.0 != "" %}
|
||||
<small class="post_flair" style="color:{{ post.flair.2 }}; background:{{ post.flair.1 }}">{{ post.flair.0 }}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post.url }}">{{ post.title }}</a>
|
||||
</p>
|
||||
</div>
|
||||
<img class="post_thumbnail" src="{{ post.media }}">
|
||||
</div><br>
|
||||
|
||||
{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
{% else if post.title != "Comment" %}
|
||||
{% call utils::post_in_list(post) %}
|
||||
{% else %}
|
||||
<div class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score">{{ post.score }}</p>
|
||||
<p class="comment_score" title="{{ post.score.1 }}">{{ post.score.0 }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data">
|
||||
<a class="comment_link" href="{{ post.url }}">COMMENT</a>
|
||||
<span class="datetime">{{ post.time }}</span>
|
||||
<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</summary>
|
||||
<p class="comment_body">{{ post.body }}</p>
|
||||
</details>
|
||||
</div><br>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
<footer>
|
||||
{% if ends.0 != "" %}
|
||||
<a href="?sort={{ sort.0 }}{% if sort.0 == "top" %}&t={{ sort.1 }}{% endif %}&before={{ ends.0 }}">PREV</a>
|
||||
{% if params.before != "" %}
|
||||
<a href="?q={{ params.q }}&restrict_sr={{ params.restrict_sr }}
|
||||
&sort={{ params.sort }}&t={{ params.t }}
|
||||
&before={{ params.before }}">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if ends.1 != "" %}
|
||||
<a href="?sort={{ sort.0 }}{% if sort.0 == "top" %}&t={{ sort.1 }}{% endif %}&after={{ ends.1 }}">NEXT</a>
|
||||
{% if params.after != "" %}
|
||||
<a href="?q={{ params.q }}&restrict_sr={{ params.restrict_sr }}
|
||||
&sort={{ params.sort }}&t={{ params.t }}
|
||||
&after={{ params.after }}">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
</div>
|
||||
|
90
templates/settings.html
Normal file
@ -0,0 +1,90 @@
|
||||
{% extends "base.html" %}
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
{% block title %}Libreddit Settings{% endblock %}
|
||||
|
||||
{% block search %}
|
||||
{% call utils::search("".to_owned(), "", "") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="settings">
|
||||
<form action="/settings" method="POST">
|
||||
<div class="prefs">
|
||||
<p>Appearance</p>
|
||||
<div id="theme">
|
||||
<label for="theme">Theme:</label>
|
||||
<select name="theme">
|
||||
{% call utils::options(prefs.theme, ["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold"], "system") %}
|
||||
</select>
|
||||
</div>
|
||||
<p>Interface</p>
|
||||
<div id="front_page">
|
||||
<label for="front_page">Front page:</label>
|
||||
<select name="front_page">
|
||||
{% call utils::options(prefs.front_page, ["default", "popular", "all"], "default") %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="layout">
|
||||
<label for="layout">Layout:</label>
|
||||
<select name="layout">
|
||||
{% call utils::options(prefs.layout, ["card", "clean", "compact"], "card") %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="wide">
|
||||
<label for="wide">Wide UI:</label>
|
||||
<input type="hidden" value="off" name="wide">
|
||||
<input type="checkbox" name="wide" {% if prefs.wide == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<p>Content</p>
|
||||
<div id="post_sort">
|
||||
<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
|
||||
<select name="post_sort">
|
||||
{% call utils::options(prefs.post_sort, ["hot", "new", "top", "rising", "controversial"], "hot") %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="comment_sort">
|
||||
<label for="comment_sort">Default comment sort:</label>
|
||||
<select name="comment_sort">
|
||||
{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select>
|
||||
</div>
|
||||
<div id="show_nsfw">
|
||||
<label for="show_nsfw">Show NSFW posts:</label>
|
||||
<input type="hidden" value="off" name="show_nsfw">
|
||||
<input type="checkbox" name="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div id="use_hls">
|
||||
<label for="use_hls">Use HLS for videos</label>
|
||||
<input type="hidden" value="off" name="use_hls">
|
||||
<input type="checkbox" name="use_hls" {% if prefs.use_hls == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div id="hide_hls_notification">
|
||||
<label for="hide_hls_notification">Hide notification about possible HLS usage</label>
|
||||
<input type="hidden" value="off" name="hide_hls_notification">
|
||||
<input type="checkbox" name="hide_hls_notification" {% if prefs.hide_hls_notification == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<input id="save" type="submit" value="Save">
|
||||
</div>
|
||||
</form>
|
||||
{% if prefs.subscriptions.len() > 0 %}
|
||||
<div class="prefs" id="settings_subs">
|
||||
<p>Subscribed Feeds</p>
|
||||
{% for sub in prefs.subscriptions %}
|
||||
<div>
|
||||
<span>{% if sub.starts_with("u_") -%}{{ format!("u/{}", &sub[2..]) }}{% else -%}{{ format!("r/{}", sub) }}{% endif -%}</span>
|
||||
<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
|
||||
<button class="unsubscribe">Unsubscribe</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div id="settings_note">
|
||||
<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
|
||||
<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&subscriptions={{ prefs.subscriptions.join("%2B") }}">this link</a>.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
@ -11,61 +11,66 @@
|
||||
{% call utils::search(["/r/", sub.name.as_str()].concat(), "") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list(sub.name.as_str(), "wide") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<main>
|
||||
<div id="column_one">
|
||||
<form id="sort">
|
||||
<div id="sort_options">
|
||||
{% if sub.name.is_empty() %}
|
||||
{% call utils::sort("", ["hot", "new", "top", "rising"], sort.0) %}
|
||||
{% call utils::sort("", ["hot", "new", "top", "rising", "controversial"], sort.0) %}
|
||||
{% else %}
|
||||
{% call utils::sort(["/r/", sub.name.as_str()].concat(), ["hot", "new", "top", "rising"], sort.0) %}
|
||||
{% call utils::sort(["/r/", sub.name.as_str()].concat(), ["hot", "new", "top", "rising", "controversial"], sort.0) %}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if sort.0 == "top" %}<select id="timeframe" name="t">
|
||||
{% if sort.0 == "top" || sort.0 == "controversial" %}<select id="timeframe" name="t" title="Timeframe">
|
||||
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "day") %}
|
||||
<input id="sort_submit" type="submit" value="→">
|
||||
</select>{% endif %}
|
||||
</select>
|
||||
<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
{% endif %}
|
||||
</form>
|
||||
|
||||
{% if sub.name.contains("+") %}
|
||||
<form action="/r/{{ sub.name }}/subscribe" method="POST">
|
||||
<button id="multisub" class="subscribe" title="Subscribe to each sub in this multireddit">Subscribe to Multireddit</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
|
||||
<div id="posts">
|
||||
{% for post in posts %}
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %}">
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<p>
|
||||
<b><a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a></b>
|
||||
• <a class="post_author" href="/u/{{ post.author }}">u/{{ post.author }}</a>
|
||||
{% if post.author_flair.0 != "" %}
|
||||
<small class="author_flair">{{ post.author_flair.0 }}</small>
|
||||
{% endif %}
|
||||
<span class="datetime">{{ post.time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
{% if post.flair.0 != "" %}
|
||||
<small class="post_flair" style="color:{{ post.flair.2 }}; background:{{ post.flair.1 }}">{{ post.flair.0 }}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post.url }}">{{ post.title }}</a>
|
||||
</p>
|
||||
</div>
|
||||
<img class="post_thumbnail" src="{{ post.media }}">
|
||||
</div><br>
|
||||
{% if !(post.flags.nsfw && prefs.show_nsfw != "on") %}
|
||||
<hr class="sep" />
|
||||
{% call utils::post_in_list(post) %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% if prefs.use_hls == "on" %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<footer>
|
||||
{% if ends.0 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&before={{ ends.0 }}">PREV</a>
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if ends.1 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&after={{ ends.1 }}">NEXT</a>
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
</div>
|
||||
{% if sub.name != "" %}
|
||||
{% if sub.name != "" && !sub.name.contains("+") %}
|
||||
<aside>
|
||||
<div id="subreddit">
|
||||
<div class="panel" id="subreddit">
|
||||
{% if sub.wiki %}
|
||||
<div id="top">
|
||||
<div>Posts</div>
|
||||
@ -73,23 +78,44 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
<div id="sub_meta">
|
||||
<img id="sub_icon" src="{{ sub.icon }}">
|
||||
<img id="sub_icon" src="{{ sub.icon }}" alt="Icon for r/{{ sub.name }}">
|
||||
<p id="sub_title">{{ sub.title }}</p>
|
||||
<p id="sub_name">r/{{ sub.name }}</p>
|
||||
<p id="sub_description">{{ sub.description }}</p>
|
||||
<div id="sub_details">
|
||||
<label>Members</label>
|
||||
<label>Active</label>
|
||||
<div>{{ sub.members }}</div>
|
||||
<div>{{ sub.active }}</div>
|
||||
<div title="{{ sub.members.1 }}">{{ sub.members.0 }}</div>
|
||||
<div title="{{ sub.active.1 }}">{{ sub.active.0 }}</div>
|
||||
</div>
|
||||
<div id="sub_subscription">
|
||||
{% if prefs.subscriptions.contains(sub.name) %}
|
||||
<form action="/r/{{ sub.name }}/unsubscribe" method="POST">
|
||||
<button class="unsubscribe">Unsubscribe</button>
|
||||
</form>
|
||||
{% else %}
|
||||
<form action="/r/{{ sub.name }}/subscribe" method="POST">
|
||||
<button class="subscribe">Subscribe</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<details id="sidebar">
|
||||
<details class="panel" id="sidebar">
|
||||
<summary id="sidebar_label">Sidebar</summary>
|
||||
<div id="sidebar_contents">{{ sub.info }}</div>
|
||||
<div id="sidebar_contents">
|
||||
{{ sub.info }}
|
||||
<hr>
|
||||
<h2>Moderators</h2>
|
||||
<br>
|
||||
<ul>
|
||||
{% for moderator in sub.moderators %}
|
||||
<li><a style="color: var(--accent)" href="/u/{{ moderator }}">{{ moderator }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
</aside>
|
||||
{% endif %}
|
||||
</main>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
@ -6,72 +6,66 @@
|
||||
{% endblock %}
|
||||
|
||||
{% block title %}{{ user.name.replace("u/", "") }} (u/{{ user.name }}) - Libreddit{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list("") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<main style="max-width: 1000px;">
|
||||
<main>
|
||||
<div id="column_one">
|
||||
<form id="sort">
|
||||
<select name="sort">
|
||||
{% call utils::options(sort.0, ["hot", "new", "top"], "") %}
|
||||
</select>{% if sort.0 == "top" %}<select id="timeframe" name="t">
|
||||
{% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
|
||||
</select>{% endif %}<input id="sort_submit" type="submit" value="→">
|
||||
</select>{% endif %}<button id="sort_submit" class="submit">
|
||||
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
|
||||
<path d="M20 50 H100" />
|
||||
<path d="M75 15 L100 50 L75 85" />
|
||||
→
|
||||
</svg>
|
||||
</button>
|
||||
</form>
|
||||
|
||||
<div id="posts">
|
||||
{% for post in posts %}
|
||||
{% if post.title != "Comment" %}
|
||||
<div class='post'>
|
||||
<div class="post_left">
|
||||
<p class="post_score">{{ post.score }}</p>
|
||||
{% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
|
||||
</div>
|
||||
<div class="post_right">
|
||||
<p>
|
||||
<b><a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a></b>
|
||||
• <a class="post_author" href="/u/{{ post.author }}">u/{{ post.author }}</a>
|
||||
{% if post.author_flair.0 != "" %}
|
||||
<small class="author_flair">{{ post.author_flair.0 }}</small>
|
||||
{% endif %}
|
||||
<span class="datetime" style="float: right;">{{ post.time }}</span>
|
||||
</p>
|
||||
<p class="post_title">
|
||||
{% if post.flair.0 == "Comment" %}
|
||||
{% else if post.flair.0 == "" %}
|
||||
{% else %}
|
||||
<small class="post_flair" style="color:{{ post.flair.2 }}; background:{{ post.flair.1 }}">{{ post.flair.0 }}</small>
|
||||
{% endif %}
|
||||
<a href="{{ post.url }}">{{ post.title }}</a>
|
||||
</p>
|
||||
</div>
|
||||
<img class="post_thumbnail" src="{{ post.media }}">
|
||||
</div><br>
|
||||
|
||||
{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
{% else if post.title != "Comment" %}
|
||||
{% call utils::post_in_list(post) %}
|
||||
{% else %}
|
||||
<div class="comment">
|
||||
<div class="comment_left">
|
||||
<p class="comment_score">{{ post.score }}</p>
|
||||
<p class="comment_score" title="{{ post.score.1 }}">{{ post.score.0 }}</p>
|
||||
<div class="line"></div>
|
||||
</div>
|
||||
<details class="comment_right" open>
|
||||
<summary class="comment_data">
|
||||
<a class="comment_link" href="{{ post.url }}">COMMENT</a>
|
||||
<span class="datetime">{{ post.time }}</span>
|
||||
<a class="comment_link" href="{{ post.permalink }}">COMMENT</a>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
</summary>
|
||||
<p class="comment_body">{{ post.body }}</p>
|
||||
</details>
|
||||
</div><br>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
<footer>
|
||||
{% if ends.0 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&before={{ ends.0 }}">PREV</a>
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if ends.1 != "" %}
|
||||
<a href="?sort={{ sort.0 }}&after={{ ends.1 }}">NEXT</a>
|
||||
<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
</div>
|
||||
<aside>
|
||||
<div id="user">
|
||||
<img id="user_icon" src="{{ user.icon }}">
|
||||
<div class="panel" id="user">
|
||||
<img id="user_icon" src="{{ user.icon }}" alt="User icon">
|
||||
<p id="user_title">{{ user.title }}</p>
|
||||
<p id="user_name">u/{{ user.name }}</p>
|
||||
<div id="user_description">{{ user.description }}</div>
|
||||
@ -81,7 +75,19 @@
|
||||
<div>{{ user.karma }}</div>
|
||||
<div>{{ user.created }}</div>
|
||||
</div>
|
||||
<div id="user_subscription">
|
||||
{% let name = ["u_", user.name.as_str()].join("") %}
|
||||
{% if prefs.subscriptions.contains(name) %}
|
||||
<form action="/r/u_{{ user.name }}/unsubscribe" method="POST">
|
||||
<button class="unsubscribe">Unfollow</button>
|
||||
</form>
|
||||
{% else %}
|
||||
<form action="/r/u_{{ user.name }}/subscribe" method="POST">
|
||||
<button class="subscribe">Follow</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</aside>
|
||||
</main>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
@ -1,7 +1,7 @@
{% macro options(current, values, default) -%}
{% for value in values %}
<option value="{{ value }}" {% if current == value || (current == "" && value == default) %}selected{% endif %}>
{{ format!("{}{}", value.get(0..1).unwrap().to_uppercase(), value.get(1..).unwrap()) }}
{{ format!("{}{}", value.get(0..1).unwrap_or_default().to_uppercase(), value.get(1..).unwrap_or_default()) }}
</option>
{% endfor %}
{%- endmacro %}
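As an aside (not part of the diff): the capitalization expression that the options macro above and the sort macro below now use can be read as the following standalone Rust sketch. The helper name `capitalize` is illustrative; the point of switching from `unwrap()` to `unwrap_or_default()` is that an empty value renders as an empty string instead of panicking.

```rust
/// Illustrative helper mirroring the template expression
/// `value.get(0..1).unwrap_or_default().to_uppercase()` + `value.get(1..).unwrap_or_default()`.
/// With unwrap_or_default(), an empty input yields "" instead of panicking.
fn capitalize(value: &str) -> String {
    format!(
        "{}{}",
        value.get(0..1).unwrap_or_default().to_uppercase(),
        value.get(1..).unwrap_or_default()
    )
}

fn main() {
    assert_eq!(capitalize("hot"), "Hot");
    assert_eq!(capitalize(""), ""); // the empty case no longer panics
}
```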
@ -9,20 +9,129 @@
{% macro sort(root, methods, selected) -%}
{% for method in methods %}
<a {% if method == selected %}class="selected"{% endif %} href="{{ root }}/{{ method }}">
{{ format!("{}{}", method.get(0..1).unwrap().to_uppercase(), method.get(1..).unwrap()) }}
{{ format!("{}{}", method.get(0..1).unwrap_or_default().to_uppercase(), method.get(1..).unwrap_or_default()) }}
</a>
{% endfor %}
{%- endmacro %}

{% macro search(root, search) -%}
<form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search/" id="searchbox">
<input id="search" type="text" name="q" placeholder="Search" value="{{ search }}">
<form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search" id="searchbox">
<input id="search" type="text" name="q" placeholder="Search" title="Search libreddit" value="{{ search }}">
{% if root != "/r/" && !root.is_empty() %}
<div id="inside">
<input type="checkbox" name="restrict_sr" id="restrict_sr" checked="checked" data-com.bitwarden.browser.user-edited="yes">
<label for="restrict_sr">in {{ root }}</label>
<input type="checkbox" name="restrict_sr" id="restrict_sr">
<label for="restrict_sr" class="search_label" title="Restrict search to this subreddit">in {{ root }}</label>
</div>
{% endif %}
<input type="submit" value="→">
<button class="submit">
<svg width="15" viewBox="0 0 110 100" fill="none" stroke-width="10" stroke-linecap="round">
<path d="M20 50 H100" />
<path d="M75 15 L100 50 L75 85" />
→
</svg>
</button>
</form>
{%- endmacro %}
{%- endmacro %}

{% macro render_flair(flair_parts) -%}
{% for flair_part in flair_parts %}{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}');"></span>{% else if flair_part.flair_part_type == "text" && !flair_part.value.is_empty() %}<span>{{ flair_part.value }}</span>{% endif %}{% endfor %}
{%- endmacro %}

{% macro sub_list(current) -%}
{% if prefs.subscriptions.len() > 0 %}
<details id="feeds">
<summary>Feeds</summary>
<div id="feed_list">
<p>MAIN FEEDS</p>
<a href="/">Home</a>
<a href="/r/popular">Popular</a>
<a href="/r/all">All</a>
<p>REDDIT FEEDS</p>
{% for sub in prefs.subscriptions %}
<a href="/r/{{ sub }}" {% if sub == current %}class="selected"{% endif %}>{{ sub }}</a>
{% endfor %}
</div>
</details>
{% endif %}
{%- endmacro %}

{% macro render_hls_notification(redirect_url) -%}
{% if post.post_type == "video" && !post.media.alt_url.is_empty() && prefs.hide_hls_notification != "on" %}
<div class="post_notification"><p><a href="/settings/update/?use_hls=on&redirect={{ redirect_url }}">Enable HLS</a> to view with audio, or <a href="/settings/update/?hide_hls_notification=on&redirect={{ redirect_url }}">disable this notification</a></p></div>
{% endif %}
{%- endmacro %}

{% macro post_in_list(post) -%}
<div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
<p class="post_header">
{% let community -%}
{% if post.community.starts_with("u_") -%}
{% let community = format!("u/{}", &post.community[2..]) -%}
{% else -%}
{% let community = format!("r/{}", post.community) -%}
{% endif -%}
<a class="post_subreddit" href="/{{ community }}">{{ community }}</a>
<span class="dot">•</span>
<a class="post_author" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
<span class="dot">•</span>
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
</p>
<p class="post_title">
{% if post.flair.flair_parts.len() > 0 %}
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
class="post_flair"
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};"
dir="ltr">{% call render_flair(post.flair.flair_parts) %}</a>
{% endif %}
<a href="{{ post.permalink }}">{{ post.title }}</a>{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
</p>
<!-- POST MEDIA/THUMBNAIL -->
{% if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "image" %}
<a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
<svg
width="{{ post.media.width }}px"
height="{{ post.media.height }}px"
xmlns="http://www.w3.org/2000/svg">
<image width="100%" height="100%" href="{{ post.media.url }}"/>
<desc>
<img alt="Post image" src="{{ post.media.url }}"/>
</desc>
</svg>
</a>
{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
<video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" controls loop autoplay><a href={{ post.media.url }}>Video</a></video>
{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
<video class="post_media_video short" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
<source src="{{ post.media.url }}" type="video/mp4" />
</video>
{% else %}
<video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls autoplay><a href={{ post.media.url }}>Video</a></video>
{% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26"), post.id)) %}
{% endif %}
{% else if post.post_type != "self" %}
<a class="post_thumbnail {% if post.thumbnail.url.is_empty() %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media.url }}{% else %}{{ post.permalink }}{% endif %}" rel="nofollow">
{% if post.thumbnail.url.is_empty() %}
<svg viewBox="0 0 100 106" width="140" height="53" xmlns="http://www.w3.org/2000/svg">
<title>Thumbnail</title>
<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
</svg>
{% else %}
<svg width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
<image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
<desc>
<img alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
</desc>
</svg>
{% endif %}
<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
</a>
{% endif %}

<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
<div class="post_footer">
<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} comments">{{ post.comments.0 }} comments</a>
</div>
</div>
{%- endmacro %}

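One non-obvious line in the post_in_list macro above is the argument passed to render_hls_notification: the current page URL (minus its leading slash) has `&` escaped as `%26` and the post id appended after `%23` (an encoded `#`), so the whole value survives being nested inside the settings redirect query string. A small standalone sketch, with a made-up helper name and example URL:

```rust
/// Illustrative only: mirrors format!("{}%23{}", &self.url[1..].replace("&", "%26"), post.id)
/// from the post_in_list macro. '&' becomes %26 and the post id is appended after an
/// encoded '#', so the redirect target stays intact inside another query string.
fn encode_redirect(current_url: &str, post_id: &str) -> String {
    format!("{}%23{}", &current_url[1..].replace("&", "%26"), post_id)
}

fn main() {
    assert_eq!(
        encode_redirect("/r/rust/hot?t=week&layout=card", "abc123"),
        "r/rust/hot?t=week%26layout=card%23abc123"
    );
}
```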
templates/wall.html (new file, 13 lines)
@ -0,0 +1,13 @@
{% extends "base.html" %}
{% block title %}{{ msg }}{% endblock %}
{% block sortstyle %}{% endblock %}
{% block content %}
<div id="wall">
<h1>{{ title }}</h1>
<br>
<p>{{ msg }}</p>
<form action="/r/{{ sub }}?redir={{ url }}" method="POST">
<input id="save" type="submit" value="Continue">
</form>
</div>
{% endblock %}
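For context, a minimal sketch of how a template like the new wall.html is driven from Rust with Askama. Everything here is hypothetical: the struct name, field values, and the inline template source are stand-ins (the real file extends base.html and would also need whatever fields that layout uses, such as prefs); it only shows how the `{{ title }}`, `{{ msg }}`, `{{ sub }}`, and `{{ url }}` placeholders map onto struct fields.

```rust
use askama::Template;

// Hypothetical sketch, not the project's actual code: an inline stand-in for the
// body of templates/wall.html, rendered from a context struct whose fields match
// the template's placeholders.
#[derive(Template)]
#[template(
    source = "<div id=\"wall\"><h1>{{ title }}</h1><p>{{ msg }}</p><form action=\"/r/{{ sub }}?redir={{ url }}\" method=\"POST\"><input id=\"save\" type=\"submit\" value=\"Continue\"></form></div>",
    ext = "html"
)]
struct WallTemplate {
    title: String,
    msg: String,
    sub: String,
    url: String,
}

fn main() {
    let page = WallTemplate {
        title: "Example title".into(),
        msg: "Example message".into(),
        sub: "example".into(),
        url: "/r/example".into(),
    };
    println!("{}", page.render().unwrap());
}
```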
@ -10,9 +10,13 @@
{% call utils::search(["/r/", sub.as_str()].concat(), "") %}
{% endblock %}

{% block subscriptions %}
{% call utils::sub_list(sub.as_str()) %}
{% endblock %}

{% block body %}
<main>
<div id="column_one">
<div class="panel" id="column_one">
<div id="top">
<a href="/r/{{ sub }}">Posts</a>
<div>Wiki</div>