Merge branch 'master' into feature/fixed-navbar

Commit bb5f2674d1
.devcontainer/devcontainer.json (14 lines, new file)
@@ -0,0 +1,14 @@
{
	"name": "Rust",
	"image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye",
	"features": {
		"ghcr.io/devcontainers/features/docker-in-docker:2": {}
	},
	"portsAttributes": {
		"8080": {
			"label": "libreddit",
			"onAutoForward": "notify"
		}
	},
	"postCreateCommand": "cargo build"
}
.github/workflows/docker-arm.yml (2 lines, vendored)
@@ -33,6 +33,6 @@ jobs:
file: ./Dockerfile.arm
platforms: linux/arm64
push: true
tags: spikecodes/libreddit:arm
tags: libreddit/libreddit:arm
cache-from: type=gha
cache-to: type=gha,mode=max
.github/workflows/docker-armv7.yml (2 lines, vendored)
@@ -36,6 +36,6 @@ jobs:
file: ./Dockerfile.armv7
platforms: linux/arm/v7
push: true
tags: spikecodes/libreddit:armv7
tags: libreddit/libreddit:armv7
cache-from: type=gha
cache-to: type=gha,mode=max
.github/workflows/docker.yml (8 lines, vendored)
@@ -26,6 +26,12 @@ jobs:
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Docker Hub Description
uses: peter-evans/dockerhub-description@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: libreddit/libreddit
- name: Build and push
uses: docker/build-push-action@v2
with:
@@ -33,6 +39,6 @@ jobs:
file: ./Dockerfile
platforms: linux/amd64
push: true
tags: spikecodes/libreddit:latest
tags: libreddit/libreddit:latest
cache-from: type=gha
cache-to: type=gha,mode=max
.github/workflows/rust-tests.yml (22 lines, vendored, new file)
@@ -0,0 +1,22 @@
name: Tests

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v3
    - name: Build
      run: cargo build --verbose
    - name: Run tests
      run: cargo test --verbose
.replit (2 lines)
@@ -1,2 +1,2 @@
run = "while true; do wget -O libreddit https://github.com/spikecodes/libreddit/releases/latest/download/libreddit;chmod +x libreddit;./libreddit -H 63115200;sleep 1;done"
run = "while :; do set -ex; curl -o./libreddit -fsSL -- https://github.com/libreddit/libreddit/releases/latest/download/libreddit ; chmod +x libreddit; set +e; ./libreddit -H 63115200; sleep 1; done"
language = "bash"
CREDITS (91 lines, new file)
@@ -0,0 +1,91 @@
5trongthany <65565784+5trongthany@users.noreply.github.com>
674Y3r <87250374+674Y3r@users.noreply.github.com>
Adrian Lebioda <adrianlebioda@gmail.com>
Alexandre Iooss <erdnaxe@crans.org>
AndreVuillemot160 <84594011+AndreVuillemot160@users.noreply.github.com>
Andrew Kaufman <57281817+andrew-kaufman@users.noreply.github.com>
Artemis <51862164+artemislena@users.noreply.github.com>
Arya K <73596856+gi-yt@users.noreply.github.com>
Austin Huang <im@austinhuang.me>
Basti <pred2k@users.noreply.github.com>
Ben Smith <37027883+smithbm2316@users.noreply.github.com>
BobIsMyManager <ahoumatt@yahoo.com>
Daniel Valentine <Daniel-Valentine@users.noreply.github.com>
Daniel Valentine <daniel@vielle.ws>
Diego Magdaleno <38844659+DiegoMagdaleno@users.noreply.github.com>
Dyras <jevwmguf@duck.com>
Edward <101938856+EdwardLangdon@users.noreply.github.com>
Esmail EL BoB <github.defilable@simplelogin.co>
FireMasterK <20838718+FireMasterK@users.noreply.github.com>
George Roubos <cowkingdom@hotmail.com>
Harsh Mishra <erbeusgriffincasper@gmail.com>
JPyke3 <pyke.jacob1@gmail.com>
Johannes Schleifenbaum <johannes@js-webcoding.de>
Josiah <70736638+fres7h@users.noreply.github.com>
Kavin <20838718+FireMasterK@users.noreply.github.com>
Kazi <kzshantonu@users.noreply.github.com>
Kieran <42723993+EnderDev@users.noreply.github.com>
Kieran <kieran@dothq.co>
Kyle Roth <kylrth@gmail.com>
Laurențiu Nicola <lnicola@users.noreply.github.com>
Lena <102762572+MarshDeer@users.noreply.github.com>
Macic <46872282+Macic-Dev@users.noreply.github.com>
Mario A <10923513+Midblyte@users.noreply.github.com>
Matthew Crossman <matt@crossman.page>
Matthew E <matt@matthew.science>
Matthew Esposito <matt@matthew.science>
Mennaruuk <52135169+Mennaruuk@users.noreply.github.com>
NKIPSC <15067635+NKIPSC@users.noreply.github.com>
Nainar <nainar.mb@gmail.com>
Nathan Moos <moosingin3space@gmail.com>
Nicholas Christopher <nchristopher@tuta.io>
Nick Lowery <ClockVapor@users.noreply.github.com>
Nico <github@dr460nf1r3.org>
Om G <34579088+OxyMagnesium@users.noreply.github.com>
RiversideRocks <59586759+RiversideRocks@users.noreply.github.com>
Robin <8597693+robrobinbin@users.noreply.github.com>
Ruben Elshof <15641671+rubenelshof@users.noreply.github.com>
Rupert Angermeier <rangermeier@users.noreply.github.com>
Scoder12 <34356756+Scoder12@users.noreply.github.com>
Slayer <51095261+GhostSlayer@users.noreply.github.com>
Soheb <somoso@users.noreply.github.com>
Spenser Black <spenserblack01@gmail.com>
Spike <19519553+spikecodes@users.noreply.github.com>
The TwilightBlood <hwengerstickel@protonmail.com>
TheCultLeader666 <65368815+TheCultLeader666@users.noreply.github.com>
TheFrenchGhosty <47571719+TheFrenchGhosty@users.noreply.github.com>
Tokarak <63452145+Tokarak@users.noreply.github.com>
Tsvetomir Bonev <invakid404@riseup.net>
Vladislav Nepogodin <nepogodin.vlad@gmail.com>
Walkx <walkxnl@gmail.com>
Wichai <1482605+Chengings@users.noreply.github.com>
Zach <72994911+zachjmurphy@users.noreply.github.com>
accountForIssues <52367365+accountForIssues@users.noreply.github.com>
alefvanoon <53198048+alefvanoon@users.noreply.github.com>
alyaeanyx <alexandra.hollmeier@mailbox.org>
arthomnix <35371030+arthomnix@users.noreply.github.com>
curlpipe <11898833+curlpipe@users.noreply.github.com>
dacousb <53299044+dacousb@users.noreply.github.com>
dbrennand <52419383+dbrennand@users.noreply.github.com>
dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
elliot <75391956+ellieeet123@users.noreply.github.com>
erdnaxe <erdnaxe@users.noreply.github.com>
git-bruh <e817509a-8ee9-4332-b0ad-3a6bdf9ab63f@aleeas.com>
gmnsii <95436780+gmnsii@users.noreply.github.com>
guaddy <67671414+guaddy@users.noreply.github.com>
igna <igna@intent.cool>
imabritishcow <bcow@protonmail.com>
laazyCmd <laazy.pr00gramming@protonmail.com>
mikupls <93015331+mikupls@users.noreply.github.com>
obeho <71698631+obeho@users.noreply.github.com>
obscurity <z@x4.pm>
robin <8597693+robrobinbin@users.noreply.github.com>
robrobinbin <8597693+robrobinbin@users.noreply.github.com>
robrobinbin <>
robrobinbin <robindepril@gmail.com>
somini <somini@users.noreply.github.com>
somoso <github@soheb.anonaddy.com>
spikecodes <19519553+spikecodes@users.noreply.github.com>
sybenx <syb@duck.com>
tirz <36501933+tirz@users.noreply.github.com>
xatier <xatierlike@gmail.com>
Cargo.lock (807 lines, generated)
File diff suppressed because it is too large.
Cargo.toml (42 lines)
@@ -3,25 +3,37 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
version = "0.22.8"
version = "0.27.1"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2021"

[dependencies]
askama = { version = "0.11.1", default-features = false }
async-recursion = "1.0.0"
cached = "0.34.0"
clap = { version = "3.1.18", default-features = false, features = ["std"] }
regex = "1.5.6"
serde = { version = "1.0.137", features = ["derive"] }
cookie = "0.16.0"
cached = "0.42.0"
clap = { version = "4.1.1", default-features = false, features = ["std", "env"] }
regex = "1.7.1"
serde = { version = "1.0.152", features = ["derive"] }
cookie = "0.16.2"
futures-lite = "1.12.0"
hyper = { version = "0.14.18", features = ["full"] }
hyper-rustls = "0.23.0"
percent-encoding = "2.1.0"
hyper = { version = "0.14.23", features = ["full"] }
hyper-rustls = "0.23.2"
percent-encoding = "2.2.0"
route-recognizer = "0.3.1"
serde_json = "1.0.81"
tokio = { version = "1.18.2", features = ["full"] }
time = "0.3.9"
url = "2.2.2"
rust-embed = "6.4.0"
serde_json = "1.0.91"
tokio = { version = "1.24.1", features = ["full"] }
time = "0.3.17"
url = "2.3.1"
rust-embed = { version = "6.4.2", features = ["include-exclude"] }
libflate = "1.2.0"
brotli = { version = "3.3.4", features = ["std"] }
toml = "0.5.10"
once_cell = "1.17.0"

[dev-dependencies]
lipsum = "0.8.2"
sealed_test = "1.0.0"

[profile.release]
codegen-units = 1
lto = true
strip = true
Dockerfile
@@ -3,13 +3,18 @@
####################################################################################################
FROM rust:alpine AS builder

RUN apk add --no-cache g++
RUN apk add --no-cache g++ git

WORKDIR /usr/src/libreddit

COPY . .

RUN cargo install --path .
# net.git-fetch-with-cli is specified in order to prevent a potential OOM kill
# in low memory environments. See:
# https://users.rust-lang.org/t/cargo-uses-too-much-memory-being-run-in-qemu/76531
# This is tracked under issue #641. This also requires us to install git in the
# builder.
RUN cargo install --config net.git-fetch-with-cli=true --path .

####################################################################################################
## Final image
README.md (144 lines)
@@ -29,91 +29,17 @@ I appreciate any donations! Your support allows me to continue developing Libred

# Instances

Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new) to have your [selfhosted instance](#deployment) listed here!

🔗 **Want to automatically redirect Reddit links to Libreddit? Use [LibRedirect](https://github.com/libredirect/libredirect) or [Privacy Redirect](https://github.com/SimonBrazell/privacy-redirect)!**

| Website | Country | Cloudflare |
|-|-|-|
| [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇩🇪 DE | ✅ |
| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | |
| [libreddit.40two.app](https://libreddit.40two.app) | 🇳🇱 NL | |
| [reddit.invak.id](https://reddit.invak.id) | 🇧🇬 BG | |
| [reddit.phii.me](https://reddit.phii.me) | 🇺🇸 US | |
| [lr.riverside.rocks](https://lr.riverside.rocks) | 🇺🇸 US | |
| [libreddit.strongthany.cc](https://libreddit.strongthany.cc) | 🇺🇸 US | |
| [libreddit.database.red](https://libreddit.database.red) | 🇺🇸 US | ✅ |
| [libreddit.privacy.com.de](https://libreddit.privacy.com.de) | 🇩🇪 DE | |
| [libreddit.domain.glass](https://libreddit.domain.glass) | 🇺🇸 US | ✅ |
| [libreddit.sugoma.tk](https://libreddit.sugoma.tk) | 🇺🇸 US | |
| [libreddit.jamiethalacker.dev](https://libreddit.jamiethalacker.dev) | 🇺🇸 US | ✅ |
| [reddit.artemislena.eu](https://reddit.artemislena.eu) | 🇩🇪 DE | |
| [r.nf](https://r.nf) | 🇩🇪 DE | ✅ |
| [libreddit.awesomehub.io](https://libreddit.awesomehub.io) | 🇫🇮 FI | |
| [libreddit.some-things.org](https://libreddit.some-things.org) | 🇨🇭 CH | |
| [reddit.stuehieyr.com](https://reddit.stuehieyr.com) | 🇩🇪 DE | |
| [lr.mint.lgbt](https://lr.mint.lgbt) | 🇨🇦 CA | |
| [libreddit.alefvanoon.xyz](https://libreddit.alefvanoon.xyz) | 🇺🇸 US | ✅ |
| [libreddit.igna.rocks](https://libreddit.igna.rocks) | 🇺🇸 US | |
| [libreddit.autarkic.org](https://libreddit.autarkic.org) | 🇺🇸 US | |
| [libreddit.flux.industries](https://libreddit.flux.industries) | 🇩🇪 DE | ✅ |
| [libreddit.drivet.xyz](https://libreddit.drivet.xyz) | 🇵🇱 PL | |
| [lr.oversold.host](https://lr.oversold.host) | 🇱🇺 LU | |
| [libreddit.de](https://libreddit.de) | 🇩🇪 DE | |
| [libreddit.pussthecat.org](https://libreddit.pussthecat.org) | 🇩🇪 DE | |
| [libreddit.mutahar.rocks](https://libreddit.mutahar.rocks) | 🇫🇷 FR | |
| [libreddit.northboot.xyz](https://libreddit.northboot.xyz) | 🇩🇪 DE | |
| [leddit.xyz](https://leddit.xyz) | 🇺🇸 US | |
| [de.leddit.xyz](https://de.leddit.xyz) | 🇩🇪 DE | |
| [lr.cowfee.moe](https://lr.cowfee.moe) | 🇺🇸 US | |
| [libreddit.hu](https://libreddit.hu) | 🇫🇮 FI | ✅ |
| [libreddit.totaldarkness.net](https://libreddit.totaldarkness.net) | 🇨🇦 CA | |
| [libreddit.esmailelbob.xyz](https://libreddit.esmailelbob.xyz) | 🇨🇦 CA | |
| [lr.vern.cc](https://lr.vern.cc) | 🇵🇱 PL | |
| [libreddit.nl](https://libreddit.nl) | 🇳🇱 NL | |
| [lr.stilic.ml](https://lr.stilic.ml) | 🇫🇷 FR | ✅ |
| [reddi.tk](https://reddi.tk) | 🇺🇸 US | ✅ |
| [libreddit.bus-hit.me](https://libreddit.bus-hit.me) | 🇨🇦 CA | |
| [libreddit.datatunnel.xyz](https://libreddit.datatunnel.xyz) | 🇫🇮 FI | |
| [libreddit.crewz.me](https://libreddit.crewz.me) | 🇳🇱 NL | ✅ |
| [r.walkx.org](https://r.walkx.org) | 🇳🇱 NL | ✅ |
| [libreddit.kylrth.com](https://libreddit.kylrth.com) | 🇨🇦 CA | |
| [libreddit.yonalee.eu](https://libreddit.yonalee.eu) | 🇱🇺 LU | ✅ |
| [libreddit.winscloud.net](https://libreddit.winscloud.net) | 🇹🇭 TH | ✅ |
| [libreddit.tiekoetter.com](https://libreddit.tiekoetter.com) | 🇩🇪 DE | |
| [reddit.rtrace.io](https://reddit.rtrace.io) | 🇩🇪 DE | |
| [libreddit.lunar.icu](https://libreddit.lunar.icu) | 🇩🇪 DE | ✅ |
| [libreddit.privacydev.net](https://libreddit.privacydev.net) | 🇺🇸 US | |
| [libreddit.notyourcomputer.net](https://libreddit.notyourcomputer.net) | 🇺🇸 US | |
| [r.ahwx.org](https://r.ahwx.org) | 🇳🇱 NL | ✅ |
| [bob.fr.to](https://bob.fr.to) | 🇺🇸 US | |
| [reddit.beparanoid.de](https://reddit.beparanoid.de) | 🇨🇭 CH | |
| [libreddit.dcs0.hu](https://libreddit.dcs0.hu) | 🇭🇺 HU | |
| [reddit.dr460nf1r3.org](https://reddit.dr460nf1r3.org) | 🇩🇪 DE | ✅ |
| [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |
| [fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion](http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion) | 🇩🇪 DE | |
| [kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion](http://kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion) | 🇳🇱 NL | |
| [inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion](http://inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion) | 🇨🇭 CH | |
| [liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion](http://liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion) | 🇩🇪 DE | |
| [kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion](http://kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion) | 🇺🇸 US | |
| [ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion](http://ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion) | 🇩🇪 DE | |
| [ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion](http://ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion) | 🇺🇸 US | |
| [libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion](http://libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion) | 🇺🇸 US | |
| [libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion](http://libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion) | 🇪🇬 EG | |
| [ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion](http://ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion) | 🇩🇪 DE | |
| [lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion](http://lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion) | 🇨🇦 CA | |
| [libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion](http://libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion) | 🇨🇦 CA | |
| [reddit.prnoid54e44a4bduq5due64jkk7wcnkxcp5kv3juncm7veptjcqudgyd.onion](http://reddit.prnoid54e44a4bduq5due64jkk7wcnkxcp5kv3juncm7veptjcqudgyd.onion) | 🇨🇭 CH | |
[Follow this link](https://github.com/libreddit/libreddit-instances/blob/master/instances.md) for an up-to-date table of instances in markdown format. This list is also available as [a machine-readable JSON](https://github.com/libreddit/libreddit-instances/blob/master/instances.json).

A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare.com). The checkmark will not be listed for a site that uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.
Both files are part of the [libreddit-instances](https://github.com/libreddit/libreddit-instances) repository. To contribute your [self-hosted instance](#deployment) to the list, see the [libreddit-instances README](https://github.com/libreddit/libreddit-instances/blob/master/README.md).

---

# About

Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/spikecodes/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).
Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/libreddit/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/libreddit/libreddit).

## Built with
@@ -125,7 +51,7 @@ Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [D
## Info
Libreddit hopes to provide an easier way to browse Reddit, without the ads, trackers, and bloat. Libreddit was inspired by other alternative front-ends to popular services such as [Invidious](https://github.com/iv-org/invidious) for YouTube, [Nitter](https://github.com/zedeus/nitter) for Twitter, and [Bibliogram](https://sr.ht/~cadence/bibliogram/) for Instagram.

Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/spikecodes/libreddit/issues).
Libreddit currently implements most of Reddit's (signed-out) functionalities but still lacks [a few features](https://github.com/libreddit/libreddit/issues).

## How does it compare to Teddit?

@@ -143,15 +69,15 @@ This section outlines how Libreddit compares to Reddit.

## Speed

Last tested Jan 17, 2021.
Last tested Nov 11, 2022.

Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Flibredd.it), [Reddit Report](https://lighthouse-dot-webdotdevsite.appspot.com/lh/html?url=https%3A%2F%2Fwww.reddit.com%2F)).
Results from Google PageSpeed Insights ([Libreddit Report](https://pagespeed.web.dev/report?url=https%3A%2F%2Flibreddit.spike.codes%2F), [Reddit Report](https://pagespeed.web.dev/report?url=https://www.reddit.com)).

| | Libreddit | Reddit |
|------------------------|---------------|------------|
| Requests | 20 | 70 |
| Resource Size (card ui)| 1,224 KiB | 1,690 KiB |
| Time to Interactive | **1.5 s** | **11.2 s** |
|------------------------|-------------|-----------|
| Requests | 60 | 83 |
| Speed Index | 2.0s | 10.4s |
| Time to Interactive | **2.8s** | **12.4s** |

## Privacy
@@ -210,21 +136,21 @@ cargo install libreddit

## 2) Docker

Deploy the [Docker image](https://hub.docker.com/r/spikecodes/libreddit) of Libreddit:
Deploy the [Docker image](https://hub.docker.com/r/libreddit/libreddit) of Libreddit:
```
docker pull spikecodes/libreddit
docker run -d --name libreddit -p 8080:8080 spikecodes/libreddit
docker pull libreddit/libreddit
docker run -d --name libreddit -p 8080:8080 libreddit/libreddit
```

Deploy using a different port (in this case, port 80):
```
docker pull spikecodes/libreddit
docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
docker pull libreddit/libreddit
docker run -d --name libreddit -p 80:8080 libreddit/libreddit
```

To deploy on `arm64` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:arm`.
To deploy on `arm64` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:arm`.

To deploy on `armv7` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:armv7`.
To deploy on `armv7` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:armv7`.

## 3) AUR

@@ -236,14 +162,14 @@ yay -S libreddit-git

## 4) GitHub Releases

If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/spikecodes/libreddit/releases/latest).
If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/libreddit/libreddit/releases/latest).

## 5) Replit/Heroku/Glitch

**Note:** These are free hosting options but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.

<a href="https://repl.it/github/spikecodes/libreddit"><img src="https://repl.it/badge/github/spikecodes/libreddit" alt="Run on Repl.it" height="32" /></a>
[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/spikecodes/libreddit)
<a href="https://repl.it/github/libreddit/libreddit"><img src="https://repl.it/badge/github/libreddit/libreddit" alt="Run on Repl.it" height="32" /></a>
[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/libreddit/libreddit)
[![Remix on Glitch](https://cdn.glitch.com/2703baf2-b643-4da7-ab91-7ee2a2d00b5b%2Fremix-button-v2.svg)](https://glitch.com/edit/#!/remix/libreddit)

---
@@ -256,24 +182,40 @@ Once installed, deploy Libreddit to `0.0.0.0:8080` by running:
libreddit
```

## Change Default Settings
## Instance settings

Assign a default value for each setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
Assign a default value for each instance-specific setting by passing environment variables to Libreddit in the format `LIBREDDIT_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.

|Name|Possible values|Default value|Description|
|-|-|-|-|
| `SFW_ONLY` | `["on", "off"]` | `off` | Enables SFW-only mode for the instance, i.e. all NSFW content is filtered. |

## Default User Settings

Assign a default value for each user-modifiable setting by passing environment variables to Libreddit in the format `LIBREDDIT_DEFAULT_{Y}`. Replace `{Y}` with the setting name (see list below) in capital letters.

| Name | Possible values | Default value |
|-------------------------|-----------------------------------------------------------------------------------------------------|---------------|
| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox"]` | `system` |
| `THEME` | `["system", "light", "dark", "black", "dracula", "nord", "laserwave", "violet", "gold", "rosebox", "gruvboxdark", "gruvboxlight"]` | `system` |
| `FRONT_PAGE` | `["default", "popular", "all"]` | `default` |
| `LAYOUT` | `["card", "clean", "compact"]` | `card` |
| `WIDE` | `["on", "off"]` | `off` |
| `COMMENT_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
| `POST_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
| `POST_SORT` | `["hot", "new", "top", "rising", "controversial"]` | `hot` |
| `COMMENT_SORT` | `["confidence", "top", "new", "controversial", "old"]` | `confidence` |
| `SHOW_NSFW` | `["on", "off"]` | `off` |
| `BLUR_NSFW` | `["on", "off"]` | `off` |
| `USE_HLS` | `["on", "off"]` | `off` |
| `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` |
| `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` |
| `FIXED_NAVBAR` | `["on", "off"]` | `on` |

You can also configure Libreddit with a configuration file. An example `libreddit.toml` can be found below:

```toml
LIBREDDIT_DEFAULT_WIDE = "on"
LIBREDDIT_DEFAULT_USE_HLS = "on"
```

### Examples

```bash
@@ -286,7 +228,7 @@ LIBREDDIT_DEFAULT_WIDE=on LIBREDDIT_DEFAULT_THEME=dark libreddit -r

## Proxying using NGINX

**NOTE** If you're [proxying Libreddit through an NGINX Reverse Proxy](https://github.com/spikecodes/libreddit/issues/122#issuecomment-782226853), add
**NOTE** If you're [proxying Libreddit through an NGINX Reverse Proxy](https://github.com/libreddit/libreddit/issues/122#issuecomment-782226853), add
```nginx
proxy_http_version 1.1;
```
@@ -314,7 +256,7 @@ Before=nginx.service
## Building

```
git clone https://github.com/spikecodes/libreddit
git clone https://github.com/libreddit/libreddit
cd libreddit
cargo run
```
app.json (6 lines)
@@ -32,11 +32,17 @@
"LIBREDDIT_DEFAULT_SHOW_NSFW": {
"required": false
},
"LIBREDDIT_DEFAULT_BLUR_NSFW": {
"required": false
},
"LIBREDDIT_USE_HLS": {
"required": false
},
"LIBREDDIT_HIDE_HLS_NOTIFICATION": {
"required": false
},
"LIBREDDIT_SFW_ONLY": {
"required": false
}
}
}
scripts/gen-credits.sh (15 lines, new executable file)
@@ -0,0 +1,15 @@
#!/usr/bin/env bash

# This script generates the CREDITS file in the repository root, which
# contains a list of all contributors to the Libreddit project.
#
# We use git-log to surface the names and emails of all authors and committers,
# and grep will filter any automated commits due to GitHub.

set -o pipefail

cd "$(dirname "${BASH_SOURCE[0]}")/../" || exit 1
git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' master \
  | sort -t'<' -u -k1,1 -k2,2 \
  | grep -Fv -- 'GitHub <noreply@github.com>' \
  > CREDITS
src/client.rs (163 lines)
@@ -1,12 +1,55 @@
use cached::proc_macro::cached;
use futures_lite::{future::Boxed, FutureExt};
use hyper::{body::Buf, client, Body, Request, Response, Uri};
use hyper::{body, body::Buf, client, header, Body, Method, Request, Response, Uri};
use libflate::gzip;
use percent_encoding::{percent_encode, CONTROLS};
use serde_json::Value;
use std::result::Result;
use std::{io, result::Result};

use crate::dbg_msg;
use crate::server::RequestExt;

const REDDIT_URL_BASE: &str = "https://www.reddit.com";

/// Gets the canonical path for a resource on Reddit. This is accomplished by
/// making a `HEAD` request to Reddit at the path given in `path`.
///
/// This function returns `Ok(Some(path))`, where `path`'s value is identical
/// to that of the value of the argument `path`, if Reddit responds to our
/// `HEAD` request with a 2xx-family HTTP code. It will also return an
/// `Ok(Some(String))` if Reddit responds to our `HEAD` request with a
/// `Location` header in the response, and the HTTP code is in the 3xx-family;
/// the `String` will contain the path as reported in `Location`. The return
/// value is `Ok(None)` if Reddit responded with a 3xx, but did not provide a
/// `Location` header. An `Err(String)` is returned if Reddit responds with a
/// 429, or if we were unable to decode the value in the `Location` header.
#[cached(size = 1024, time = 600, result = true)]
pub async fn canonical_path(path: String) -> Result<Option<String>, String> {
	let res = reddit_head(path.clone(), true).await?;

	if res.status() == 429 {
		return Err("Too many requests.".to_string());
	};

	// If Reddit responds with a 2xx, then the path is already canonical.
	if res.status().to_string().starts_with('2') {
		return Ok(Some(path));
	}

	// If Reddit responds with anything other than 3xx (except for the 2xx as
	// above), return a None.
	if !res.status().to_string().starts_with('3') {
		return Ok(None);
	}

	Ok(
		res
			.headers()
			.get(header::LOCATION)
			.map(|val| percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string()),
	)
}

pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
	let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());

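As a rough illustration (not part of this commit's changes), a caller is expected to consume `canonical_path` the way the short-link route handler added to src/main.rs later in this diff does; the `resolve_short_link` helper below is hypothetical.

```rust
// Hypothetical wrapper: resolve a short post ID such as "abc123" to its
// canonical Reddit path, or report why it could not be resolved.
async fn resolve_short_link(id: &str) -> Result<String, String> {
	match canonical_path(format!("/{}", id)).await? {
		Some(path) => Ok(path),
		None => Err("Post ID is invalid. It may point to a post on a community that has been banned.".to_string()),
	}
}
```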
@@ -62,20 +105,39 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
.map_err(|e| e.to_string())
}

fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
/// Makes a GET request to Reddit at `path`. By default, this will honor HTTP
/// 3xx codes Reddit returns and will automatically redirect.
fn reddit_get(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
request(&Method::GET, path, true, quarantine)
}

/// Makes a HEAD request to Reddit at `path`. This will not follow redirects.
fn reddit_head(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
request(&Method::HEAD, path, false, quarantine)
}

/// Makes a request to Reddit. If `redirect` is `true`, request_with_redirect
/// will recurse on the URL that Reddit provides in the Location HTTP header
/// in its response.
fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
// Build Reddit URL from path.
let url = format!("{}{}", REDDIT_URL_BASE, path);

// Prepare the HTTPS connector.
let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_or_http().enable_http1().build();

// Construct the hyper client from the HTTPS connector.
let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);

// Build request
// Build request to Reddit. When making a GET, request gzip compression.
// (Reddit doesn't do brotli yet.)
let builder = Request::builder()
.method("GET")
.method(method)
.uri(&url)
.header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION")))
.header("Host", "www.reddit.com")
.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
.header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" })
.header("Accept-Language", "en-US,en;q=0.5")
.header("Connection", "keep-alive")
.header("Cookie", if quarantine { "_options=%7B%22pref_quarantine_optin%22%3A%20true%7D" } else { "" })
@@ -84,26 +146,94 @@ fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String
async move {
match builder {
Ok(req) => match client.request(req).await {
Ok(response) => {
Ok(mut response) => {
// Reddit may respond with a 3xx. Decide whether or not to
// redirect based on caller params.
if response.status().to_string().starts_with('3') {
request(
if !redirect {
return Ok(response);
};

return request(
method,
response
.headers()
.get("Location")
.get(header::LOCATION)
.map(|val| {
let new_url = percent_encode(val.as_bytes(), CONTROLS).to_string();
format!("{}{}raw_json=1", new_url, if new_url.contains('?') { "&" } else { "?" })
// We need to make adjustments to the URI
// we get back from Reddit. Namely, we
// must:
//
// 1. Remove the authority (e.g.
// https://www.reddit.com) that may be
// present, so that we recurse on the
// path (and query parameters) as
// required.
//
// 2. Percent-encode the path.
let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string();
format!("{}{}raw_json=1", new_path, if new_path.contains('?') { "&" } else { "?" })
})
.unwrap_or_default()
.to_string(),
true,
quarantine,
)
.await
} else {
.await;
};

match response.headers().get(header::CONTENT_ENCODING) {
// Content not compressed.
None => Ok(response),

// Content encoded (hopefully with gzip).
Some(hdr) => {
match hdr.to_str() {
Ok(val) => match val {
"gzip" => {}
"identity" => return Ok(response),
_ => return Err("Reddit response was encoded with an unsupported compressor".to_string()),
},
Err(_) => return Err("Reddit response was invalid".to_string()),
}

// We get here if the body is gzip-compressed.

// The body must be something that implements
// std::io::Read, hence the conversion to
// bytes::buf::Buf and then transformation into a
// Reader.
let mut decompressed: Vec<u8>;
{
let mut aggregated_body = match body::aggregate(response.body_mut()).await {
Ok(b) => b.reader(),
Err(e) => return Err(e.to_string()),
};

let mut decoder = match gzip::Decoder::new(&mut aggregated_body) {
Ok(decoder) => decoder,
Err(e) => return Err(e.to_string()),
};

decompressed = Vec::<u8>::new();
if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
return Err(e.to_string());
};
}

response.headers_mut().remove(header::CONTENT_ENCODING);
response.headers_mut().insert(header::CONTENT_LENGTH, decompressed.len().into());
*(response.body_mut()) = Body::from(decompressed);

Ok(response)
}
}
Err(e) => Err(e.to_string()),
}
Err(e) => {
dbg_msg!("{} {}: {}", method, path, e);

Err(e.to_string())
}
},
Err(_) => Err("Post url contains non-ASCII characters".to_string()),
}
@@ -114,9 +244,6 @@ fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String
// Make a request to a Reddit API and parse the JSON response
#[cached(size = 100, time = 30, result = true)]
pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
// Build Reddit url from path
let url = format!("https://www.reddit.com{}", path);

// Closure to quickly build errors
let err = |msg: &str, e: String| -> Result<Value, String> {
// eprintln!("{} - {}: {}", url, msg, e);
@@ -124,7 +251,7 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
};

// Fetch the url...
match request(url.clone(), quarantine).await {
match reddit_get(path.clone(), quarantine).await {
Ok(response) => {
let status = response.status();

@@ -142,7 +269,7 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
.as_str()
.unwrap_or_else(|| {
json["message"].as_str().unwrap_or_else(|| {
eprintln!("{} - Error parsing reddit error", url);
eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path);
"Error parsing reddit error"
})
})
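For orientation, here is a condensed, hypothetical sketch of how the route handlers later in this diff (src/duplicates.rs, src/post.rs) consume `json()`: build a `.json` API path with `raw_json=1`, await the parsed `serde_json::Value`, and index into it. The handler name and endpoint below are illustrative only.

```rust
// Hypothetical caller of client::json(); mirrors the pattern used by the
// handlers added further down in this commit.
async fn subreddit_description(sub: &str) -> Result<String, String> {
	let path = format!("/r/{}/about.json?raw_json=1", sub);
	let response = json(path, false).await?;
	Ok(response["data"]["public_description"].as_str().unwrap_or_default().to_string())
}
```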
src/config.rs (130 lines, new file)
@@ -0,0 +1,130 @@
use once_cell::sync::Lazy;
use std::{env::var, fs::read_to_string};

// Waiting for https://github.com/rust-lang/rust/issues/74465 to land, so we
// can reduce reliance on once_cell.
//
// This is the local static that is initialized at runtime (technically at
// first request) and contains the instance settings.
static CONFIG: Lazy<Config> = Lazy::new(Config::load);

/// Stores the configuration parsed from the environment variables and the
/// config file. `Config::Default()` contains None for each setting.
#[derive(Default, serde::Deserialize)]
pub struct Config {
	#[serde(rename = "LIBREDDIT_SFW_ONLY")]
	sfw_only: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_THEME")]
	default_theme: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_FRONT_PAGE")]
	default_front_page: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_LAYOUT")]
	default_layout: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_WIDE")]
	default_wide: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_COMMENT_SORT")]
	default_comment_sort: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_POST_SORT")]
	default_post_sort: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_SHOW_NSFW")]
	default_show_nsfw: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_BLUR_NSFW")]
	default_blur_nsfw: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_USE_HLS")]
	default_use_hls: Option<String>,

	#[serde(rename = "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION")]
	default_hide_hls_notification: Option<String>,
}

impl Config {
	/// Load the configuration from the environment variables and the config file.
	/// In the case that there are no environment variables set and there is no
	/// config file, this function returns a Config that contains all None values.
	pub fn load() -> Self {
		// Read from libreddit.toml config file. If for any reason, it fails, the
		// default `Config` is used (all None values)
		let config: Config = toml::from_str(&read_to_string("libreddit.toml").unwrap_or_default()).unwrap_or_default();
		// This function defines the order of preference - first check for
		// environment variables with "LIBREDDIT", then check the config, then if
		// both are `None`, return a `None` via the `map_or_else` function
		let parse = |key: &str| -> Option<String> { var(key).ok().map_or_else(|| get_setting_from_config(key, &config), Some) };
		Self {
			sfw_only: parse("LIBREDDIT_SFW_ONLY"),
			default_theme: parse("LIBREDDIT_DEFAULT_THEME"),
			default_front_page: parse("LIBREDDIT_DEFAULT_FRONT_PAGE"),
			default_layout: parse("LIBREDDIT_DEFAULT_LAYOUT"),
			default_post_sort: parse("LIBREDDIT_DEFAULT_POST_SORT"),
			default_wide: parse("LIBREDDIT_DEFAULT_WIDE"),
			default_comment_sort: parse("LIBREDDIT_DEFAULT_COMMENT_SORT"),
			default_show_nsfw: parse("LIBREDDIT_DEFAULT_SHOW_NSFW"),
			default_blur_nsfw: parse("LIBREDDIT_DEFAULT_BLUR_NSFW"),
			default_use_hls: parse("LIBREDDIT_DEFAULT_USE_HLS"),
			default_hide_hls_notification: parse("LIBREDDIT_DEFAULT_HIDE_HLS"),
		}
	}
}

fn get_setting_from_config(name: &str, config: &Config) -> Option<String> {
	match name {
		"LIBREDDIT_SFW_ONLY" => config.sfw_only.clone(),
		"LIBREDDIT_DEFAULT_THEME" => config.default_theme.clone(),
		"LIBREDDIT_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(),
		"LIBREDDIT_DEFAULT_LAYOUT" => config.default_layout.clone(),
		"LIBREDDIT_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(),
		"LIBREDDIT_DEFAULT_POST_SORT" => config.default_post_sort.clone(),
		"LIBREDDIT_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(),
		"LIBREDDIT_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(),
		"LIBREDDIT_DEFAULT_USE_HLS" => config.default_use_hls.clone(),
		"LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(),
		"LIBREDDIT_DEFAULT_WIDE" => config.default_wide.clone(),
		_ => None,
	}
}

/// Retrieves setting from environment variable or config file.
pub(crate) fn get_setting(name: &str) -> Option<String> {
	get_setting_from_config(name, &CONFIG)
}

#[cfg(test)]
use {sealed_test::prelude::*, std::fs::write};

#[test]
#[sealed_test(env = [("LIBREDDIT_SFW_ONLY", "on")])]
fn test_env_var() {
	assert!(crate::utils::sfw_only())
}

#[test]
#[sealed_test]
fn test_config() {
	let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#;
	write("libreddit.toml", config_to_write).unwrap();
	assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("best".into()));
}

#[test]
#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])]
fn test_env_config_precedence() {
	let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#;
	write("libreddit.toml", config_to_write).unwrap();
	assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
}

#[test]
#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])]
fn test_alt_env_config_precedence() {
	let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#;
	write("libreddit.toml", config_to_write).unwrap();
	assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into()))
}
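The precedence rule implemented by `Config::load()` and exercised by the tests above can be reduced to the sketch below; `from_file` stands in for `get_setting_from_config()` and is only illustrative.

```rust
use std::env::var;

// An environment variable takes priority over the value read from
// libreddit.toml; if neither source defines the setting, the result is None.
fn resolve(key: &str, from_file: Option<String>) -> Option<String> {
	var(key).ok().map_or_else(|| from_file, Some)
}
```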
src/duplicates.rs (236 lines, new file)
@@ -0,0 +1,236 @@
// Handler for post duplicates.

use crate::client::json;
use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, setting, template, Post, Preferences};

use askama::Template;
use hyper::{Body, Request, Response};
use serde_json::Value;
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::vec::Vec;

/// DuplicatesParams contains the parameters in the URL.
struct DuplicatesParams {
	before: String,
	after: String,
	sort: String,
}

/// DuplicatesTemplate defines an Askama template for rendering duplicate
/// posts.
#[derive(Template)]
#[template(path = "duplicates.html")]
struct DuplicatesTemplate {
	/// params contains the relevant request parameters.
	params: DuplicatesParams,

	/// post is the post whose ID is specified in the request URL. Note that
	/// this is not necessarily the "original" post.
	post: Post,

	/// duplicates is the list of posts that, per Reddit, are duplicates of
	/// Post above.
	duplicates: Vec<Post>,

	/// prefs are the user preferences.
	prefs: Preferences,

	/// url is the request URL.
	url: String,

	/// num_posts_filtered counts how many posts were filtered from the
	/// duplicates list.
	num_posts_filtered: u64,

	/// all_posts_filtered is true if every duplicate was filtered. This is an
	/// edge case but can still happen.
	all_posts_filtered: bool,
}

/// Make the GET request to Reddit. It assumes `req` is the appropriate Reddit
/// REST endpoint for enumerating post duplicates.
pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
	let path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
	let sub = req.param("sub").unwrap_or_default();
	let quarantined = can_access_quarantine(&req, &sub);

	// Log the request in debugging mode
	#[cfg(debug_assertions)]
	dbg!(req.param("id").unwrap_or_default());

	// Send the GET, and await JSON.
	match json(path, quarantined).await {
		// Process response JSON.
		Ok(response) => {
			let post = parse_post(&response[0]["data"]["children"][0]).await;

			// Return the landing page if Reddit deems this post NSFW but we
			// have disabled the display of NSFW content, or if the instance
			// is SFW-only.
			if post.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
				return Ok(nsfw_landing(req).await.unwrap_or_default());
			}

			let filters = get_filters(&req);
			let (duplicates, num_posts_filtered, all_posts_filtered) = parse_duplicates(&response[1], &filters).await;

			// These are the values for the "before=", "after=", and "sort="
			// query params, respectively.
			let mut before: String = String::new();
			let mut after: String = String::new();
			let mut sort: String = String::new();

			// FIXME: We have to perform a kludge to work around a Reddit API
			// bug.
			//
			// The JSON object in "data" will never contain a "before" value so
			// it is impossible to use it to determine our position in a
			// listing. We'll make do by getting the ID of the first post in
			// the listing, setting that as our "before" value, and ask Reddit
			// to give us a batch of duplicate posts up to that post.
			//
			// Likewise, if we provide a "before" request in the GET, the
			// result won't have an "after" in the JSON, in addition to missing
			// the "before." So we will have to use the final post in the list
			// of duplicates.
			//
			// That being said, we'll also need to capture the value of the
			// "sort=" parameter as well, so we will need to inspect the
			// query key-value pairs anyway.
			let l = duplicates.len();
			if l > 0 {
				// This gets set to true if "before=" is one of the GET params.
				let mut have_before: bool = false;

				// This gets set to true if "after=" is one of the GET params.
				let mut have_after: bool = false;

				// Inspect the query key-value pairs. We will need to record
				// the value of "sort=", along with checking to see if either
				// one of "before=" or "after=" are given.
				//
				// If we're in the middle of the batch (evidenced by the
				// presence of a "before=" or "after=" parameter in the GET),
				// then use the first post as the "before" reference.
				//
				// We'll do this iteratively. Better than with .map_or()
				// since a closure will continue to operate on remaining
				// elements even after we've determined one of "before=" or
				// "after=" (or both) are in the GET request.
				//
				// In practice, there should only ever be one of "before=" or
				// "after=" and never both.
				let query_str = req.uri().query().unwrap_or_default().to_string();

				if !query_str.is_empty() {
					for param in query_str.split('&') {
						let kv: Vec<&str> = param.split('=').collect();
						if kv.len() < 2 {
							// Reject invalid query parameter.
							continue;
						}

						let key: &str = kv[0];
						match key {
							"before" => have_before = true,
							"after" => have_after = true,
							"sort" => {
								let val: &str = kv[1];
								match val {
									"new" | "num_comments" => sort = val.to_string(),
									_ => {}
								}
							}
							_ => {}
						}
					}
				}

				if have_after {
					before = "t3_".to_owned();
					before.push_str(&duplicates[0].id);
				}

				// Address potentially missing "after". If "before=" is in the
				// GET, then "after" will be null in the JSON (see FIXME
				// above).
				if have_before {
					// The next batch will need to start from one after the
					// last post in the current batch.
					after = "t3_".to_owned();
					after.push_str(&duplicates[l - 1].id);

					// Here is where things get terrible. Notice that we
					// haven't set `before`. In order to do so, we will
					// need to know if there is a batch that exists before
					// this one, and doing so requires actually fetching the
					// previous batch. In other words, we have to do yet one
					// more GET to Reddit. There is no other way to determine
					// whether or not to define `before`.
					//
					// We'll mitigate that by requesting at most one duplicate.
					let new_path: String = format!(
						"{}.json?before=t3_{}&sort={}&limit=1&raw_json=1",
						req.uri().path(),
						&duplicates[0].id,
						if sort.is_empty() { "num_comments".to_string() } else { sort.clone() }
					);
					match json(new_path, true).await {
						Ok(response) => {
							if !response[1]["data"]["children"].as_array().unwrap_or(&Vec::new()).is_empty() {
								before = "t3_".to_owned();
								before.push_str(&duplicates[0].id);
							}
						}
						Err(msg) => {
							// Abort entirely if we couldn't get the previous
							// batch.
							return error(req, msg).await;
						}
					}
				} else {
					after = response[1]["data"]["after"].as_str().unwrap_or_default().to_string();
				}
			}
			let url = req.uri().to_string();

			template(DuplicatesTemplate {
				params: DuplicatesParams { before, after, sort },
				post,
				duplicates,
				prefs: Preferences::new(&req),
				url,
				num_posts_filtered,
				all_posts_filtered,
			})
		}

		// Process error.
		Err(msg) => {
			if msg == "quarantined" {
				let sub = req.param("sub").unwrap_or_default();
				quarantine(req, sub)
			} else {
				error(req, msg).await
			}
		}
	}
}

// DUPLICATES
async fn parse_duplicates(json: &serde_json::Value, filters: &HashSet<String>) -> (Vec<Post>, u64, bool) {
	let post_duplicates: &Vec<Value> = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned);
	let mut duplicates: Vec<Post> = Vec::new();

	// Process each post and place them in the Vec<Post>.
	for val in post_duplicates.iter() {
		let post: Post = parse_post(val).await;
		duplicates.push(post);
	}

	let (num_posts_filtered, all_posts_filtered) = filter_posts(&mut duplicates, filters);
	(duplicates, num_posts_filtered, all_posts_filtered)
}
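A minimal sketch of the cursor values the pagination workaround above constructs: Reddit listing endpoints take `before`/`after` cursors in "fullname" form, i.e. `t3_` plus the post ID. The helper below is illustrative and not part of the diff.

```rust
// Illustrative helper: build a t3_ fullname cursor from a post ID.
fn fullname(id: &str) -> String {
	format!("t3_{}", id)
}

// e.g. before = fullname(&duplicates[0].id);
//      after  = fullname(&duplicates[l - 1].id);
```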
src/main.rs (57 lines)
@@ -3,6 +3,8 @@
#![allow(clippy::cmp_owned)]

// Reference local files
mod config;
mod duplicates;
mod post;
mod search;
mod settings;
@@ -11,13 +13,13 @@ mod user;
mod utils;

// Import Crates
use clap::{Arg, Command};
use clap::{Arg, ArgAction, Command};

use futures_lite::FutureExt;
use hyper::{header::HeaderValue, Body, Request, Response};

mod client;
use client::proxy;
use client::{canonical_path, proxy};
use server::RequestExt;
use utils::{error, redirect, ThemeAssets};

@@ -112,7 +114,7 @@ async fn main() {
.short('r')
.long("redirect-https")
.help("Redirect all HTTP requests to HTTPS (no longer functional)")
.takes_value(false),
.num_args(0),
)
.arg(
Arg::new("address")
@@ -121,16 +123,18 @@ async fn main() {
.value_name("ADDRESS")
.help("Sets address to listen on")
.default_value("0.0.0.0")
.takes_value(true),
.num_args(1),
)
.arg(
Arg::new("port")
.short('p')
.long("port")
.value_name("PORT")
.env("PORT")
.help("Port to listen on")
.default_value("8080")
.takes_value(true),
.action(ArgAction::Set)
.num_args(1),
)
.arg(
Arg::new("hsts")
@@ -139,15 +143,15 @@ async fn main() {
.value_name("EXPIRE_TIME")
.help("HSTS header to tell browsers that this site should only be accessed over HTTPS")
.default_value("604800")
.takes_value(true),
.num_args(1),
)
.get_matches();

let address = matches.value_of("address").unwrap_or("0.0.0.0");
let port = std::env::var("PORT").unwrap_or_else(|_| matches.value_of("port").unwrap_or("8080").to_string());
let hsts = matches.value_of("hsts");
let address = matches.get_one::<String>("address").unwrap();
let port = matches.get_one::<String>("port").unwrap();
let hsts = matches.get_one("hsts").map(|m: &String| m.as_str());

let listener = [address, ":", &port].concat();
let listener = [address, ":", port].concat();

println!("Starting Libreddit...");

@@ -238,6 +242,16 @@ async fn main() {
app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
app.at("/comments/:id").get(|r| post::item(r).boxed());
app.at("/comments/:id/comments").get(|r| post::item(r).boxed());
app.at("/comments/:id/comments/:comment_id").get(|r| post::item(r).boxed());
app.at("/comments/:id/:title").get(|r| post::item(r).boxed());
app.at("/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());

app.at("/r/:sub/duplicates/:id").get(|r| duplicates::item(r).boxed());
app.at("/r/:sub/duplicates/:id/:title").get(|r| duplicates::item(r).boxed());
app.at("/duplicates/:id").get(|r| duplicates::item(r).boxed());
app.at("/duplicates/:id/:title").get(|r| duplicates::item(r).boxed());

app.at("/r/:sub/search").get(|r| search::find(r).boxed());

@@ -254,9 +268,6 @@ async fn main() {

app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed());

// Comments handler
app.at("/comments/:id").get(|r| post::item(r).boxed());

// Front page
app.at("/").get(|r| subreddit::community(r).boxed());

@@ -274,13 +285,25 @@ async fn main() {
// Handle about pages
app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());

app.at("/:id").get(|req: Request<Body>| match req.param("id").as_deref() {
app.at("/:id").get(|req: Request<Body>| {
Box::pin(async move {
match req.param("id").as_deref() {
// Sort front page
Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).boxed(),
Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,

// Short link for post
Some(id) if id.len() > 4 && id.len() < 7 => post::item(req).boxed(),
Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{}", id)).await {
Ok(path_opt) => match path_opt {
Some(path) => Ok(redirect(path)),
None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
},
Err(e) => error(req, e).await,
},

// Error message for unknown pages
_ => error(req, "Nothing here".to_string()).boxed(),
_ => error(req, "Nothing here".to_string()).await,
}
})
});

// Default service in case no routes match
111
src/post.rs
@ -3,7 +3,7 @@ use crate::client::json;
|
||||
use crate::server::RequestExt;
|
||||
use crate::subreddit::{can_access_quarantine, quarantine};
|
||||
use crate::utils::{
|
||||
error, format_num, format_url, get_filters, param, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
|
||||
error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences,
|
||||
};
|
||||
use hyper::{Body, Request, Response};
|
||||
|
||||
@ -54,8 +54,16 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Otherwise, grab the JSON output from the request
|
||||
Ok(response) => {
|
||||
// Parse the JSON into Post and Comment structs
|
||||
let post = parse_post(&response[0]).await;
|
||||
let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req));
|
||||
let post = parse_post(&response[0]["data"]["children"][0]).await;
|
||||
|
||||
// Return the landing page if Reddit deems this post NSFW,
// but the user has disabled the display of NSFW content
// or the instance is SFW-only.
|
||||
if post.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
|
||||
return Ok(nsfw_landing(req).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &req);
|
||||
let url = req.uri().to_string();
|
||||
|
||||
// Use the Post and Comment structs to generate a website to show users
|
||||
@ -63,7 +71,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
comments,
|
||||
post,
|
||||
sort,
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
single_thread,
|
||||
url,
|
||||
})
|
||||
@ -80,94 +88,8 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
}
|
||||
}
|
||||
|
||||
// POSTS
|
||||
async fn parse_post(json: &serde_json::Value) -> Post {
|
||||
// Retrieve post (as opposed to comments) from JSON
|
||||
let post: &serde_json::Value = &json["data"]["children"][0];
|
||||
|
||||
// Grab UTC time as unix timestamp
|
||||
let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
|
||||
// Parse post score and upvote ratio
|
||||
let score = post["data"]["score"].as_i64().unwrap_or_default();
|
||||
let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
|
||||
|
||||
// Determine the type of media along with the media URL
|
||||
let (post_type, media, gallery) = Media::parse(&post["data"]).await;
|
||||
|
||||
let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
|
||||
|
||||
let permalink = val(post, "permalink");
|
||||
|
||||
let body = if val(post, "removed_by_category") == "moderator" {
|
||||
format!(
|
||||
"<div class=\"md\"><p>[removed] — <a href=\"https://www.reveddit.com{}\">view removed post</a></p></div>",
|
||||
permalink
|
||||
)
|
||||
} else {
|
||||
rewrite_urls(&val(post, "selftext_html"))
|
||||
};
|
||||
|
||||
// Build a post using data parsed from Reddit post API
|
||||
Post {
|
||||
id: val(post, "id"),
|
||||
title: val(post, "title"),
|
||||
community: val(post, "subreddit"),
|
||||
body,
|
||||
author: Author {
|
||||
name: val(post, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
post["data"]["author_flair_type"].as_str().unwrap_or_default(),
|
||||
post["data"]["author_flair_richtext"].as_array(),
|
||||
post["data"]["author_flair_text"].as_str(),
|
||||
),
|
||||
text: val(post, "link_flair_text"),
|
||||
background_color: val(post, "author_flair_background_color"),
|
||||
foreground_color: val(post, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(post, "distinguished"),
|
||||
},
|
||||
permalink,
|
||||
score: format_num(score),
|
||||
upvote_ratio: ratio as i64,
|
||||
post_type,
|
||||
media,
|
||||
thumbnail: Media {
|
||||
url: format_url(val(post, "thumbnail").as_str()),
|
||||
alt_url: String::new(),
|
||||
width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
|
||||
height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
|
||||
poster: "".to_string(),
|
||||
},
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
post["data"]["link_flair_type"].as_str().unwrap_or_default(),
|
||||
post["data"]["link_flair_richtext"].as_array(),
|
||||
post["data"]["link_flair_text"].as_str(),
|
||||
),
|
||||
text: val(post, "link_flair_text"),
|
||||
background_color: val(post, "link_flair_background_color"),
|
||||
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
||||
"black".to_string()
|
||||
} else {
|
||||
"white".to_string()
|
||||
},
|
||||
},
|
||||
flags: Flags {
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
|
||||
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
|
||||
},
|
||||
domain: val(post, "domain"),
|
||||
rel_time,
|
||||
created,
|
||||
comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
|
||||
gallery,
|
||||
awards,
|
||||
}
|
||||
}
|
||||
|
||||
// COMMENTS
|
||||
fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>) -> Vec<Comment> {
|
||||
fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>, req: &Request<Body>) -> Vec<Comment> {
|
||||
// Parse the comment JSON into a Vector of Comments
|
||||
let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
|
||||
|
||||
@ -187,7 +109,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
|
||||
|
||||
// If this comment contains replies, handle those too
|
||||
let replies: Vec<Comment> = if data["replies"].is_object() {
|
||||
parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters)
|
||||
parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, req)
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
@ -200,9 +122,9 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
|
||||
let id = val(&comment, "id");
|
||||
let highlighted = id == highlighted_comment;
|
||||
|
||||
let body = if val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]" {
|
||||
let body = if (val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]") || val(&comment, "body") == "[ Removed by Reddit ]" {
|
||||
format!(
|
||||
"<div class=\"md\"><p>[removed] — <a href=\"https://www.reveddit.com{}{}\">view removed comment</a></p></div>",
|
||||
"<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}{}\">view removed comment</a></p></div>",
|
||||
post_link, id
|
||||
)
|
||||
} else {
|
||||
@ -255,6 +177,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
|
||||
awards,
|
||||
collapsed,
|
||||
is_filtered,
|
||||
prefs: Preferences::new(req),
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
|
@ -1,5 +1,5 @@
|
||||
// CRATES
|
||||
use crate::utils::{catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
|
||||
use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
|
||||
use crate::{
|
||||
client::json,
|
||||
subreddit::{can_access_quarantine, quarantine},
|
||||
@ -7,6 +7,8 @@ use crate::{
|
||||
};
|
||||
use askama::Template;
|
||||
use hyper::{Body, Request, Response};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
// STRUCTS
|
||||
struct SearchParams {
|
||||
@ -42,13 +44,25 @@ struct SearchTemplate {
|
||||
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||
/// and all fetched posts being filtered).
|
||||
all_posts_filtered: bool,
|
||||
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||
all_posts_hidden_nsfw: bool,
|
||||
no_posts: bool,
|
||||
}
|
||||
|
||||
// Regex matched against search queries to determine if they are reddit urls.
|
||||
static REDDIT_URL_MATCH: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap());
|
||||
|
||||
// SERVICES
|
||||
pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
|
||||
// This ensures that during a search, no NSFW posts are fetched at all
|
||||
let nsfw_results = if setting(&req, "show_nsfw") == "on" && !utils::sfw_only() {
|
||||
"&include_over_18=on"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
|
||||
let query = param(&path, "q").unwrap_or_default();
|
||||
let mut query = param(&path, "q").unwrap_or_default();
|
||||
query = REDDIT_URL_MATCH.replace(&query, "").to_string();
|
||||
|
||||
if query.is_empty() {
|
||||
return Ok(redirect("/".to_string()));
|
||||
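To make the effect of `REDDIT_URL_MATCH` concrete: when a full Reddit link is pasted into the search box, the scheme-and-host prefix is stripped so the rest of the path can be handled locally. A small sketch (the example queries are made up):

```rust
use once_cell::sync::Lazy;
use regex::Regex;

static REDDIT_URL_MATCH: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap());

fn main() {
	// A pasted Reddit URL is reduced to a site-relative path.
	let query = "https://www.reddit.com/r/rust/comments/abc123/just_a_test/";
	let stripped = REDDIT_URL_MATCH.replace(query, "").to_string();
	assert_eq!(stripped, "r/rust/comments/abc123/just_a_test/");

	// Anything that is not a Reddit URL passes through untouched.
	let other = REDDIT_URL_MATCH.replace("rust async", "").to_string();
	assert_eq!(other, "rust async");
}
```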
@ -96,16 +110,19 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
|
||||
typed,
|
||||
},
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
is_filtered: true,
|
||||
all_posts_filtered: false,
|
||||
all_posts_hidden_nsfw: false,
|
||||
no_posts: false,
|
||||
})
|
||||
} else {
|
||||
match Post::fetch(&path, quarantined).await {
|
||||
Ok((mut posts, after)) => {
|
||||
let all_posts_filtered = filter_posts(&mut posts, &filters);
|
||||
|
||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||
let no_posts = posts.is_empty();
|
||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||
template(SearchTemplate {
|
||||
posts,
|
||||
subreddits,
|
||||
@ -119,10 +136,12 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
|
||||
typed,
|
||||
},
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
is_filtered: false,
|
||||
all_posts_filtered,
|
||||
all_posts_hidden_nsfw,
|
||||
no_posts,
|
||||
})
|
||||
}
|
||||
Err(msg) => {
|
||||
|
569
src/server.rs
@ -1,17 +1,80 @@
|
||||
use brotli::enc::{BrotliCompress, BrotliEncoderParams};
|
||||
use cached::proc_macro::cached;
|
||||
use cookie::Cookie;
|
||||
use core::f64;
|
||||
use futures_lite::{future::Boxed, Future, FutureExt};
|
||||
use hyper::{
|
||||
header::HeaderValue,
|
||||
body,
|
||||
body::HttpBody,
|
||||
header,
|
||||
service::{make_service_fn, service_fn},
|
||||
HeaderMap,
|
||||
};
|
||||
use hyper::{Body, Method, Request, Response, Server as HyperServer};
|
||||
use libflate::gzip;
|
||||
use route_recognizer::{Params, Router};
|
||||
use std::{pin::Pin, result::Result};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
io,
|
||||
pin::Pin,
|
||||
result::Result,
|
||||
str::{from_utf8, Split},
|
||||
string::ToString,
|
||||
};
|
||||
use time::Duration;
|
||||
|
||||
use crate::dbg_msg;
|
||||
|
||||
type BoxResponse = Pin<Box<dyn Future<Output = Result<Response<Body>, String>> + Send>>;
|
||||
|
||||
/// Compressors for the response Body, in ascending order of preference.
|
||||
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
|
||||
enum CompressionType {
|
||||
Passthrough,
|
||||
Gzip,
|
||||
Brotli,
|
||||
}
|
||||
|
||||
/// All browsers support gzip, so if we are given `Accept-Encoding: *`, deliver
|
||||
/// gzipped content.
|
||||
///
|
||||
/// Brotli would be nice universally, but Safari (iOS, iPhone, macOS) reportedly
|
||||
/// doesn't support it yet.
|
||||
const DEFAULT_COMPRESSOR: CompressionType = CompressionType::Gzip;
|
||||
|
||||
impl CompressionType {
|
||||
/// Returns a `CompressionType` given a content coding
|
||||
/// in [RFC 7231](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.4)
|
||||
/// format.
|
||||
fn parse(s: &str) -> Option<CompressionType> {
|
||||
let c = match s {
|
||||
// Compressors we support.
|
||||
"gzip" => CompressionType::Gzip,
|
||||
"br" => CompressionType::Brotli,
|
||||
|
||||
// The wildcard means that we can choose whatever
|
||||
// compression we prefer. In this case, use the
|
||||
// default.
|
||||
"*" => DEFAULT_COMPRESSOR,
|
||||
|
||||
// Compressor not supported.
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(c)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for CompressionType {
|
||||
fn to_string(&self) -> String {
|
||||
match self {
|
||||
CompressionType::Gzip => "gzip".to_string(),
|
||||
CompressionType::Brotli => "br".to_string(),
|
||||
_ => String::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
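For illustration, a unit-test-style sketch of the content-coding parser above; it assumes it lives in the same module as `CompressionType` and `DEFAULT_COMPRESSOR` (for instance under the tests module added at the end of this file):

```rust
#[test]
fn parse_content_codings() {
	// Codings we implement map to their enum variants.
	assert_eq!(CompressionType::parse("gzip"), Some(CompressionType::Gzip));
	assert_eq!(CompressionType::parse("br"), Some(CompressionType::Brotli));

	// The wildcard defers to our own default preference.
	assert_eq!(CompressionType::parse("*"), Some(DEFAULT_COMPRESSOR));

	// Unsupported codings are rejected rather than guessed at.
	assert_eq!(CompressionType::parse("zstd"), None);
}
```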
pub struct Route<'a> {
|
||||
router: &'a mut Router<fn(Request<Body>) -> BoxResponse>,
|
||||
path: String,
|
||||
@ -97,7 +160,7 @@ impl ResponseExt for Response<Body> {
|
||||
}
|
||||
|
||||
fn insert_cookie(&mut self, cookie: Cookie) {
|
||||
if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
|
||||
if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
|
||||
self.headers_mut().append("Set-Cookie", val);
|
||||
}
|
||||
}
|
||||
@ -106,7 +169,7 @@ impl ResponseExt for Response<Body> {
|
||||
let mut cookie = Cookie::named(name);
|
||||
cookie.set_path("/");
|
||||
cookie.set_max_age(Duration::seconds(1));
|
||||
if let Ok(val) = HeaderValue::from_str(&cookie.to_string()) {
|
||||
if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
|
||||
self.headers_mut().append("Set-Cookie", val);
|
||||
}
|
||||
}
|
||||
@ -156,7 +219,8 @@ impl Server {
|
||||
// let shared_router = router.clone();
|
||||
async move {
|
||||
Ok::<_, String>(service_fn(move |req: Request<Body>| {
|
||||
let headers = default_headers.clone();
|
||||
let req_headers = req.headers().clone();
|
||||
let def_headers = default_headers.clone();
|
||||
|
||||
// Remove double slashes and decode encoded slashes
|
||||
let mut path = req.uri().path().replace("//", "/").replace("%2F", "/");
|
||||
@ -176,26 +240,20 @@ impl Server {
|
||||
// Run the route's function
|
||||
let func = (found.handler().to_owned().to_owned())(parammed);
|
||||
async move {
|
||||
let res: Result<Response<Body>, String> = func.await;
|
||||
// Add default headers to response
|
||||
res.map(|mut response| {
|
||||
response.headers_mut().extend(headers);
|
||||
response
|
||||
})
|
||||
match func.await {
|
||||
Ok(mut res) => {
|
||||
res.headers_mut().extend(def_headers);
|
||||
let _ = compress_response(&req_headers, &mut res).await;
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
|
||||
}
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
// If there was a routing error
|
||||
Err(e) => async move {
|
||||
// Return a 404 error
|
||||
let res: Result<Response<Body>, String> = Ok(Response::builder().status(404).body(e.into()).unwrap_or_default());
|
||||
// Add default headers to response
|
||||
res.map(|mut response| {
|
||||
response.headers_mut().extend(headers);
|
||||
response
|
||||
})
|
||||
}
|
||||
.boxed(),
|
||||
Err(e) => async move { new_boilerplate(def_headers, req_headers, 404, e.into()).await }.boxed(),
|
||||
}
|
||||
}))
|
||||
}
|
||||
@ -213,3 +271,472 @@ impl Server {
|
||||
server.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a boilerplate Response for error conditions. This response will be
|
||||
/// compressed if requested by client.
|
||||
async fn new_boilerplate(
|
||||
default_headers: HeaderMap<header::HeaderValue>,
|
||||
req_headers: HeaderMap<header::HeaderValue>,
|
||||
status: u16,
|
||||
body: Body,
|
||||
) -> Result<Response<Body>, String> {
|
||||
match Response::builder().status(status).body(body) {
|
||||
Ok(mut res) => {
|
||||
let _ = compress_response(&req_headers, &mut res).await;
|
||||
|
||||
res.headers_mut().extend(default_headers.clone());
|
||||
Ok(res)
|
||||
}
|
||||
Err(msg) => Err(msg.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Determines the desired compressor based on the Accept-Encoding header.
|
||||
///
|
||||
/// This function will honor the [q-value](https://developer.mozilla.org/en-US/docs/Glossary/Quality_values)
|
||||
/// for each compressor. The q-value is an optional parameter, a decimal value
|
||||
/// on \[0..1\], to order the compressors by preference. An Accept-Encoding value
|
||||
/// with no q-values is also accepted.
|
||||
///
|
||||
/// Here are [examples](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#examples)
|
||||
/// of valid Accept-Encoding headers.
|
||||
///
|
||||
/// ```http
|
||||
/// Accept-Encoding: gzip
|
||||
/// Accept-Encoding: gzip, compress, br
|
||||
/// Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1
|
||||
/// ```
|
||||
#[cached]
|
||||
fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
|
||||
if accept_encoding.is_empty() {
|
||||
return None;
|
||||
};
|
||||
|
||||
// Keep track of the compressor candidate based on both the client's
|
||||
// preference and our own. Concrete examples:
|
||||
//
|
||||
// 1. "Accept-Encoding: gzip, br" => assuming we like brotli more than
|
||||
// gzip, and the browser supports brotli, we choose brotli
|
||||
//
|
||||
// 2. "Accept-Encoding: gzip;q=0.8, br;q=0.3" => the client has stated a
|
||||
// preference for gzip over brotli, so we choose gzip
|
||||
//
|
||||
// To do this, we need to define a struct which contains the requested
// compressor (abstracted as a CompressionType enum) and the
|
||||
// q-value. If no q-value is defined for the compressor, we assume one of
|
||||
// 1.0. We first compare compressor candidates by comparing q-values, and
|
||||
// then CompressionTypes. We keep track of whatever is the greatest per our
|
||||
// ordering.
|
||||
|
||||
struct CompressorCandidate {
|
||||
alg: CompressionType,
|
||||
q: f64,
|
||||
}
|
||||
|
||||
impl Ord for CompressorCandidate {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
// Compare q-values. Break ties with the
|
||||
// CompressionType values.
|
||||
|
||||
match self.q.total_cmp(&other.q) {
|
||||
Ordering::Equal => self.alg.cmp(&other.alg),
|
||||
ord => ord,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for CompressorCandidate {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
// Guard against NAN, both on our end and on the other.
|
||||
if self.q.is_nan() || other.q.is_nan() {
|
||||
return None;
|
||||
};
|
||||
|
||||
// f64 and CompressionType are ordered, except in the case
|
||||
// where the f64 is NAN (which we checked against), so we
|
||||
// can safely return a Some here.
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for CompressorCandidate {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
(self.q == other.q) && (self.alg == other.alg)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for CompressorCandidate {}
|
||||
|
||||
// This is the current candidate.
|
||||
//
|
||||
// Assume no candidate so far. We do this by assigning the sentinel value
|
||||
// of negative infinity to the q-value. If this value is negative infinity,
|
||||
// that means there was no viable compressor candidate.
|
||||
let mut cur_candidate = CompressorCandidate {
|
||||
alg: CompressionType::Passthrough,
|
||||
q: f64::NEG_INFINITY,
|
||||
};
|
||||
|
||||
// This loop reads the requested compressors and keeps track of whichever
|
||||
// one has the highest priority per our heuristic.
|
||||
for val in accept_encoding.to_string().split(',') {
|
||||
let mut q: f64 = 1.0;
|
||||
|
||||
// The compressor and q-value (if the latter is defined)
|
||||
// will be delimited by semicolons.
|
||||
let mut spl: Split<char> = val.split(';');
|
||||
|
||||
// Get the compressor. For example, in
|
||||
// gzip;q=0.8
|
||||
// this grabs "gzip" in the string. It
|
||||
// will further validate the compressor against the
|
||||
// list of those we support. If it is not supported,
|
||||
// we move onto the next one.
|
||||
let compressor: CompressionType = match spl.next() {
|
||||
// CompressionType::parse will return the appropriate enum given
|
||||
// a string. For example, it will return CompressionType::Gzip
|
||||
// when given "gzip".
|
||||
Some(s) => match CompressionType::parse(s.trim()) {
|
||||
Some(candidate) => candidate,
|
||||
|
||||
// We don't support the requested compression algorithm.
|
||||
None => continue,
|
||||
},
|
||||
|
||||
// We should never get here, but I'm paranoid.
|
||||
None => continue,
|
||||
};
|
||||
|
||||
// Get the q-value. This might not be defined, in which case assume
|
||||
// 1.0.
|
||||
if let Some(s) = spl.next() {
|
||||
if !(s.len() > 2 && s.starts_with("q=")) {
|
||||
// If the q-value is malformed, the header is malformed, so
|
||||
// abort.
|
||||
return None;
|
||||
}
|
||||
|
||||
match s[2..].parse::<f64>() {
|
||||
Ok(val) => {
|
||||
if (0.0..=1.0).contains(&val) {
|
||||
q = val;
|
||||
} else {
|
||||
// If the value is outside [0..1], header is malformed.
|
||||
// Abort.
|
||||
return None;
|
||||
};
|
||||
}
|
||||
Err(_) => {
|
||||
// If this isn't a f64, then assume a malformed header
|
||||
// value and abort.
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// If new_candidate > cur_candidate, make new_candidate the new
|
||||
// cur_candidate. But do this safely! It is very possible that
|
||||
// someone gave us the string "NAN", which (&str).parse::<f64>
|
||||
// will happily translate to f64::NAN.
|
||||
let new_candidate = CompressorCandidate { alg: compressor, q };
|
||||
if let Some(ord) = new_candidate.partial_cmp(&cur_candidate) {
|
||||
if ord == Ordering::Greater {
|
||||
cur_candidate = new_candidate;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if cur_candidate.q != f64::NEG_INFINITY {
|
||||
Some(cur_candidate.alg)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
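To make the tie-breaking rule concrete (equal q-values fall back to our own `CompressionType` ordering, which prefers brotli), a couple of illustrative assertions that assume `determine_compressor` is in scope, for example inside the tests module below:

```rust
// Equal q-values: the derived ordering (Brotli > Gzip) breaks the tie.
assert_eq!(determine_compressor("gzip;q=0.5, br;q=0.5".to_string()), Some(CompressionType::Brotli));

// An explicit client preference outranks ours.
assert_eq!(determine_compressor("gzip;q=0.9, br;q=0.2".to_string()), Some(CompressionType::Gzip));
```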
/// Compress the response body, if possible or desirable. The Body will be
|
||||
/// compressed in place, and a new header Content-Encoding will be set
|
||||
/// indicating the compression algorithm.
|
||||
///
|
||||
/// This function deems the Body eligible for compression if and only if the following
|
||||
/// conditions are met:
|
||||
///
|
||||
/// 1. the HTTP client requests a compression encoding in the Accept-Encoding
|
||||
/// header (hence the need for the req_headers);
|
||||
///
|
||||
/// 2. the content encoding corresponds to a compression algorithm we support;
|
||||
///
|
||||
/// 3. the Media type in the Content-Type response header is text with any
|
||||
/// subtype (e.g. text/plain) or application/json.
|
||||
///
|
||||
/// compress_response returns Ok on successful compression, or if not all three
|
||||
/// conditions above are met. It returns Err if there was a problem decoding
|
||||
/// any header in either req_headers or res, but res will remain intact.
|
||||
///
|
||||
/// This function logs errors to stderr, but only in debug mode. No information
|
||||
/// is logged in release builds.
|
||||
async fn compress_response(req_headers: &HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
|
||||
// Check if the data is eligible for compression.
|
||||
if let Some(hdr) = res.headers().get(header::CONTENT_TYPE) {
|
||||
match from_utf8(hdr.as_bytes()) {
|
||||
Ok(val) => {
|
||||
let s = val.to_string();
|
||||
|
||||
// TODO: better determination of what is eligible for compression
|
||||
if !(s.starts_with("text/") || s.starts_with("application/json")) {
|
||||
return Ok(());
|
||||
};
|
||||
}
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
};
|
||||
} else {
|
||||
// Response declares no Content-Type. Assume for simplicity that it
|
||||
// cannot be compressed.
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Don't bother if the size of the response body will fit
|
||||
// within an IP frame (less the bytes that make up the TCP/IP and HTTP
|
||||
// headers).
|
||||
if res.body().size_hint().lower() < 1452 {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Check to see which compressor is requested, and if we can use it.
|
||||
let accept_encoding: String = match req_headers.get(header::ACCEPT_ENCODING) {
|
||||
None => return Ok(()), // Client requested no compression.
|
||||
|
||||
Some(hdr) => match String::from_utf8(hdr.as_bytes().into()) {
|
||||
Ok(val) => val,
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
Err(_) => return Ok(()),
|
||||
},
|
||||
};
|
||||
|
||||
let compressor: CompressionType = match determine_compressor(accept_encoding) {
|
||||
Some(c) => c,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
// Get the body from the response.
|
||||
let body_bytes: Vec<u8> = match body::to_bytes(res.body_mut()).await {
|
||||
Ok(b) => b.to_vec(),
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
};
|
||||
|
||||
// Compress!
|
||||
match compress_body(compressor, body_bytes) {
|
||||
Ok(compressed) => {
|
||||
// We get here iff the compression was successful. Replace the body
|
||||
// with the compressed payload, and add the appropriate
|
||||
// Content-Encoding header in the response.
|
||||
res.headers_mut().insert(header::CONTENT_ENCODING, compressor.to_string().parse().unwrap());
|
||||
*(res.body_mut()) = Body::from(compressed);
|
||||
}
|
||||
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Compresses a `Vec<u8>` given a [`CompressionType`].
|
||||
///
|
||||
/// This is a helper function for [`compress_response`] and should not be
|
||||
/// called directly.
|
||||
|
||||
// I've chosen a TTL of 600 (== 10 minutes) since compression is
|
||||
// computationally expensive and we don't want to be doing it often. This is
|
||||
// larger than client::json's TTL, but that's okay, because if client::json
|
||||
// returns a new serde_json::Value, body_bytes changes, so this function will
|
||||
// execute again.
|
||||
#[cached(size = 100, time = 600, result = true)]
|
||||
fn compress_body(compressor: CompressionType, body_bytes: Vec<u8>) -> Result<Vec<u8>, String> {
|
||||
// io::Cursor implements io::Read, required for our encoders.
|
||||
let mut reader = io::Cursor::new(body_bytes);
|
||||
|
||||
let compressed: Vec<u8> = match compressor {
|
||||
CompressionType::Gzip => {
|
||||
let mut gz: gzip::Encoder<Vec<u8>> = match gzip::Encoder::new(Vec::new()) {
|
||||
Ok(gz) => gz,
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
};
|
||||
|
||||
match io::copy(&mut reader, &mut gz) {
|
||||
Ok(_) => match gz.finish().into_result() {
|
||||
Ok(compressed) => compressed,
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
CompressionType::Brotli => {
|
||||
// We may want to make the compression parameters configurable
|
||||
// in the future. For now, the defaults are sufficient.
|
||||
let brotli_params = BrotliEncoderParams::default();
|
||||
|
||||
let mut compressed = Vec::<u8>::new();
|
||||
match BrotliCompress(&mut reader, &mut compressed, &brotli_params) {
|
||||
Ok(_) => compressed,
|
||||
Err(e) => {
|
||||
dbg_msg!(e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This arm is for any requested compressor for which we don't yet
|
||||
// have an implementation.
|
||||
_ => {
|
||||
let msg = "unsupported compressor".to_string();
|
||||
return Err(msg);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(compressed)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use brotli::Decompressor as BrotliDecompressor;
|
||||
use futures_lite::future::block_on;
|
||||
use lipsum::lipsum;
|
||||
use std::{boxed::Box, io};
|
||||
|
||||
#[test]
|
||||
fn test_determine_compressor() {
|
||||
// Single compressor given.
|
||||
assert_eq!(determine_compressor("unsupported".to_string()), None);
|
||||
assert_eq!(determine_compressor("gzip".to_string()), Some(CompressionType::Gzip));
|
||||
assert_eq!(determine_compressor("*".to_string()), Some(DEFAULT_COMPRESSOR));
|
||||
|
||||
// Multiple compressors.
|
||||
assert_eq!(determine_compressor("gzip, br".to_string()), Some(CompressionType::Brotli));
|
||||
assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3".to_string()), Some(CompressionType::Gzip));
|
||||
assert_eq!(determine_compressor("br, gzip".to_string()), Some(CompressionType::Brotli));
|
||||
assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4".to_string()), Some(CompressionType::Gzip));
|
||||
|
||||
// Invalid q-values.
|
||||
assert_eq!(determine_compressor("gzip;q=NAN".to_string()), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_compress_response() {
|
||||
// This macro generates an Accept-Encoding header value given any number of
|
||||
// compressors.
|
||||
macro_rules! ae_gen {
|
||||
($x:expr) => {
|
||||
$x.to_string().as_str()
|
||||
};
|
||||
|
||||
($x:expr, $($y:expr),+) => {
|
||||
format!("{}, {}", $x.to_string(), ae_gen!($($y),+)).as_str()
|
||||
};
|
||||
}
|
||||
|
||||
for accept_encoding in [
|
||||
"*",
|
||||
ae_gen!(CompressionType::Gzip),
|
||||
ae_gen!(CompressionType::Brotli, CompressionType::Gzip),
|
||||
ae_gen!(CompressionType::Brotli),
|
||||
] {
|
||||
// Determine what the expected encoding should be based on the
// specific encodings we accept.
|
||||
let expected_encoding: CompressionType = match determine_compressor(accept_encoding.to_string()) {
|
||||
Some(s) => s,
|
||||
None => panic!("determine_compressor(accept_encoding.to_string()) => None"),
|
||||
};
|
||||
|
||||
// Build headers with our Accept-Encoding.
|
||||
let mut req_headers = HeaderMap::new();
|
||||
req_headers.insert(header::ACCEPT_ENCODING, header::HeaderValue::from_str(accept_encoding).unwrap());
|
||||
|
||||
// Build test response.
|
||||
let lorem_ipsum: String = lipsum(10000);
|
||||
let expected_lorem_ipsum = Vec::<u8>::from(lorem_ipsum.as_str());
|
||||
let mut res = Response::builder()
|
||||
.status(200)
|
||||
.header(header::CONTENT_TYPE, "text/plain")
|
||||
.body(Body::from(lorem_ipsum))
|
||||
.unwrap();
|
||||
|
||||
// Perform the compression.
|
||||
if let Err(e) = block_on(compress_response(&req_headers, &mut res)) {
|
||||
panic!("compress_response(&req_headers, &mut res) => Err(\"{}\")", e);
|
||||
};
|
||||
|
||||
// If the content was compressed, we expect the Content-Encoding
|
||||
// header to be modified.
|
||||
assert_eq!(
|
||||
res
|
||||
.headers()
|
||||
.get(header::CONTENT_ENCODING)
|
||||
.unwrap_or_else(|| panic!("missing content-encoding header"))
|
||||
.to_str()
|
||||
.unwrap_or_else(|_| panic!("failed to convert Content-Encoding header::HeaderValue to String")),
|
||||
expected_encoding.to_string()
|
||||
);
|
||||
|
||||
// Decompress body and make sure it's equal to what we started
|
||||
// with.
|
||||
//
|
||||
// In the case of no compression, just make sure the "new" body in
|
||||
// the Response is the same as what we started with.
|
||||
let body_vec = match block_on(body::to_bytes(res.body_mut())) {
|
||||
Ok(b) => b.to_vec(),
|
||||
Err(e) => panic!("{}", e),
|
||||
};
|
||||
|
||||
if expected_encoding == CompressionType::Passthrough {
|
||||
assert!(body_vec.eq(&expected_lorem_ipsum));
|
||||
continue;
|
||||
}
|
||||
|
||||
// This provides an io::Read for the underlying body.
|
||||
let mut body_cursor: io::Cursor<Vec<u8>> = io::Cursor::new(body_vec);
|
||||
|
||||
// Match the appropriate decompressor for the given
|
||||
// expected_encoding.
|
||||
let mut decoder: Box<dyn io::Read> = match expected_encoding {
|
||||
CompressionType::Gzip => match gzip::Decoder::new(&mut body_cursor) {
|
||||
Ok(dgz) => Box::new(dgz),
|
||||
Err(e) => panic!("{}", e),
|
||||
},
|
||||
|
||||
CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),
|
||||
|
||||
_ => panic!("no decompressor for {}", expected_encoding.to_string()),
|
||||
};
|
||||
|
||||
let mut decompressed = Vec::<u8>::new();
|
||||
if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
|
||||
panic!("{}", e);
|
||||
};
|
||||
|
||||
assert!(decompressed.eq(&expected_lorem_ipsum));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -19,7 +19,7 @@ struct SettingsTemplate {
|
||||
|
||||
// CONSTANTS
|
||||
|
||||
const PREFS: [&str; 11] = [
|
||||
const PREFS: [&str; 13] = [
|
||||
"theme",
|
||||
"front_page",
|
||||
"layout",
|
||||
@ -27,10 +27,12 @@ const PREFS: [&str; 11] = [
|
||||
"comment_sort",
|
||||
"post_sort",
|
||||
"show_nsfw",
|
||||
"blur_nsfw",
|
||||
"use_hls",
|
||||
"hide_hls_notification",
|
||||
"autoplay_videos",
|
||||
"fixed_navbar",
|
||||
"hide_awards",
|
||||
];
|
||||
|
||||
// FUNCTIONS
|
||||
@ -39,7 +41,7 @@ const PREFS: [&str; 11] = [
|
||||
pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let url = req.uri().to_string();
|
||||
template(SettingsTemplate {
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
// CRATES
|
||||
use crate::utils::{
|
||||
catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
|
||||
catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
|
||||
};
|
||||
use crate::{client::json, server::ResponseExt, RequestExt};
|
||||
use askama::Template;
|
||||
@ -24,6 +24,9 @@ struct SubredditTemplate {
|
||||
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||
/// and all fetched posts being filtered).
|
||||
all_posts_filtered: bool,
|
||||
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||
all_posts_hidden_nsfw: bool,
|
||||
no_posts: bool,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
@ -94,6 +97,12 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
}
|
||||
};
|
||||
|
||||
// Return the landing page if this is an NSFW community but the user
|
||||
// has disabled the display of NSFW content or if the instance is SFW-only.
|
||||
if sub.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
|
||||
return Ok(nsfw_landing(req).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, req.uri().query().unwrap_or_default());
|
||||
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
|
||||
let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
|
||||
@ -106,27 +115,32 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
posts: Vec::new(),
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: true,
|
||||
all_posts_filtered: false,
|
||||
all_posts_hidden_nsfw: false,
|
||||
no_posts: false,
|
||||
})
|
||||
} else {
|
||||
match Post::fetch(&path, quarantined).await {
|
||||
Ok((mut posts, after)) => {
|
||||
let all_posts_filtered = filter_posts(&mut posts, &filters);
|
||||
|
||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||
let no_posts = posts.is_empty();
|
||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||
template(SubredditTemplate {
|
||||
sub,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: false,
|
||||
all_posts_filtered,
|
||||
all_posts_hidden_nsfw,
|
||||
no_posts,
|
||||
})
|
||||
}
|
||||
Err(msg) => match msg.as_str() {
|
||||
@ -145,7 +159,7 @@ pub fn quarantine(req: Request<Body>, sub: String) -> Result<Response<Body>, Str
|
||||
msg: "Please click the button below to continue to this subreddit.".to_string(),
|
||||
url: req.uri().to_string(),
|
||||
sub,
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
};
|
||||
|
||||
Ok(
|
||||
@ -192,7 +206,7 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,
|
||||
|
||||
let query = req.uri().query().unwrap_or_default().to_string();
|
||||
|
||||
let preferences = Preferences::new(req);
|
||||
let preferences = Preferences::new(&req);
|
||||
let mut sub_list = preferences.subscriptions;
|
||||
let mut filters = preferences.filters;
|
||||
|
||||
@ -305,7 +319,7 @@ pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
sub,
|
||||
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("<h3>Wiki not found</h3>")),
|
||||
page,
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
}),
|
||||
Err(msg) => {
|
||||
@ -343,7 +357,7 @@ pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// ),
|
||||
sub,
|
||||
page: "Sidebar".to_string(),
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
}),
|
||||
Err(msg) => {
|
||||
@ -416,5 +430,6 @@ async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
|
||||
members: format_num(members),
|
||||
active: format_num(active),
|
||||
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
|
||||
nsfw: res["data"]["over18"].as_bool().unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
28
src/user.rs
@ -1,7 +1,7 @@
|
||||
// CRATES
|
||||
use crate::client::json;
|
||||
use crate::server::RequestExt;
|
||||
use crate::utils::{error, filter_posts, format_url, get_filters, param, template, Post, Preferences, User};
|
||||
use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User};
|
||||
use askama::Template;
|
||||
use hyper::{Body, Request, Response};
|
||||
use time::{macros::format_description, OffsetDateTime};
|
||||
@ -24,6 +24,9 @@ struct UserTemplate {
|
||||
/// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
|
||||
/// and all fetched posts being filtered).
|
||||
all_posts_filtered: bool,
|
||||
/// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW)
|
||||
all_posts_hidden_nsfw: bool,
|
||||
no_posts: bool,
|
||||
}
|
||||
|
||||
// FUNCTIONS
|
||||
@ -43,8 +46,17 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
// Retrieve other variables from Libreddit request
|
||||
let sort = param(&path, "sort").unwrap_or_default();
|
||||
let username = req.param("name").unwrap_or_default();
|
||||
|
||||
// Retrieve info from user about page.
|
||||
let user = user(&username).await.unwrap_or_default();
|
||||
|
||||
// Return the landing page if Reddit deems this user NSFW,
// but the user has disabled the display of NSFW content or the instance
// is SFW-only.
|
||||
if user.nsfw && (setting(&req, "show_nsfw") != "on" || crate::utils::sfw_only()) {
|
||||
return Ok(nsfw_landing(req).await.unwrap_or_default());
|
||||
}
|
||||
|
||||
let filters = get_filters(&req);
|
||||
if filters.contains(&["u_", &username].concat()) {
|
||||
template(UserTemplate {
|
||||
@ -53,29 +65,34 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
|
||||
listing,
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: true,
|
||||
all_posts_filtered: false,
|
||||
all_posts_hidden_nsfw: false,
|
||||
no_posts: false,
|
||||
})
|
||||
} else {
|
||||
// Request user posts/comments from Reddit
|
||||
match Post::fetch(&path, false).await {
|
||||
Ok((mut posts, after)) => {
|
||||
let all_posts_filtered = filter_posts(&mut posts, &filters);
|
||||
|
||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||
let no_posts = posts.is_empty();
|
||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||
template(UserTemplate {
|
||||
user,
|
||||
posts,
|
||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||
ends: (param(&path, "after").unwrap_or_default(), after),
|
||||
listing,
|
||||
prefs: Preferences::new(req),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
redirect_url,
|
||||
is_filtered: false,
|
||||
all_posts_filtered,
|
||||
all_posts_hidden_nsfw,
|
||||
no_posts,
|
||||
})
|
||||
}
|
||||
// If there is an error show error page
|
||||
@ -107,6 +124,7 @@ async fn user(name: &str) -> Result<User, String> {
|
||||
created: created.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default(),
|
||||
banner: about("banner_img"),
|
||||
description: about("public_description"),
|
||||
nsfw: res["data"]["subreddit"]["over_18"].as_bool().unwrap_or_default(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
260
src/utils.rs
@ -9,10 +9,36 @@ use regex::Regex;
|
||||
use rust_embed::RustEmbed;
|
||||
use serde_json::Value;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::env;
|
||||
use std::str::FromStr;
|
||||
use time::{macros::format_description, Duration, OffsetDateTime};
|
||||
use url::Url;
|
||||
|
||||
/// Write a message to stderr on debug mode. This function is a no-op on
|
||||
/// release code.
|
||||
#[macro_export]
|
||||
macro_rules! dbg_msg {
|
||||
($x:expr) => {
|
||||
#[cfg(debug_assertions)]
|
||||
eprintln!("{}:{}: {}", file!(), line!(), $x.to_string())
|
||||
};
|
||||
|
||||
($($x:expr),+) => {
|
||||
#[cfg(debug_assertions)]
|
||||
dbg_msg!(format!($($x),+))
|
||||
};
|
||||
}
|
||||
|
||||
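For illustration, `dbg_msg!` is used like `eprintln!` but compiles to nothing in release builds; a hedged sketch (the failing function and its error text are invented):

```rust
use crate::dbg_msg;

fn might_fail() -> Result<(), String> {
	Err("connection refused".to_string()) // made-up error for illustration
}

fn demo() {
	if let Err(e) = might_fail() {
		// Prints "src/<file>.rs:<line>: connection refused", debug builds only.
		dbg_msg!(e);
		// The variadic form forwards format arguments.
		dbg_msg!("retrying after error: {}", e);
	}
}
```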
/// Identifies whether or not the page is a subreddit, a user page, or a post.
|
||||
/// This is used by the NSFW landing template to determine the message to convey
|
||||
/// to the user.
|
||||
#[derive(PartialEq, Eq)]
|
||||
pub enum ResourceType {
|
||||
Subreddit,
|
||||
User,
|
||||
Post,
|
||||
}
|
||||
|
||||
// Post flair with content, background color and foreground color
|
||||
pub struct Flair {
|
||||
pub flair_parts: Vec<FlairPart>,
|
||||
@ -210,9 +236,11 @@ pub struct Post {
|
||||
pub domain: String,
|
||||
pub rel_time: String,
|
||||
pub created: String,
|
||||
pub num_duplicates: u64,
|
||||
pub comments: (String, String),
|
||||
pub gallery: Vec<GalleryMedia>,
|
||||
pub awards: Awards,
|
||||
pub nsfw: bool,
|
||||
}
|
||||
|
||||
impl Post {
|
||||
@ -304,14 +332,16 @@ impl Post {
|
||||
},
|
||||
flags: Flags {
|
||||
nsfw: data["over_18"].as_bool().unwrap_or_default(),
|
||||
stickied: data["stickied"].as_bool().unwrap_or_default(),
|
||||
stickied: data["stickied"].as_bool().unwrap_or_default() || data["pinned"].as_bool().unwrap_or_default(),
|
||||
},
|
||||
permalink: val(post, "permalink"),
|
||||
rel_time,
|
||||
created,
|
||||
num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
|
||||
comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
|
||||
gallery,
|
||||
awards,
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
|
||||
@ -340,6 +370,7 @@ pub struct Comment {
|
||||
pub awards: Awards,
|
||||
pub collapsed: bool,
|
||||
pub is_filtered: bool,
|
||||
pub prefs: Preferences,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
@ -403,6 +434,27 @@ pub struct ErrorTemplate {
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
/// Template for NSFW landing page. The landing page is displayed when a page's
|
||||
/// content is wholly NSFW, but a user has not enabled the option to view NSFW
|
||||
/// posts.
|
||||
#[derive(Template)]
|
||||
#[template(path = "nsfwlanding.html")]
|
||||
pub struct NSFWLandingTemplate {
|
||||
/// Identifier for the resource. This is either a subreddit name or a
|
||||
/// username. (In the case of the latter, set is_user to true.)
|
||||
pub res: String,
|
||||
|
||||
/// Identifies whether or not the resource is a subreddit, a user page,
|
||||
/// or a post.
|
||||
pub res_type: ResourceType,
|
||||
|
||||
/// User preferences.
|
||||
pub prefs: Preferences,
|
||||
|
||||
/// Request URL.
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
// User struct containing metadata about user
|
||||
pub struct User {
|
||||
@ -413,6 +465,7 @@ pub struct User {
|
||||
pub created: String,
|
||||
pub banner: String,
|
||||
pub description: String,
|
||||
pub nsfw: bool,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@ -427,6 +480,7 @@ pub struct Subreddit {
|
||||
pub members: (String, String),
|
||||
pub active: (String, String),
|
||||
pub wiki: bool,
|
||||
pub nsfw: bool,
|
||||
}
|
||||
|
||||
// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
|
||||
@ -447,6 +501,7 @@ pub struct Preferences {
|
||||
pub layout: String,
|
||||
pub wide: String,
|
||||
pub show_nsfw: String,
|
||||
pub blur_nsfw: String,
|
||||
pub hide_hls_notification: String,
|
||||
pub use_hls: String,
|
||||
pub autoplay_videos: String,
|
||||
@ -455,6 +510,7 @@ pub struct Preferences {
|
||||
pub post_sort: String,
|
||||
pub subscriptions: Vec<String>,
|
||||
pub filters: Vec<String>,
|
||||
pub hide_awards: String,
|
||||
}
|
||||
|
||||
#[derive(RustEmbed)]
|
||||
@ -464,7 +520,7 @@ pub struct ThemeAssets;
|
||||
|
||||
impl Preferences {
|
||||
// Build preferences from cookies
|
||||
pub fn new(req: Request<Body>) -> Self {
|
||||
pub fn new(req: &Request<Body>) -> Self {
|
||||
// Read available theme names from embedded css files.
|
||||
// Always make the default "system" theme available.
|
||||
let mut themes = vec!["system".to_string()];
|
||||
@ -479,6 +535,7 @@ impl Preferences {
|
||||
layout: setting(&req, "layout"),
|
||||
wide: setting(&req, "wide"),
|
||||
show_nsfw: setting(&req, "show_nsfw"),
|
||||
blur_nsfw: setting(&req, "blur_nsfw"),
|
||||
use_hls: setting(&req, "use_hls"),
|
||||
hide_hls_notification: setting(&req, "hide_hls_notification"),
|
||||
autoplay_videos: setting(&req, "autoplay_videos"),
|
||||
@ -487,6 +544,7 @@ impl Preferences {
|
||||
post_sort: setting(&req, "post_sort"),
|
||||
subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
|
||||
filters: setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
|
||||
hide_awards: setting(&req, "hide_awards"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -496,15 +554,111 @@ pub fn get_filters(req: &Request<Body>) -> HashSet<String> {
|
||||
setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect::<HashSet<String>>()
|
||||
}
|
||||
|
||||
/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being a subreddit name or a user name). If a
|
||||
/// `Post`'s subreddit or author is found in the filters, it is removed. Returns `true` if _all_ posts were filtered
|
||||
/// out, or `false` otherwise.
|
||||
pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> bool {
|
||||
/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being
|
||||
/// a subreddit name or a user name). If a `Post`'s subreddit or author is
|
||||
/// found in the filters, it is removed.
|
||||
///
|
||||
/// The first value of the return tuple is the number of posts filtered. The
|
||||
/// second return value is `true` if all posts were filtered.
|
||||
pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> (u64, bool) {
|
||||
// This is the length of the Vec<Post> prior to applying the filter.
|
||||
let lb: u64 = posts.len().try_into().unwrap_or(0);
|
||||
|
||||
if posts.is_empty() {
|
||||
false
|
||||
(0, false)
|
||||
} else {
|
||||
posts.retain(|p| !filters.contains(&p.community) && !filters.contains(&["u_", &p.author.name].concat()));
|
||||
posts.is_empty()
|
||||
posts.retain(|p| !(filters.contains(&p.community) || filters.contains(&["u_", &p.author.name].concat())));
|
||||
|
||||
// Get the length of the Vec<Post> after applying the filter.
|
||||
// If lb > la, then at least one post was removed.
|
||||
let la: u64 = posts.len().try_into().unwrap_or(0);
|
||||
|
||||
(lb - la, posts.is_empty())
|
||||
}
|
||||
}
|
||||
|
||||
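A brief sketch of consuming the new return tuple; it assumes `Post` and `filter_posts` from this module are in scope, and the filter names are invented:

```rust
use std::collections::HashSet;

fn demo(mut posts: Vec<Post>) {
	// One subreddit filter and one user filter (users are prefixed with "u_").
	let filters: HashSet<String> = ["memes".to_string(), "u_spammer".to_string()].into();

	let (num_filtered, all_filtered) = filter_posts(&mut posts, &filters);
	if all_filtered {
		// Every fetched post was removed; templates use this to show a notice
		// instead of an empty listing.
		println!("all {} fetched posts were filtered", num_filtered);
	}
}
```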
/// Creates a [`Post`] from a provided JSON.
|
||||
pub async fn parse_post(post: &serde_json::Value) -> Post {
|
||||
// Grab UTC time as unix timestamp
|
||||
let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
|
||||
// Parse post score and upvote ratio
|
||||
let score = post["data"]["score"].as_i64().unwrap_or_default();
|
||||
let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
|
||||
|
||||
// Determine the type of media along with the media URL
|
||||
let (post_type, media, gallery) = Media::parse(&post["data"]).await;
|
||||
|
||||
let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
|
||||
|
||||
let permalink = val(post, "permalink");
|
||||
|
||||
let body = if val(post, "removed_by_category") == "moderator" {
|
||||
format!(
|
||||
"<div class=\"md\"><p>[removed] — <a href=\"https://www.unddit.com{}\">view removed post</a></p></div>",
|
||||
permalink
|
||||
)
|
||||
} else {
|
||||
rewrite_urls(&val(post, "selftext_html"))
|
||||
};
|
||||
|
||||
// Build a post using data parsed from Reddit post API
|
||||
Post {
|
||||
id: val(post, "id"),
|
||||
title: val(post, "title"),
|
||||
community: val(post, "subreddit"),
|
||||
body,
|
||||
author: Author {
|
||||
name: val(post, "author"),
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
post["data"]["author_flair_type"].as_str().unwrap_or_default(),
|
||||
post["data"]["author_flair_richtext"].as_array(),
|
||||
post["data"]["author_flair_text"].as_str(),
|
||||
),
|
||||
text: val(post, "link_flair_text"),
|
||||
background_color: val(post, "author_flair_background_color"),
|
||||
foreground_color: val(post, "author_flair_text_color"),
|
||||
},
|
||||
distinguished: val(post, "distinguished"),
|
||||
},
|
||||
permalink,
|
||||
score: format_num(score),
|
||||
upvote_ratio: ratio as i64,
|
||||
post_type,
|
||||
media,
|
||||
thumbnail: Media {
|
||||
url: format_url(val(post, "thumbnail").as_str()),
|
||||
alt_url: String::new(),
|
||||
width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
|
||||
height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
|
||||
poster: String::new(),
|
||||
},
|
||||
flair: Flair {
|
||||
flair_parts: FlairPart::parse(
|
||||
post["data"]["link_flair_type"].as_str().unwrap_or_default(),
|
||||
post["data"]["link_flair_richtext"].as_array(),
|
||||
post["data"]["link_flair_text"].as_str(),
|
||||
),
|
||||
text: val(post, "link_flair_text"),
|
||||
background_color: val(post, "link_flair_background_color"),
|
||||
foreground_color: if val(post, "link_flair_text_color") == "dark" {
|
||||
"black".to_string()
|
||||
} else {
|
||||
"white".to_string()
|
||||
},
|
||||
},
|
||||
flags: Flags {
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
|
||||
stickied: post["data"]["stickied"].as_bool().unwrap_or_default() || post["data"]["pinned"].as_bool().unwrap_or(false),
|
||||
},
|
||||
domain: val(post, "domain"),
|
||||
rel_time,
|
||||
created,
|
||||
num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
|
||||
comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
|
||||
gallery,
|
||||
awards,
|
||||
nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -531,8 +685,8 @@ pub fn setting(req: &Request<Body>, name: &str) -> String {
|
||||
req
|
||||
.cookie(name)
|
||||
.unwrap_or_else(|| {
|
||||
// If there is no cookie for this setting, try receiving a default from an environment variable
|
||||
if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
|
||||
// If there is no cookie for this setting, try receiving a default from the config
|
||||
if let Some(default) = crate::config::get_setting(&format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
|
||||
Cookie::new(name, default)
|
||||
} else {
|
||||
Cookie::named(name)
|
||||
@ -713,11 +867,12 @@ pub fn redirect(path: String) -> Response<Body> {
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
|
||||
/// Renders a generic error landing page.
|
||||
pub async fn error(req: Request<Body>, msg: impl ToString) -> Result<Response<Body>, String> {
|
||||
let url = req.uri().to_string();
|
||||
let body = ErrorTemplate {
|
||||
msg,
|
||||
prefs: Preferences::new(req),
|
||||
msg: msg.to_string(),
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
}
|
||||
.render()
|
||||
@ -726,10 +881,54 @@ pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, St
|
||||
Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
|
||||
}
|
||||
|
||||
/// Returns true if the config/env variable `LIBREDDIT_SFW_ONLY` carries the
|
||||
/// value `on`.
|
||||
///
|
||||
/// If this variable is set as such, the instance will operate in SFW-only
|
||||
/// mode; all NSFW content will be filtered. Attempts to access NSFW
|
||||
/// subreddits or posts or userpages for users Reddit has deemed NSFW will
|
||||
/// be denied.
|
||||
pub fn sfw_only() -> bool {
|
||||
match crate::config::get_setting("LIBREDDIT_SFW_ONLY") {
|
||||
Some(val) => val == "on",
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
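The same gate shows up in post.rs, subreddit.rs, and user.rs above; condensed into one place for illustration (the helper name is invented, and `setting` is the function defined earlier in this file):

```rust
use hyper::{Body, Request};

// Returns true when NSFW content must be withheld: the resource is NSFW and
// either the user has not opted in or the instance runs with
// LIBREDDIT_SFW_ONLY=on.
fn nsfw_is_blocked(req: &Request<Body>, resource_is_nsfw: bool) -> bool {
	resource_is_nsfw && (setting(req, "show_nsfw") != "on" || sfw_only())
}
```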
/// Renders the landing page for NSFW content when the user has not enabled
|
||||
/// "show NSFW posts" in settings.
|
||||
pub async fn nsfw_landing(req: Request<Body>) -> Result<Response<Body>, String> {
|
||||
let res_type: ResourceType;
|
||||
let url = req.uri().to_string();
|
||||
|
||||
// Determine from the request URL if the resource is a subreddit, a user
|
||||
// page, or a post.
|
||||
let res: String = if !req.param("name").unwrap_or_default().is_empty() {
|
||||
res_type = ResourceType::User;
|
||||
req.param("name").unwrap_or_default()
|
||||
} else if !req.param("id").unwrap_or_default().is_empty() {
|
||||
res_type = ResourceType::Post;
|
||||
req.param("id").unwrap_or_default()
|
||||
} else {
|
||||
res_type = ResourceType::Subreddit;
|
||||
req.param("sub").unwrap_or_default()
|
||||
};
|
||||
|
||||
let body = NSFWLandingTemplate {
|
||||
res,
|
||||
res_type,
|
||||
prefs: Preferences::new(&req),
|
||||
url,
|
||||
}
|
||||
.render()
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Response::builder().status(403).header("content-type", "text/html").body(body.into()).unwrap_or_default())
|
||||
}
|
||||
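Taken together, `sfw_only`, `setting`, and `nsfw_landing` give route handlers what they need to gate NSFW resources. A hedged sketch of how a handler might combine them; the `is_nsfw` flag and the call site are assumptions for illustration, not code from this diff:

async fn maybe_gate_nsfw(req: Request<Body>, is_nsfw: bool) -> Option<Result<Response<Body>, String>> {
	// Serve the 403 landing page when the resource is NSFW and either the whole
	// instance is SFW-only or the viewer has not enabled "Show NSFW posts".
	if is_nsfw && (sfw_only() || setting(&req, "show_nsfw") != "on") {
		return Some(nsfw_landing(req).await);
	}
	// Otherwise let the normal handler continue.
	None
}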
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::format_num;
|
||||
use super::rewrite_urls;
|
||||
use super::{format_num, format_url, rewrite_urls};
|
||||
|
||||
#[test]
|
||||
fn format_num_works() {
|
||||
@ -749,4 +948,33 @@ mod tests {
|
||||
r#"<a href="https://www.reddit.com/r/linux_gaming/comments/x/just_a_test/">https://www.reddit.com/r/linux_gaming/comments/x/just_a_test/</a>"#
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_url() {
|
||||
assert_eq!(format_url("https://a.thumbs.redditmedia.com/XYZ.jpg"), "/thumb/a/XYZ.jpg");
|
||||
assert_eq!(format_url("https://emoji.redditmedia.com/a/b"), "/emoji/a/b");
|
||||
|
||||
assert_eq!(
|
||||
format_url("https://external-preview.redd.it/foo.jpg?auto=webp&s=bar"),
|
||||
"/preview/external-pre/foo.jpg?auto=webp&s=bar"
|
||||
);
|
||||
|
||||
assert_eq!(format_url("https://i.redd.it/foobar.jpg"), "/img/foobar.jpg");
|
||||
assert_eq!(
|
||||
format_url("https://preview.redd.it/qwerty.jpg?auto=webp&s=asdf"),
|
||||
"/preview/pre/qwerty.jpg?auto=webp&s=asdf"
|
||||
);
|
||||
assert_eq!(format_url("https://v.redd.it/foo/DASH_360.mp4?source=fallback"), "/vid/foo/360.mp4");
|
||||
assert_eq!(
|
||||
format_url("https://v.redd.it/foo/HLSPlaylist.m3u8?a=bar&v=1&f=sd"),
|
||||
"/hls/foo/HLSPlaylist.m3u8?a=bar&v=1&f=sd"
|
||||
);
|
||||
assert_eq!(format_url("https://www.redditstatic.com/gold/awards/icon/icon.png"), "/static/gold/awards/icon/icon.png");
|
||||
|
||||
assert_eq!(format_url(""), "");
|
||||
assert_eq!(format_url("self"), "");
|
||||
assert_eq!(format_url("default"), "");
|
||||
assert_eq!(format_url("nsfw"), "");
|
||||
assert_eq!(format_url("spoiler"), "");
|
||||
}
|
||||
}
|
||||
|
4
static/hls.min.js
vendored
4
static/hls.min.js
vendored
File diff suppressed because one or more lines are too long
205
static/style.css
205
static/style.css
@ -26,6 +26,9 @@
|
||||
--highlighted: #333;
|
||||
--visited: #aaa;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
|
||||
|
||||
/* Hint color theme to browser for scrollbar */
|
||||
color-scheme: dark;
|
||||
}
|
||||
|
||||
/* Browser-defined light theme */
|
||||
@ -42,6 +45,9 @@
|
||||
--highlighted: white;
|
||||
--visited: #555;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
|
||||
/* Hint color theme to browser for scrollbar */
|
||||
color-scheme: light;
|
||||
}
|
||||
}
|
||||
|
||||
@ -161,21 +167,41 @@ main {
|
||||
}
|
||||
|
||||
#column_one {
|
||||
width: 100%;
|
||||
max-width: 750px;
|
||||
border-radius: 5px;
|
||||
overflow: inherit;
|
||||
}
|
||||
|
||||
footer {
|
||||
/* Body footer. */
|
||||
body > footer {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin: 20px;
|
||||
}
|
||||
|
||||
body > footer > div#sfw-only {
|
||||
color: var(--green);
|
||||
border: 1px solid var(--green);
|
||||
padding: 5px;
|
||||
box-sizing: border-box;
|
||||
border-radius: 5px;
|
||||
}
|
||||
/* / Body footer. */
|
||||
|
||||
/* Footer in content block. */
|
||||
main > * > footer {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
footer > a {
|
||||
main > * > footer > a {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
/* / Footer in content block. */
|
||||
|
||||
button {
|
||||
background: none;
|
||||
border: none;
|
||||
@ -487,7 +513,11 @@ button.submit:hover > svg { stroke: var(--accent); }
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
#sort_options, #listing_options, footer > a {
|
||||
#listing_options {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
#sort_options, #listing_options, main > * > footer > a {
|
||||
border-radius: 5px;
|
||||
align-items: center;
|
||||
box-shadow: var(--shadow);
|
||||
@ -496,7 +526,7 @@ button.submit:hover > svg { stroke: var(--accent); }
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
#sort_options > a, #listing_options > a, footer > a {
|
||||
#sort_options > a, #listing_options > a, main > * > footer > a {
|
||||
color: var(--text);
|
||||
padding: 10px 20px;
|
||||
text-align: center;
|
||||
@ -719,22 +749,39 @@ a.search_subreddit:hover {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.post_media_image, .post .__NoScript_PlaceHolder__, .post_media_video, .gallery {
|
||||
.post_media_content, .post .__NoScript_PlaceHolder__, .gallery {
|
||||
max-width: calc(100% - 40px);
|
||||
grid-area: post_media;
|
||||
margin: 15px auto 5px auto;
|
||||
width: auto;
|
||||
height: auto;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
|
||||
.post_media_video.short {
|
||||
max-height: 512px;
|
||||
.post_media_video {
|
||||
width: auto;
|
||||
height: auto;
|
||||
max-width: 100%;
|
||||
max-height: 512px;
|
||||
display: block;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
.post_media_image.short svg, .post_media_image.short img{
|
||||
max-height: 512px;
|
||||
width: auto;
|
||||
height: auto;
|
||||
max-width: 100%;
|
||||
max-height: 512px;
|
||||
display: block;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
.post_nsfw_blur {
|
||||
filter: blur(1.5rem);
|
||||
}
|
||||
|
||||
.post_nsfw_blur:hover {
|
||||
filter: none;
|
||||
}
|
||||
|
||||
.post_media_image svg{
|
||||
@ -809,6 +856,17 @@ a.search_subreddit:hover {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
#comment_count {
|
||||
font-weight: 500;
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
#comment_count > #sorted_by {
|
||||
font-weight: normal;
|
||||
opacity: 0.7;
|
||||
margin-right: 7px;
|
||||
}
|
||||
|
||||
#post_links {
|
||||
display: flex;
|
||||
list-style: none;
|
||||
@ -820,6 +878,16 @@ a.search_subreddit:hover {
|
||||
margin-right: 15px;
|
||||
}
|
||||
|
||||
#post_links > li.desktop_item {
|
||||
display: block;
|
||||
}
|
||||
|
||||
@media screen and (min-width: 480px) {
|
||||
#post_links > li.mobile_item {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.post_thumbnail {
|
||||
border-radius: 5px;
|
||||
border: var(--panel-border);
|
||||
@ -830,13 +898,25 @@ a.search_subreddit:hover {
|
||||
margin: 5px;
|
||||
}
|
||||
|
||||
.post_thumbnail svg {
|
||||
.post_thumbnail div {
|
||||
grid-area: 1 / 1 / 2 / 2;
|
||||
width: 100%;
|
||||
height: auto;
|
||||
object-fit: cover;
|
||||
align-self: center;
|
||||
justify-self: center;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.post_thumbnail div svg {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.post_thumbnail span {
|
||||
z-index: 0;
|
||||
}
|
||||
|
||||
.thumb_nsfw_blur {
|
||||
filter: blur(0.3rem)
|
||||
}
|
||||
|
||||
.post_thumbnail.no_thumbnail {
|
||||
@ -1081,22 +1161,16 @@ summary.comment_data {
|
||||
}
|
||||
|
||||
.prefs {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
padding: 20px;
|
||||
padding: 10px 20px 20px;
|
||||
background: var(--post);
|
||||
border-radius: 5px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.prefs > div {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
width: 100%;
|
||||
height: 35px;
|
||||
align-items: center;
|
||||
margin-top: 7px;
|
||||
.prefs fieldset {
|
||||
border: 0;
|
||||
padding: 10px 0;
|
||||
margin: 0 0 5px;
|
||||
}
|
||||
|
||||
.prefs legend {
|
||||
@ -1104,11 +1178,25 @@ summary.comment_data {
|
||||
border-bottom: 1px solid var(--highlighted);
|
||||
font-size: 18px;
|
||||
padding-bottom: 10px;
|
||||
margin-bottom: 7px;
|
||||
width: 100%;
|
||||
float: left; /* places the legend inside the (invisible) border, instead of vertically centered on the top border */
|
||||
}
|
||||
|
||||
.prefs legend:not(:first-child) {
|
||||
padding-top: 10px;
|
||||
margin-top: 15px;
|
||||
.prefs-group {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
height: 35px;
|
||||
align-items: center;
|
||||
margin-top: 7px;
|
||||
}
|
||||
|
||||
.prefs-group > *:not(:last-child) {
|
||||
margin-right: 1ch;
|
||||
}
|
||||
|
||||
.prefs-group > *:last-child {
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
.prefs select {
|
||||
@ -1126,7 +1214,8 @@ aside.prefs {
|
||||
background: var(--highlighted);
|
||||
padding: 10px 15px;
|
||||
border-radius: 5px;
|
||||
margin-top: 20px;
|
||||
margin-top: 5px;
|
||||
width: 100%
|
||||
}
|
||||
|
||||
input[type="submit"] {
|
||||
@ -1185,16 +1274,21 @@ input[type="submit"] {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.md .md-spoiler-text {
|
||||
.md .md-spoiler-text, .md-spoiler-text a {
|
||||
background: var(--highlighted);
|
||||
color: transparent;
|
||||
}
|
||||
|
||||
.md .md-spoiler-text:hover {
|
||||
.md-spoiler-text:hover {
|
||||
background: var(--foreground);
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.md-spoiler-text:hover a {
|
||||
background: var(--foreground);
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.md li { margin: 10px 0; }
|
||||
.toc_child { list-style: none; }
|
||||
|
||||
@ -1241,6 +1335,54 @@ td, th {
|
||||
#error h3 { opacity: 0.85; }
|
||||
#error a { color: var(--accent); }
|
||||
|
||||
/* Messages */
|
||||
|
||||
#duplicates_msg h3 {
|
||||
display: inline-block;
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/* Warnings */
|
||||
|
||||
.listing_warn {
|
||||
display: inline-block;
|
||||
margin: 10px;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.listing_warn a {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
/* NSFW Landing Page */
|
||||
|
||||
#nsfw_landing {
|
||||
display: inline-block;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#nsfw_landing h1 {
|
||||
display: inline-block;
|
||||
margin-bottom: 20px;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#nsfw_landing p {
|
||||
display: inline-block;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#nsfw_landing a {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
/* Mobile */
|
||||
|
||||
@media screen and (max-width: 800px) {
|
||||
@ -1341,4 +1483,9 @@ td, th {
|
||||
padding: 7px 0px;
|
||||
margin-right: -5px;
|
||||
}
|
||||
|
||||
#post_links > li { margin-right: 10px }
|
||||
#post_links > li.desktop_item { display: none }
|
||||
#post_links > li.mobile_item { display: block }
|
||||
.post_footer > p > span#upvoted { display: none }
|
||||
}
|
||||
|
14
static/themes/dark.css
Normal file
14
static/themes/dark.css
Normal file
@ -0,0 +1,14 @@
|
||||
/* Dark theme setting */
|
||||
.dark{
|
||||
--accent: aqua;
|
||||
--green: #5cff85;
|
||||
--text: white;
|
||||
--foreground: #222;
|
||||
--background: #0f0f0f;
|
||||
--outside: #1f1f1f;
|
||||
--post: #161616;
|
||||
--panel-border: 1px solid #333;
|
||||
--highlighted: #333;
|
||||
--visited: #aaa;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
|
||||
}
|
13
static/themes/doomone.css
Normal file
13
static/themes/doomone.css
Normal file
@ -0,0 +1,13 @@
|
||||
.doomone {
|
||||
--accent: #51afef;
|
||||
--green: #00a229;
|
||||
--text: #bbc2cf;
|
||||
--foreground: #3d4148;
|
||||
--background: #282c34;
|
||||
--outside: #52565c;
|
||||
--post: #24272e;
|
||||
--panel-border: 2px solid #52565c;
|
||||
--highlighted: #686b70;
|
||||
--visited: #969692;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
13
static/themes/gruvboxdark.css
Normal file
13
static/themes/gruvboxdark.css
Normal file
@ -0,0 +1,13 @@
|
||||
/* Gruvbox-Dark theme setting */
|
||||
.gruvboxdark {
|
||||
--accent: #8ec07c;
|
||||
--green: #b8bb26;
|
||||
--text: #ebdbb2;
|
||||
--foreground: #3c3836;
|
||||
--background: #282828;
|
||||
--outside: #3c3836;
|
||||
--post: #3c3836;
|
||||
--panel-border: 1px solid #504945;
|
||||
--highlighted: #282828;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
|
||||
}
|
18
static/themes/gruvboxlight.css
Normal file
18
static/themes/gruvboxlight.css
Normal file
@ -0,0 +1,18 @@
|
||||
/* Gruvbox-Light theme setting */
|
||||
.gruvboxlight {
|
||||
--accent: #427b58;
|
||||
--green: #79740e;
|
||||
--text: #3c3836;
|
||||
--foreground: #ebdbb2;
|
||||
--background: #fbf1c7;
|
||||
--outside: #ebdbb2;
|
||||
--post: #ebdbb2;
|
||||
--panel-border: 1px solid #d5c4a1;
|
||||
--highlighted: #fbf1c7;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.25);
|
||||
}
|
||||
|
||||
html:has(> .gruvboxlight) {
|
||||
/* Hint color theme to browser for scrollbar */
|
||||
color-scheme: light;
|
||||
}
|
@ -12,3 +12,8 @@
|
||||
--visited: #555;
|
||||
--shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
html:has(> .light) {
|
||||
/* Hint color theme to browser for scrollbar */
|
||||
color-scheme: light;
|
||||
}
|
@ -19,7 +19,7 @@
|
||||
<!-- PWA Manifest -->
|
||||
<link rel="manifest" type="application/json" href="/manifest.json">
|
||||
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
|
||||
<link rel="stylesheet" type="text/css" href="/style.css">
|
||||
<link rel="stylesheet" type="text/css" href="/style.css?v={{ env!("CARGO_PKG_VERSION") }}">
|
||||
{% endblock %}
|
||||
</head>
|
||||
<body class="
|
||||
@ -50,7 +50,7 @@
|
||||
<circle cx="12" cy="12" r="3"/><path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"/>
|
||||
</svg>
|
||||
</a>
|
||||
<a id="code" href="https://github.com/spikecodes/libreddit">
|
||||
<a id="code" href="https://github.com/libreddit/libreddit" target="_blank" rel="noopener noreferrer">
|
||||
<span>code</span>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<title>code</title>
|
||||
@ -67,5 +67,10 @@
|
||||
{% endblock %}
|
||||
</main>
|
||||
{% endblock %}
|
||||
{% block footer %}
|
||||
{% if crate::utils::sfw_only() %}
|
||||
<footer><div id="sfw-only">This instance of Libreddit is SFW-only.</div></footer>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
|
@ -20,7 +20,7 @@
|
||||
{% endif %}
|
||||
<a href="{{ post_link }}{{ id }}/?context=3" class="created" title="{{ created }}">{{ rel_time }}</a>
|
||||
{% if edited.0 != "".to_string() %}<span class="edited" title="{{ edited.1 }}">edited {{ edited.0 }}</span>{% endif %}
|
||||
{% if !awards.is_empty() %}
|
||||
{% if !awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
<span class="dot">•</span>
|
||||
{% for award in awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
|
107
templates/duplicates.html
Normal file
107
templates/duplicates.html
Normal file
@ -0,0 +1,107 @@
|
||||
{% extends "base.html" %}
|
||||
{% import "utils.html" as utils %}
|
||||
|
||||
{% block title %}{{ post.title }} - r/{{ post.community }}{% endblock %}
|
||||
|
||||
{% block search %}
|
||||
{% call utils::search(["/r/", post.community.as_str()].concat(), "") %}
|
||||
{% endblock %}
|
||||
|
||||
{% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %}
|
||||
{% block head %}
|
||||
{% call super() %}
|
||||
{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
{% call utils::sub_list(post.community.as_str()) %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
{% call utils::post(post) %}
|
||||
|
||||
<!-- DUPLICATES -->
|
||||
{% if post.num_duplicates == 0 %}
|
||||
<span class="listing_warn">(No duplicates found)</span>
|
||||
{% else if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
<span class="listing_warn">(Enable "Show NSFW posts" in <a href="/settings">settings</a> to show duplicates)</span>
|
||||
{% else %}
|
||||
<div id="duplicates_msg"><h3>Duplicates</h3></div>
|
||||
{% if num_posts_filtered > 0 %}
|
||||
<span class="listing_warn">
|
||||
{% if all_posts_filtered %}
|
||||
(All posts have been filtered)
|
||||
{% else %}
|
||||
(Some posts have been filtered)
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
<div id="sort">
|
||||
<div id="sort_options">
|
||||
<a {% if params.sort.is_empty() || params.sort.eq("num_comments") %}class="selected"{% endif %} href="?sort=num_comments">
|
||||
Number of comments
|
||||
</a>
|
||||
<a {% if params.sort.eq("new") %}class="selected"{% endif %} href="?sort=new">
|
||||
New
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="posts">
|
||||
{% for post in duplicates -%}
|
||||
{# TODO: utils::post should be reworked to permit a truncated display of a post as below #}
|
||||
{% if !(post.flags.nsfw) || prefs.show_nsfw == "on" %}
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
|
||||
<p class="post_header">
|
||||
{% let community -%}
|
||||
{% if post.community.starts_with("u_") -%}
|
||||
{% let community = format!("u/{}", &post.community[2..]) -%}
|
||||
{% else -%}
|
||||
{% let community = format!("r/{}", post.community) -%}
|
||||
{% endif -%}
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
</span>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</p>
|
||||
<h2 class="post_title">
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};"
|
||||
dir="ltr">{% call utils::render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
<a href="{{ post.permalink }}">{{ post.title }}</a>{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</h2>
|
||||
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
<div class="post_footer">
|
||||
<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} comments">{{ post.comments.0 }} comments</a>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
{% endif %}
|
||||
{%- endfor %}
|
||||
</div>
|
||||
|
||||
<footer>
|
||||
{% if params.before != "" %}
|
||||
<a href="?before={{ params.before }}{% if !params.sort.is_empty() %}&sort={{ params.sort }}{% endif %}" accesskey="P">PREV</a>
|
||||
{% endif %}
|
||||
|
||||
{% if params.after != "" %}
|
||||
<a href="?after={{ params.after }}{% if !params.sort.is_empty() %}&sort={{ params.sort }}{% endif %}" accesskey="N">NEXT</a>
|
||||
{% endif %}
|
||||
</footer>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
28
templates/nsfwlanding.html
Normal file
28
templates/nsfwlanding.html
Normal file
@ -0,0 +1,28 @@
|
||||
{% extends "base.html" %}
|
||||
{% block title %}NSFW content gated{% endblock %}
|
||||
{% block sortstyle %}{% endblock %}
|
||||
{% block content %}
|
||||
<div id="nsfw_landing">
|
||||
<h1>
|
||||
😱
|
||||
{% if res_type == crate::utils::ResourceType::Subreddit %}
|
||||
r/{{ res }} is an NSFW community!
|
||||
{% else if res_type == crate::utils::ResourceType::User %}
|
||||
u/{{ res }}'s content is NSFW!
|
||||
{% else if res_type == crate::utils::ResourceType::Post %}
|
||||
This post is NSFW!
|
||||
{% endif %}
|
||||
</h1>
|
||||
<br />
|
||||
|
||||
<p>
|
||||
{% if crate::utils::sfw_only() %}
|
||||
This instance of Libreddit is SFW-only.
|
||||
{% else %}
|
||||
Enable "Show NSFW posts" in <a href="/settings">settings</a> to view this {% if res_type == crate::utils::ResourceType::Subreddit %}subreddit{% else if res_type == crate::utils::ResourceType::User %}user's posts or comments{% else if res_type == crate::utils::ResourceType::Post %}post{% endif %}.
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% block footer %}
|
||||
{% endblock %}
|
@ -13,16 +13,25 @@
|
||||
<!-- Meta Tags -->
|
||||
<meta name="author" content="u/{{ post.author.name }}">
|
||||
<meta name="title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="og:type" content="website">
|
||||
<meta property="og:url" content="{{ post.permalink }}">
|
||||
<meta property="og:title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="og:description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
<meta property="og:image" content="{{ post.thumbnail.url }}">
|
||||
<meta property="twitter:card" content="summary_large_image">
|
||||
<meta property="og:url" content="{{ post.permalink }}">
|
||||
<meta property="twitter:url" content="{{ post.permalink }}">
|
||||
<meta property="twitter:title" content="{{ post.title }} - r/{{ post.community }}">
|
||||
<meta property="twitter:description" content="View on Libreddit, an alternative private front-end to Reddit.">
|
||||
{% if post.post_type == "image" %}
|
||||
<meta property="og:type" content="image">
|
||||
<meta property="og:image" content="{{ post.thumbnail.url }}">
|
||||
<meta property="twitter:card" content="summary_large_image">
|
||||
<meta property="twitter:image" content="{{ post.thumbnail.url }}">
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
<meta property="twitter:card" content="video">
|
||||
<meta property="og:type" content="video">
|
||||
<meta property="og:video" content="{{ post.media.url }}">
|
||||
<meta property="og:video:type" content="video/mp4">
|
||||
{% else %}
|
||||
<meta property="og:type" content="website">
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block subscriptions %}
|
||||
@ -31,98 +40,11 @@
|
||||
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
|
||||
<!-- POST CONTENT -->
|
||||
<div class="post highlighted">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call utils::render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() %}
|
||||
<span class="dot">•</span>
|
||||
<span class="awards">
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
{{ award.count }}
|
||||
</span>
|
||||
{% endfor %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</p>
|
||||
<h1 class="post_title">
|
||||
{{ post.title }}
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call utils::render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</h1>
|
||||
|
||||
<!-- POST MEDIA -->
|
||||
<!-- post_type: {{ post.post_type }} -->
|
||||
{% if post.post_type == "image" %}
|
||||
<a href="{{ post.media.url }}" class="post_media_image" >
|
||||
<svg
|
||||
width="{{ post.media.width }}px"
|
||||
height="{{ post.media.height }}px"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.media.url }}"/>
|
||||
<desc>
|
||||
<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</a>
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls>
|
||||
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
|
||||
<source src="{{ post.media.url }}" type="video/mp4" />
|
||||
</video>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% else %}
|
||||
<video class="post_media_video" src="{{ post.media.url }}" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %} loop><a href={{ post.media.url }}>Video</a></video>
|
||||
{% call utils::render_hls_notification(post.permalink[1..]) %}
|
||||
{% endif %}
|
||||
{% else if post.post_type == "gallery" %}
|
||||
<div class="gallery">
|
||||
{% for image in post.gallery -%}
|
||||
<figure>
|
||||
<a href="{{ image.url }}" ><img loading="lazy" alt="Gallery image" src="{{ image.url }}"/></a>
|
||||
<figcaption>
|
||||
<p>{{ image.caption }}</p>
|
||||
{% if image.outbound_url.len() > 0 %}
|
||||
<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
|
||||
{% endif %}
|
||||
</figcaption>
|
||||
</figure>
|
||||
{%- endfor %}
|
||||
</div>
|
||||
{% else if post.post_type == "link" %}
|
||||
<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
|
||||
{% endif %}
|
||||
|
||||
<!-- POST BODY -->
|
||||
<div class="post_body">{{ post.body|safe }}</div>
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
<div class="post_footer">
|
||||
<ul id="post_links">
|
||||
<li><a href="/{{ post.id }}">permalink</a></li>
|
||||
<li><a href="https://reddit.com/{{ post.id }}" rel="nofollow">reddit</a></li>
|
||||
</ul>
|
||||
<p>{{ post.upvote_ratio }}% Upvoted</p>
|
||||
</div>
|
||||
</div>
|
||||
{% call utils::post(post) %}
|
||||
|
||||
<!-- SORT FORM -->
|
||||
<form id="sort">
|
||||
<p id="comment_count">{{post.comments.0}} {% if post.comments.0 == "1" %}comment{% else %}comments{% endif %} <span id="sorted_by">sorted by </span></p>
|
||||
<select name="sort" title="Sort comments by">
|
||||
{% call utils::options(sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select><button id="sort_submit" class="submit">
|
||||
@ -138,7 +60,7 @@
|
||||
{% for c in comments -%}
|
||||
<div class="thread">
|
||||
{% if single_thread %}
|
||||
<p class="thread_nav"><a href="/{{ post.id }}">View all comments</a></p>
|
||||
<p class="thread_nav"><a href="{{ post.permalink }}">View all comments</a></p>
|
||||
{% if c.parent_kind == "t1" %}
|
||||
<p class="thread_nav"><a href="?context=9999">Show parent comments</a></p>
|
||||
{% endif %}
|
||||
|
@ -10,7 +10,7 @@
|
||||
{% block content %}
|
||||
<div id="column_one">
|
||||
<form id="search_sort">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q }}" title="Search libreddit">
|
||||
<input id="search" type="text" name="q" placeholder="Search" value="{{ params.q|safe }}" title="Search libreddit">
|
||||
{% if sub != "" %}
|
||||
<div id="inside">
|
||||
<input type="checkbox" name="restrict_sr" id="restrict_sr" {% if params.restrict_sr != "" %}checked{% endif %}>
|
||||
@ -56,10 +56,19 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_hidden_nsfw %}
|
||||
<span class="listing_warn">All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</span>
|
||||
{% endif %}
|
||||
|
||||
{% if no_posts %}
|
||||
<center>No posts were found.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_filtered %}
|
||||
<center>(All content on this page has been filtered)</center>
|
||||
<span class="listing_warn">(All content on this page has been filtered)</span>
|
||||
{% else if is_filtered %}
|
||||
<center>(Content from r/{{ sub }} has been filtered)</center>
|
||||
<span class="listing_warn">(Content from r/{{ sub }} has been filtered)</span>
|
||||
{% else if params.typed != "sr_user" %}
|
||||
{% for post in posts %}
|
||||
{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
|
||||
|
@ -66,19 +66,91 @@
|
||||
</div>
|
||||
<div id="use_hls">
|
||||
<label for="use_hls">Use HLS for videos
|
||||
<fieldset>
|
||||
<legend>Appearance</legend>
|
||||
<div class="prefs-group">
|
||||
<label for="theme">Theme:</label>
|
||||
<select name="theme" id="theme">
|
||||
{% call utils::options(prefs.theme, prefs.available_themes, "system") %}
|
||||
</select>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<legend>Interface</legend>
|
||||
<div class="prefs-group">
|
||||
<label for="front_page">Front page:</label>
|
||||
<select name="front_page" id="front_page">
|
||||
{% call utils::options(prefs.front_page, ["default", "popular", "all"], "default") %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="layout">Layout:</label>
|
||||
<select name="layout" id="layout">
|
||||
{% call utils::options(prefs.layout, ["card", "clean", "compact"], "card") %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="wide">Wide UI:</label>
|
||||
<input type="hidden" value="off" name="wide">
|
||||
<input type="checkbox" name="wide" id="wide" {% if prefs.wide == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<legend>Content</legend>
|
||||
<div class="prefs-group">
|
||||
<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
|
||||
<select name="post_sort">
|
||||
{% call utils::options(prefs.post_sort, ["hot", "new", "top", "rising", "controversial"], "hot") %}
|
||||
</select>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="comment_sort">Default comment sort:</label>
|
||||
<select name="comment_sort" id="comment_sort">
|
||||
{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
|
||||
</select>
|
||||
</div>
|
||||
{% if !crate::utils::sfw_only() %}
|
||||
<div class="prefs-group">
|
||||
<label for="show_nsfw">Show NSFW posts:</label>
|
||||
<input type="hidden" value="off" name="show_nsfw">
|
||||
<input type="checkbox" name="show_nsfw" id="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="blur_nsfw">Blur NSFW previews:</label>
|
||||
<input type="hidden" value="off" name="blur_nsfw">
|
||||
<input type="checkbox" name="blur_nsfw" id="blur_nsfw" {% if prefs.blur_nsfw == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="prefs-group">
|
||||
<label for="autoplay_videos">Autoplay videos</label>
|
||||
<input type="hidden" value="off" name="autoplay_videos">
|
||||
<input type="checkbox" name="autoplay_videos" id="autoplay_videos" {% if prefs.autoplay_videos == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="fixed_navbar">Keep navbar fixed</label>
|
||||
<input type="hidden" value="off" name="fixed_navbar">
|
||||
<input type="checkbox" name="fixed_navbar" {% if prefs.fixed_navbar == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="use_hls">Use HLS for videos</label>
|
||||
<details id="feeds">
|
||||
<summary>Why?</summary>
|
||||
<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled to be played with audio. Therefore, this toggle lets you either use Libreddit JS-free or utilize this feature.</div>
|
||||
</details>
|
||||
</label>
|
||||
<input type="hidden" value="off" name="use_hls">
|
||||
<input type="checkbox" name="use_hls" {% if prefs.use_hls == "on" %}checked{% endif %}>
|
||||
<input type="checkbox" name="use_hls" id="use_hls" {% if prefs.use_hls == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div id="hide_hls_notification">
|
||||
<div class="prefs-group">
|
||||
<label for="hide_hls_notification">Hide notification about possible HLS usage</label>
|
||||
<input type="hidden" value="off" name="hide_hls_notification">
|
||||
<input type="checkbox" name="hide_hls_notification" {% if prefs.hide_hls_notification == "on" %}checked{% endif %}>
|
||||
<input type="checkbox" name="hide_hls_notification" id="hide_hls_notification" {% if prefs.hide_hls_notification == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="prefs-group">
|
||||
<label for="hide_awards">Hide awards</label>
|
||||
<input type="hidden" value="off" name="hide_awards">
|
||||
<input type="checkbox" name="hide_awards" id="hide_awards" {% if prefs.hide_awards == "on" %}checked{% endif %}>
|
||||
</div>
|
||||
</fieldset>
|
||||
<input id="save" type="submit" value="Save">
|
||||
</div>
|
||||
</form>
|
||||
@ -115,7 +187,7 @@
|
||||
|
||||
<div id="settings_note">
|
||||
<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.</p><br>
|
||||
<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&fixed_navbar={{ prefs.fixed_navbar }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
|
||||
<p>You can restore your current settings and subscriptions after clearing your cookies using <a href="/settings/restore/?theme={{ prefs.theme }}&front_page={{ prefs.front_page }}&layout={{ prefs.layout }}&wide={{ prefs.wide }}&post_sort={{ prefs.post_sort }}&comment_sort={{ prefs.comment_sort }}&show_nsfw={{ prefs.show_nsfw }}&use_hls={{ prefs.use_hls }}&hide_hls_notification={{ prefs.hide_hls_notification }}&hide_awards={{ prefs.hide_awards }}&fixed_navbar={{ prefs.fixed_navbar }}&subscriptions={{ prefs.subscriptions.join("%2B") }}&filters={{ prefs.filters.join("%2B") }}">this link</a>.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
@ -46,6 +46,14 @@
|
||||
</form>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_hidden_nsfw %}
|
||||
<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if no_posts %}
|
||||
<center>No posts were found.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_filtered %}
|
||||
<center>(All content on this page has been filtered)</center>
|
||||
{% else %}
|
||||
|
@ -32,6 +32,14 @@
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{% if all_posts_hidden_nsfw %}
|
||||
<center>All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if no_posts %}
|
||||
<center>No posts were found.</center>
|
||||
{% endif %}
|
||||
|
||||
{% if all_posts_filtered %}
|
||||
<center>(All content on this page has been filtered)</center>
|
||||
{% else %}
|
||||
|
@ -61,6 +61,109 @@
|
||||
{% endif %}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro post(post) -%}
|
||||
<!-- POST CONTENT -->
|
||||
<div class="post highlighted">
|
||||
<p class="post_header">
|
||||
<a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a>
|
||||
<span class="dot">•</span>
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/user/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
{% if post.author.flair.flair_parts.len() > 0 %}
|
||||
<small class="author_flair">{% call render_flair(post.author.flair.flair_parts) %}</small>
|
||||
{% endif %}
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
<span class="dot">•</span>
|
||||
<span class="awards">
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
{{ award.count }}
|
||||
</span>
|
||||
{% endfor %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</p>
|
||||
<h1 class="post_title">
|
||||
{{ post.title }}
|
||||
{% if post.flair.flair_parts.len() > 0 %}
|
||||
<a href="/r/{{ post.community }}/search?q=flair_name%3A%22{{ post.flair.text }}%22&restrict_sr=on"
|
||||
class="post_flair"
|
||||
style="color:{{ post.flair.foreground_color }}; background:{{ post.flair.background_color }};">{% call render_flair(post.flair.flair_parts) %}</a>
|
||||
{% endif %}
|
||||
{% if post.flags.nsfw %} <small class="nsfw">NSFW</small>{% endif %}
|
||||
</h1>
|
||||
|
||||
<!-- POST MEDIA -->
|
||||
<!-- post_type: {{ post.post_type }} -->
|
||||
{% if post.post_type == "image" %}
|
||||
<div class="post_media_content">
|
||||
<a href="{{ post.media.url }}" class="post_media_image" >
|
||||
<svg
|
||||
width="{{ post.media.width }}px"
|
||||
height="{{ post.media.height }}px"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.media.url }}"/>
|
||||
<desc>
|
||||
<img loading="lazy" alt="Post image" src="{{ post.media.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
{% else if post.post_type == "video" || post.post_type == "gif" %}
|
||||
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
|
||||
<script src="/hls.min.js"></script>
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls>
|
||||
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
|
||||
<source src="{{ post.media.url }}" type="video/mp4" />
|
||||
</video>
|
||||
</div>
|
||||
<script src="/playHLSVideo.js"></script>
|
||||
{% else %}
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video" src="{{ post.media.url }}" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %} loop><a href={{ post.media.url }}>Video</a></video>
|
||||
</div>
|
||||
{% call render_hls_notification(post.permalink[1..]) %}
|
||||
{% endif %}
|
||||
{% else if post.post_type == "gallery" %}
|
||||
<div class="gallery">
|
||||
{% for image in post.gallery -%}
|
||||
<figure>
|
||||
<a href="{{ image.url }}" ><img loading="lazy" alt="Gallery image" src="{{ image.url }}"/></a>
|
||||
<figcaption>
|
||||
<p>{{ image.caption }}</p>
|
||||
{% if image.outbound_url.len() > 0 %}
|
||||
<p><a class="outbound_url" href="{{ image.outbound_url }}" rel="nofollow">{{ image.outbound_url }}</a>
|
||||
{% endif %}
|
||||
</figcaption>
|
||||
</figure>
|
||||
{%- endfor %}
|
||||
</div>
|
||||
{% else if post.post_type == "link" %}
|
||||
<a id="post_url" href="{{ post.media.url }}" rel="nofollow">{{ post.media.url }}</a>
|
||||
{% endif %}
|
||||
|
||||
<!-- POST BODY -->
|
||||
<div class="post_body">{{ post.body|safe }}</div>
|
||||
<div class="post_score" title="{{ post.score.1 }}">{{ post.score.0 }}<span class="label"> Upvotes</span></div>
|
||||
<div class="post_footer">
|
||||
<ul id="post_links">
|
||||
<li class="desktop_item"><a href="{{ post.permalink }}">permalink</a></li>
|
||||
<li class="mobile_item"><a href="{{ post.permalink }}">link</a></li>
|
||||
{% if post.num_duplicates > 0 %}
|
||||
<li class="desktop_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">duplicates</a></li>
|
||||
<li class="mobile_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">dupes</a></li>
|
||||
{% endif %}
|
||||
<li class="desktop_item"><a href="https://reddit.com{{ post.permalink }}" rel="nofollow">reddit</a></li>
|
||||
<li class="mobile_item"><a href="https://reddit.com{{ post.permalink }}" rel="nofollow">reddit</a></li>
|
||||
</ul>
|
||||
<p>{{ post.upvote_ratio }}%<span id="upvoted"> Upvoted</span></p>
|
||||
</div>
|
||||
</div>
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro post_in_list(post) -%}
|
||||
<div class="post {% if post.flags.stickied %}stickied{% endif %}" id="{{ post.id }}">
|
||||
<p class="post_header">
|
||||
@ -75,7 +178,7 @@
|
||||
<a class="post_author {{ post.author.distinguished }}" href="/u/{{ post.author.name }}">u/{{ post.author.name }}</a>
|
||||
<span class="dot">•</span>
|
||||
<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
|
||||
{% if !post.awards.is_empty() %}
|
||||
{% if !post.awards.is_empty() && prefs.hide_awards != "on" %}
|
||||
{% for award in post.awards.clone() %}
|
||||
<span class="award" title="{{ award.name }}">
|
||||
<img alt="{{ award.name }}" src="{{ award.icon_url }}" width="16" height="16"/>
|
||||
@ -94,8 +197,10 @@
|
||||
</h2>
|
||||
<!-- POST MEDIA/THUMBNAIL -->
|
||||
{% if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "image" %}
|
||||
<div class="post_media_content">
|
||||
<a href="{{ post.media.url }}" class="post_media_image {% if post.media.height / post.media.width < 2 %}short{% endif %}" >
|
||||
<svg
|
||||
{%if post.flags.nsfw && prefs.blur_nsfw=="on" %}class="post_nsfw_blur"{% endif %}
|
||||
width="{{ post.media.width }}px"
|
||||
height="{{ post.media.height }}px"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
@ -105,16 +210,23 @@
|
||||
</desc>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
|
||||
<video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
|
||||
</div>
|
||||
{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
|
||||
{% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
|
||||
<video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" controls preload="none">
|
||||
<source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
|
||||
<source src="{{ post.media.url }}" type="video/mp4" />
|
||||
</video>
|
||||
</div>
|
||||
{% else %}
|
||||
<video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
|
||||
<div class="post_media_content">
|
||||
<video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
|
||||
</div>
|
||||
{% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26").replace("+", "%2B"), post.id)) %}
|
||||
{% endif %}
|
||||
{% else if post.post_type != "self" %}
|
||||
@ -125,12 +237,14 @@
|
||||
<path d="M35,15h-15a10,10 0,0,0 0,20h25a10,10 0,0,0 10,-10m-12.5,0a10, 10 0,0,1 10, -10h25a10,10 0,0,1 0,20h-15" fill="none" stroke-width="5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
{% else %}
|
||||
<svg width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
|
||||
<div style="max-width:{{ post.thumbnail.width }}px;max-height:{{ post.thumbnail.height }}px;">
|
||||
<svg {% if post.flags.nsfw && prefs.blur_nsfw=="on" %} class="thumb_nsfw_blur" {% endif %} width="{{ post.thumbnail.width }}px" height="{{ post.thumbnail.height }}px" xmlns="http://www.w3.org/2000/svg">
|
||||
<image width="100%" height="100%" href="{{ post.thumbnail.url }}"/>
|
||||
<desc>
|
||||
<img loading="lazy" alt="Thumbnail" src="{{ post.thumbnail.url }}"/>
|
||||
</desc>
|
||||
</svg>
|
||||
</div>
|
||||
{% endif %}
|
||||
<span>{% if post.post_type == "link" %}{{ post.domain }}{% else %}{{ post.post_type }}{% endif %}</span>
|
||||
</a>
|
||||
@ -141,7 +255,7 @@
|
||||
{{ post.body|safe }}
|
||||
</div>
|
||||
<div class="post_footer">
|
||||
<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} comments">{{ post.comments.0 }} comments</a>
|
||||
<a href="{{ post.permalink }}" class="post_comments" title="{{ post.comments.1 }} {% if post.comments.1 == "1" %}comment{% else %}comments{% endif %}">{{ post.comments.0 }} {% if post.comments.1 == "1" %}comment{% else %}comments{% endif %}</a>
|
||||
</div>
|
||||
</div>
|
||||
{%- endmacro %}
|
||||
|