Merge remote-tracking branch 'upstream/main'

ayaka 2025-05-24 01:01:17 +12:00
commit 9001a92273
31 changed files with 1434 additions and 515 deletions

.devcontainer/devcontainer.json

@@ -1,6 +1,6 @@
 {
 	"name": "Rust",
-	"image": "mcr.microsoft.com/devcontainers/rust:0-1-bullseye",
+	"image": "mcr.microsoft.com/devcontainers/rust:1.0.9-bookworm",
 	"features": {
 		"ghcr.io/devcontainers/features/docker-in-docker:2": {}
 	},

618	Cargo.lock generated

File diff suppressed because it is too large

Cargo.toml

@@ -3,7 +3,7 @@ name = "redsunlib"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0-only"
 repository = "https://git.stardust.wtf/iridium/redsunlib"
-version = "0.35.4"
+version = "0.36.0"
 authors = [
 	"Matthew Esposito <matt+cargo@matthew.science>",
 	"spikecodes <19519553+spikecodes@users.noreply.github.com>",
@@ -27,7 +27,7 @@ hyper = { version = "0.14.31", features = ["full"] }
 percent-encoding = "2.3.1"
 route-recognizer = "0.3.1"
 serde_json = "1.0.133"
-tokio = { version = "1.35.1", features = ["full"] }
+tokio = { version = "1.44.2", features = ["full"] }
 time = { version = "0.3.31", features = ["local-offset"] }
 url = "2.5.0"
 rust-embed = { version = "8.1.0", features = ["include-exclude"] }
@@ -47,8 +47,15 @@ rss = "2.0.7"
 arc-swap = "1.7.1"
 serde_json_path = "0.7.1"
 async-recursion = "1.1.1"
-common-words-all = { version = "0.0.2", default-features = false, features = ["english", "one"] }
+pulldown-cmark = { version = "0.12.0", features = ["simd", "html"], default-features = false }
 hyper-rustls = { version = "0.24.2", features = [ "http2" ] }
+tegen = "0.1.4"
+serde_urlencoded = "0.7.1"
+chrono = { version = "0.4.39", default-features = false, features = [ "std" ] }
+htmlescape = "0.3.1"
+bincode = "1.3.3"
+base2048 = "2.0.2"
+revision = "0.10.0"

 [dev-dependencies]

45	Dockerfile.alpine Normal file

@@ -0,0 +1,45 @@
# supported versions here: https://hub.docker.com/_/rust
ARG ALPINE_VERSION=3.20

########################
## builder image
########################
FROM rust:alpine${ALPINE_VERSION} AS builder

RUN apk add --no-cache musl-dev

WORKDIR /redlib

# download (most) dependencies in their own layer
COPY Cargo.lock Cargo.toml ./
RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs
RUN cargo build --release --locked --bin redlib
RUN rm ./src/main.rs && rmdir ./src

# copy the source and build the redlib binary
COPY . ./
RUN cargo build --release --locked --bin redlib
RUN echo "finished building redlib!"

########################
## release image
########################
FROM alpine:${ALPINE_VERSION} AS release

# Import redlib binary from builder
COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib

# Add non-root user for running redlib
RUN adduser --home /nonexistent --no-create-home --disabled-password redlib
USER redlib

# Document that we intend to expose port 8080 to whoever runs the container
EXPOSE 8080

# Run a healthcheck every minute to make sure redlib is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1

# Add container metadata
LABEL org.opencontainers.image.authors="sigaloid"

CMD ["redlib"]

51	Dockerfile.ubuntu Normal file

@@ -0,0 +1,51 @@
# supported versions here: https://hub.docker.com/_/rust
ARG RUST_BUILDER_VERSION=slim-bookworm
ARG UBUNTU_RELEASE_VERSION=noble

########################
## builder image
########################
FROM rust:${RUST_BUILDER_VERSION} AS builder

WORKDIR /redlib

# download (most) dependencies in their own layer
COPY Cargo.lock Cargo.toml ./
RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs
RUN cargo build --release --locked --bin redlib
RUN rm ./src/main.rs && rmdir ./src

# copy the source and build the redlib binary
COPY . ./
RUN cargo build --release --locked --bin redlib
RUN echo "finished building redlib!"

########################
## release image
########################
FROM ubuntu:${UBUNTU_RELEASE_VERSION} AS release

# Install ca-certificates
RUN apt-get update && apt-get install -y ca-certificates

# Import redlib binary from builder
COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib

# Add non-root user for running redlib
RUN useradd \
	--no-create-home \
	--password "!" \
	--comment "user for running redlib" \
	redlib
USER redlib

# Document that we intend to expose port 8080 to whoever runs the container
EXPOSE 8080

# Run a healthcheck every minute to make sure redlib is functional
HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1

# Add container metadata
LABEL org.opencontainers.image.authors="sigaloid"

CMD ["redlib"]

README.md

@@ -300,6 +300,17 @@ REDLIB_DEFAULT_USE_HLS = "on"
 >
 > If using Docker Compose, no changes are needed as the `.env` file is already referenced in `compose.yaml` via the `env_file: .env` line.

+## Command Line Flags
+
+Redlib supports the following command line flags:
+
+- `-4`, `--ipv4-only`: Listen on IPv4 only.
+- `-6`, `--ipv6-only`: Listen on IPv6 only.
+- `-r`, `--redirect-https`: Redirect all HTTP requests to HTTPS (no longer functional).
+- `-a`, `--address <ADDRESS>`: Sets address to listen on. Default is `[::]`.
+- `-p`, `--port <PORT>`: Port to listen on. Default is `8080`.
+- `-H`, `--hsts <EXPIRE_TIME>`: HSTS header to tell browsers that this site should only be accessed over HTTPS. Default is `604800`.
+
 ## Instance settings

 Assign a default value for each instance-specific setting by passing environment variables to Redlib in the format `REDLIB_{X}`. Replace `{X}` with the setting name (see list below) in capital letters.
@@ -340,3 +351,4 @@ Assign a default value for each user-modifiable setting by passing environment v
 | `HIDE_SIDEBAR_AND_SUMMARY` | `["on", "off"]` | `off` |
 | `HIDE_BANNER` | `["on", "off"]` | `off` |
 | `FIXED_NAVBAR` | `["on", "off"]` | `on` |
+| `REMOVE_DEFAULT_FEEDS` | `["on", "off"]` | `off` |
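As a sketch of how these flags wire up in code, the two new listener flags in isolation (this mirrors the `clap` builder calls that appear in the `src/main.rs` diff further down; treat that diff as authoritative):

```rust
use clap::{Arg, Command};

fn main() {
	// Same builder calls as in the main.rs diff below: num_args(0) makes a
	// bare boolean flag, read back with get_flag().
	let matches = Command::new("Redlib")
		.arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0))
		.arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0))
		.get_matches();

	if matches.get_flag("ipv4-only") {
		println!("listening on 0.0.0.0 (IPv4 only)");
	} else if matches.get_flag("ipv6-only") {
		println!("listening on [::] (IPv6 only)");
	}
}
```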

app.json

@@ -82,6 +82,9 @@
 		},
 		"REDLIB_FULL_URL": {
 			"required": false
+		},
+		"REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS": {
+			"required": false
 		}
 	}
 }

32	flake.lock generated

@@ -1,17 +1,12 @@
 {
 	"nodes": {
 		"crane": {
-			"inputs": {
-				"nixpkgs": [
-					"nixpkgs"
-				]
-			},
 			"locked": {
-				"lastModified": 1717025063,
-				"narHash": "sha256-dIubLa56W9sNNz0e8jGxrX3CAkPXsq7snuFA/Ie6dn8=",
+				"lastModified": 1731974733,
+				"narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=",
 				"owner": "ipetkov",
 				"repo": "crane",
-				"rev": "480dff0be03dac0e51a8dfc26e882b0d123a450e",
+				"rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c",
 				"type": "github"
 			},
 			"original": {
@@ -25,11 +20,11 @@
 			"systems": "systems"
 		},
 		"locked": {
-			"lastModified": 1710146030,
-			"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
+			"lastModified": 1731533236,
+			"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
 			"owner": "numtide",
 			"repo": "flake-utils",
-			"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
+			"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
 			"type": "github"
 		},
 		"original": {
@@ -40,11 +35,11 @@
 		},
 		"nixpkgs": {
 			"locked": {
-				"lastModified": 1717112898,
-				"narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=",
+				"lastModified": 1731890469,
+				"narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=",
 				"owner": "NixOS",
 				"repo": "nixpkgs",
-				"rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0",
+				"rev": "5083ec887760adfe12af64830a66807423a859a7",
 				"type": "github"
 			},
 			"original": {
@@ -64,19 +59,16 @@
 		},
 		"rust-overlay": {
 			"inputs": {
-				"flake-utils": [
-					"flake-utils"
-				],
 				"nixpkgs": [
 					"nixpkgs"
 				]
 			},
 			"locked": {
-				"lastModified": 1717121863,
-				"narHash": "sha256-/3sxIe7MZqF/jw1RTQCSmgTjwVod43mmrk84m50MJQ4=",
+				"lastModified": 1732069891,
+				"narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=",
 				"owner": "oxalica",
 				"repo": "rust-overlay",
-				"rev": "2a7b53172ed08f856b8382d7dcfd36a4e0cbd866",
+				"rev": "8509a51241c407d583b1963d5079585a992506e8",
 				"type": "github"
 			},
 			"original": {

flake.nix

@@ -4,19 +4,13 @@
 	inputs = {
 		nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";

-		crane = {
-			url = "github:ipetkov/crane";
-			inputs.nixpkgs.follows = "nixpkgs";
-		};
+		crane.url = "github:ipetkov/crane";

 		flake-utils.url = "github:numtide/flake-utils";

 		rust-overlay = {
 			url = "github:oxalica/rust-overlay";
-			inputs = {
-				nixpkgs.follows = "nixpkgs";
-				flake-utils.follows = "flake-utils";
-			};
+			inputs.nixpkgs.follows = "nixpkgs";
 		};
 	};

scripts/update_oauth_resources.sh

@@ -24,7 +24,7 @@ echo "// Please do not edit manually" >> "$filename"
 echo "// Filled in with real app versions" >> "$filename"

 # Open the array in the source file
-echo "pub static _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename"
+echo "pub const _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename"

 num=0

@@ -39,12 +39,12 @@ done
 echo "];" >> "$filename"

 # Fetch Android app versions
-page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
+page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
 # Append with pages
-page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
-page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
-page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
-page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq)
+page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
+page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
+page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')
+page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "<a class=\"ver-item\" href=\"(/reddit/com\.reddit\.frontpage/download/phone-20\d{2}\.\d+\.\d+-apk)\" rel=\"nofollow\">" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g')

 # Concatenate all pages
 versions="${page_1}"
@@ -63,7 +63,7 @@ android_count=$(echo "$versions" | wc -l)
 echo -e "Fetching \e[32m$android_count Android app versions...\e[0m"

 # Append to the source file
-echo "pub static ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename"
+echo "pub const ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename"

 num=0

@@ -89,7 +89,7 @@ ios_count=$(echo "$table" | wc -l)
 echo -e "Fetching \e[34m$ios_count iOS versions...\e[0m"

 # Append to the source file
-echo "pub static _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename"
+echo "pub const _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename"

 num=0

src/client.rs

@@ -19,7 +19,7 @@ use std::{io, result::Result};

 use crate::dbg_msg;
 use crate::oauth::{force_refresh_token, token_daemon, Oauth};
 use crate::server::RequestExt;
-use crate::utils::format_url;
+use crate::utils::{format_url, Post};

 const REDDIT_URL_BASE: &str = "https://oauth.reddit.com";
 const REDDIT_URL_BASE_HOST: &str = "oauth.reddit.com";
@@ -45,7 +45,7 @@ pub static OAUTH_RATELIMIT_REMAINING: AtomicU16 = AtomicU16::new(99);
 pub static OAUTH_IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false);

-static URL_PAIRS: [(&str, &str); 2] = [
+const URL_PAIRS: [(&str, &str); 2] = [
 	(ALTERNATIVE_REDDIT_URL_BASE, ALTERNATIVE_REDDIT_URL_BASE_HOST),
 	(REDDIT_SHORT_URL_BASE, REDDIT_SHORT_URL_BASE_HOST),
 ];
@@ -218,40 +218,28 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 	// Construct the hyper client from the HTTPS connector.
 	let client: &Lazy<Client<_, Body>> = &CLIENT;

-	let (token, vendor_id, device_id, user_agent, loid) = {
-		let client = OAUTH_CLIENT.load_full();
-		(
-			client.token.clone(),
-			client.headers_map.get("Client-Vendor-Id").cloned().unwrap_or_default(),
-			client.headers_map.get("X-Reddit-Device-Id").cloned().unwrap_or_default(),
-			client.headers_map.get("User-Agent").cloned().unwrap_or_default(),
-			client.headers_map.get("x-reddit-loid").cloned().unwrap_or_default(),
-		)
-	};

 	// Build request to Reddit. When making a GET, request gzip compression.
 	// (Reddit doesn't do brotli yet.)
-	let mut headers = vec![
-		("User-Agent", user_agent),
-		("Client-Vendor-Id", vendor_id),
-		("X-Reddit-Device-Id", device_id),
-		("x-reddit-loid", loid),
-		("Host", host.to_string()),
-		("Authorization", format!("Bearer {token}")),
-		("Accept-Encoding", if method == Method::GET { "gzip".into() } else { "identity".into() }),
+	let mut headers: Vec<(String, String)> = vec![
+		("Host".into(), host.into()),
+		("Accept-Encoding".into(), if method == Method::GET { "gzip".into() } else { "identity".into() }),
 		(
-			"Cookie",
+			"Cookie".into(),
 			if quarantine {
 				"_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D".into()
 			} else {
 				"".into()
 			},
 		),
-		("X-Reddit-Width", fastrand::u32(300..500).to_string()),
-		("X-Reddit-DPR", "2".to_owned()),
-		("Device-Name", format!("Android {}", fastrand::u8(9..=14))),
 	];

+	{
+		let client = OAUTH_CLIENT.load_full();
+		for (key, value) in client.headers_map.clone() {
+			headers.push((key, value));
+		}
+	}

 	// shuffle headers: https://github.com/redlib-org/redlib/issues/324
 	fastrand::shuffle(&mut headers);
@@ -274,7 +262,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 			return Ok(response);
 		};

 		let location_header = response.headers().get(header::LOCATION);
-		if location_header == Some(&HeaderValue::from_static("https://www.reddit.com/")) {
+		if location_header == Some(&HeaderValue::from_static(ALTERNATIVE_REDDIT_URL_BASE)) {
 			return Err("Reddit response was invalid".to_string());
 		}
 		return request(
@@ -400,6 +388,12 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 				"Ratelimit remaining: Header says {remaining}, we have {current_rate_limit}. Resets in {reset}. Rollover: {}. Ratelimit used: {used}",
 				if is_rolling_over { "yes" } else { "no" },
 			);

+			// If remaining can be parsed as a float, round it to a u16 and save it
+			if let Ok(val) = remaining.parse::<f32>() {
+				OAUTH_RATELIMIT_REMAINING.store(val.round() as u16, Ordering::SeqCst);
+			}

 			Some(reset)
 		} else {
 			None
@@ -484,8 +478,57 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 	}
 }

+async fn self_check(sub: &str) -> Result<(), String> {
+	let query = format!("/r/{sub}/hot.json?&raw_json=1");
+
+	match Post::fetch(&query, true).await {
+		Ok(_) => Ok(()),
+		Err(e) => Err(e),
+	}
+}
+
+pub async fn rate_limit_check() -> Result<(), String> {
+	// First, check a subreddit.
+	self_check("reddit").await?;
+	// This will reduce the rate limit to 99. Assert this check.
+	if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 {
+		return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst)));
+	}
+	// Now, we switch out the OAuth client.
+	// This checks for the IP rate limit association.
+	force_refresh_token().await;
+	// Now, check a new sub to break cache.
+	self_check("rust").await?;
+	// Again, assert the rate limit check.
+	if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 {
+		return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst)));
+	}
+
+	Ok(())
+}
+
 #[cfg(test)]
-static POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL";
+use {crate::config::get_setting, sealed_test::prelude::*};
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_rate_limit_check() {
+	rate_limit_check().await.unwrap();
+}
+
+#[test]
+#[sealed_test(env = [("REDLIB_DEFAULT_SUBSCRIPTIONS", "rust")])]
+fn test_default_subscriptions() {
+	tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on(async {
+		let subscriptions = get_setting("REDLIB_DEFAULT_SUBSCRIPTIONS");
+		assert!(subscriptions.is_some());
+
+		// check rate limit
+		rate_limit_check().await.unwrap();
+	});
+}
+
+#[cfg(test)]
+const POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL";

 #[tokio::test(flavor = "multi_thread")]
 async fn test_localization_popular() {
@@ -501,12 +544,6 @@ async fn test_obfuscated_share_link() {
 	assert_eq!(canonical_path(share_link, 3).await, Ok(Some(canonical_link)));
 }

-#[tokio::test(flavor = "multi_thread")]
-async fn test_share_link_strip_json() {
-	let link = "/17krzvz".into();
-	let canonical_link = "/comments/17krzvz".into();
-	assert_eq!(canonical_path(link, 3).await, Ok(Some(canonical_link)));
-}
-
 #[tokio::test(flavor = "multi_thread")]
 async fn test_private_sub() {
 	let link = json("/r/suicide/about.json?raw_json=1".into(), true).await;

src/config.rs

@@ -125,6 +125,9 @@ pub struct Config {
 	#[serde(rename = "REDLIB_FULL_URL")]
 	pub(crate) full_url: Option<String>,
+
+	#[serde(rename = "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS")]
+	pub(crate) default_remove_default_feeds: Option<String>,
 }

 impl Config {
@@ -176,6 +179,7 @@ impl Config {
 			pushshift: parse("REDLIB_PUSHSHIFT_FRONTEND"),
 			enable_rss: parse("REDLIB_ENABLE_RSS"),
 			full_url: parse("REDLIB_FULL_URL"),
+			default_remove_default_feeds: parse("REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS"),
 		}
 	}
 }
@@ -209,6 +213,7 @@ fn get_setting_from_config(name: &str, config: &Config) -> Option<String> {
 		"REDLIB_PUSHSHIFT_FRONTEND" => config.pushshift.clone(),
 		"REDLIB_ENABLE_RSS" => config.enable_rss.clone(),
 		"REDLIB_FULL_URL" => config.full_url.clone(),
+		"REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS" => config.default_remove_default_feeds.clone(),
 		_ => None,
 	}
 }
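For context, handlers read these values back through `crate::config::get_setting` (visible in the `src/utils.rs` imports below, which return `Option<String>`). A hedged sketch of how the new knob might be consulted; the helper name and the `"off"` fallback are illustrative assumptions, not code from this diff:

```rust
use crate::config::get_setting;

/// Sketch: true when the instance default says to hide the default feeds.
/// Falls back to "off" when the variable is unset (assumed default).
fn remove_default_feeds_enabled() -> bool {
	get_setting("REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS").unwrap_or_else(|| "off".to_string()) == "on"
}
```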

src/instance_info.rs

@@ -128,6 +128,7 @@ impl InstanceInfo {
 					["Pushshift frontend", &convert(&self.config.pushshift)],
 					["RSS enabled", &convert(&self.config.enable_rss)],
 					["Full URL", &convert(&self.config.full_url)],
+					["Remove default feeds", &convert(&self.config.default_remove_default_feeds)],
 					//TODO: fallback to crate::config::DEFAULT_PUSHSHIFT_FRONTEND
 				])
 				.with_header_row(["Settings"]),
@@ -172,6 +173,7 @@ impl InstanceInfo {
 				Pushshift frontend: {:?}\n
 				RSS enabled: {:?}\n
 				Full URL: {:?}\n
+				Remove default feeds: {:?}\n
 				Config:\n
 				Banner: {:?}\n
 				Hide awards: {:?}\n
@@ -201,6 +203,7 @@ impl InstanceInfo {
 				self.config.sfw_only,
 				self.config.enable_rss,
 				self.config.full_url,
+				self.config.default_remove_default_feeds,
 				self.config.pushshift,
 				self.config.banner,
 				self.config.default_hide_awards,

src/main.rs

@@ -9,9 +9,9 @@ use std::str::FromStr;

 use futures_lite::FutureExt;
 use hyper::Uri;
 use hyper::{header::HeaderValue, Body, Request, Response};
-use log::info;
+use log::{info, warn};
 use once_cell::sync::Lazy;
-use redsunlib::client::{canonical_path, proxy, CLIENT};
+use redsunlib::client::{canonical_path, proxy, rate_limit_check, CLIENT};
 use redsunlib::server::{self, RequestExt};
 use redsunlib::utils::{error, redirect, MascotAssets, ThemeAssets};
 use redsunlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user};
@@ -75,6 +75,17 @@ async fn ffmpeg() -> Result<Response<Body>, String> {
 	)
 }

+async fn opensearch() -> Result<Response<Body>, String> {
+	Ok(
+		Response::builder()
+			.status(200)
+			.header("content-type", "application/opensearchdescription+xml")
+			.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+			.body(include_bytes!("../static/opensearch.xml").as_ref().into())
+			.unwrap_or_default(),
+	)
+}
 async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Response<Body>, String> {
 	let mut res = Response::builder()
 		.status(200)
@@ -132,6 +143,8 @@ async fn main() {
 	let matches = Command::new("Redlib")
 		.version(env!("CARGO_PKG_VERSION"))
 		.about("Private front-end for Reddit written in Rust ")
+		.arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0))
+		.arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0))
 		.arg(
 			Arg::new("redirect-https")
 				.short('r')
@@ -170,11 +183,34 @@ async fn main() {
 		)
 		.get_matches();

+	match rate_limit_check().await {
+		Ok(()) => {
+			info!("[✅] Rate limit check passed");
+		}
+		Err(e) => {
+			let mut message = format!("Rate limit check failed: {}", e);
+			message += "\nThis may cause issues with the rate limit.";
+			message += "\nPlease report this error with the above information.";
+			message += "\nhttps://github.com/redlib-org/redlib/issues/new?assignees=sigaloid&labels=bug&title=%F0%9F%90%9B+Bug+Report%3A+Rate+limit+mismatch";
+			warn!("{}", message);
+			eprintln!("{}", message);
+		}
+	}
+
 	let address = matches.get_one::<String>("address").unwrap();
 	let port = matches.get_one::<String>("port").unwrap();
 	let hsts = matches.get_one("hsts").map(|m: &String| m.as_str());

-	let listener = [address, ":", port].concat();
+	let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || matches.get_flag("ipv4-only");
+	let ipv6_only = std::env::var("IPV6_ONLY").is_ok() || matches.get_flag("ipv6-only");
+
+	let listener = if ipv4_only {
+		format!("0.0.0.0:{}", port)
+	} else if ipv6_only {
+		format!("[::]:{}", port)
+	} else {
+		[address, ":", port].concat()
+	};

 	println!("Starting Redsunlib...");
@@ -233,6 +269,7 @@ async fn main() {
 	app.at("/Inter.var.woff2").get(|_| font().boxed());
 	app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed());
 	app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed());
+	app.at("/opensearch.xml").get(|_| opensearch().boxed());

 	app
 		.at("/videoUtils.js")
 		.get(|_| resource(include_str!("../static/videoUtils.js"), "text/javascript", false).boxed());
@@ -245,6 +282,7 @@ async fn main() {
 	app
 		.at("/check_update.js")
 		.get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());
+	app.at("/copy.js").get(|_| resource(include_str!("../static/copy.js"), "text/javascript", false).boxed());

 	app.at("/commits.json").get(|_| async move { proxy_commit_info().await }.boxed());
 	app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed());
@@ -304,6 +342,7 @@ async fn main() {
 	// Configure settings
 	app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed());
 	app.at("/settings/restore").get(|r| settings::restore(r).boxed());
+	app.at("/settings/encoded-restore").post(|r| settings::encoded_restore(r).boxed());
 	app.at("/settings/update").get(|r| settings::update(r).boxed());

 	// Mascots
@@ -405,7 +444,7 @@ async fn main() {
 			Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,

 			// Short link for post
-			Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{id}"), 3).await {
+			Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/comments/{id}"), 3).await {
 				Ok(path_opt) => match path_opt {
 					Some(path) => Ok(redirect(&path)),
 					None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,

src/oauth.rs

@@ -9,11 +9,14 @@ use hyper::{client, Body, Method, Request};
 use log::{debug, error, info, trace};

 use serde_json::json;
+use tegen::tegen::TextGenerator;
 use tokio::time::{error::Elapsed, timeout};

-static REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg";
+const REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg";

-static AUTH_ENDPOINT: &str = "https://www.reddit.com";
+const AUTH_ENDPOINT: &str = "https://www.reddit.com";
+
+const OAUTH_TIMEOUT: Duration = Duration::from_secs(5);

 // Spoofed client for Android devices
 #[derive(Debug, Clone, Default)]
@@ -32,24 +35,30 @@ impl Oauth {
 		loop {
 			let attempt = Self::new_with_timeout().await;
 			match attempt {
-				Ok(Some(oauth)) => {
+				Ok(Ok(oauth)) => {
 					info!("[✅] Successfully created OAuth client");
 					return oauth;
 				}
-				Ok(None) => {
-					error!("Failed to create OAuth client. Retrying in 5 seconds...");
-					continue;
-				}
-				Err(duration) => {
-					error!("Failed to create OAuth client in {duration:?}. Retrying in 5 seconds...");
+				Ok(Err(e)) => {
+					error!("Failed to create OAuth client: {}. Retrying in 5 seconds...", {
+						match e {
+							AuthError::Hyper(error) => error.to_string(),
+							AuthError::SerdeDeserialize(error) => error.to_string(),
+							AuthError::Field((value, error)) => format!("{error}\n{value}"),
+						}
+					});
+				}
+				Err(_) => {
+					error!("Failed to create OAuth client before timeout. Retrying in 5 seconds...");
 				}
 			}
+			tokio::time::sleep(OAUTH_TIMEOUT).await;
 		}
 	}

-	async fn new_with_timeout() -> Result<Option<Self>, Elapsed> {
+	async fn new_with_timeout() -> Result<Result<Self, AuthError>, Elapsed> {
 		let mut oauth = Self::default();
-		timeout(Duration::from_secs(5), oauth.login()).await.map(|result| result.map(|_| oauth))
+		timeout(OAUTH_TIMEOUT, oauth.login()).await.map(|result: Result<(), AuthError>| result.map(|_| oauth))
 	}

 	pub(crate) fn default() -> Self {
@@ -66,7 +75,7 @@ impl Oauth {
 			device,
 		}
 	}

-	async fn login(&mut self) -> Option<()> {
+	async fn login(&mut self) -> Result<(), AuthError> {
 		// Construct URL for OAuth token
 		let url = format!("{AUTH_ENDPOINT}/auth/v2/oauth/access-token/loid");
 		let mut builder = Request::builder().method(Method::POST).uri(&url);
@@ -84,20 +93,21 @@ impl Oauth {
 		// Set JSON body. I couldn't tell you what this means. But that's what the client sends
 		let json = json!({
-			"scopes": ["*","email"]
+			"scopes": ["*","email", "pii"]
 		});
 		let body = Body::from(json.to_string());

 		// Build request
 		let request = builder.body(body).unwrap();

-		trace!("Sending token request...");
+		trace!("Sending token request...\n\n{request:?}");

 		// Send request
 		let client: &once_cell::sync::Lazy<client::Client<_, Body>> = &CLIENT;
-		let resp = client.request(request).await.ok()?;
+		let resp = client.request(request).await?;

 		trace!("Received response with status {} and length {:?}", resp.status(), resp.headers().get("content-length"));
+		trace!("OAuth headers: {:#?}", resp.headers());

 		// Parse headers - loid header _should_ be saved sent on subsequent token refreshes.
 		// Technically it's not needed, but it's easy for Reddit API to check for this.
@@ -105,30 +115,58 @@ impl Oauth {
 		// Not worried about the privacy implications, since this is randomly changed
 		// and really only as privacy-concerning as the OAuth token itself.
 		if let Some(header) = resp.headers().get("x-reddit-loid") {
-			self.headers_map.insert("x-reddit-loid".to_owned(), header.to_str().ok()?.to_string());
+			self.headers_map.insert("x-reddit-loid".to_owned(), header.to_str().unwrap().to_string());
 		}

 		// Same with x-reddit-session
 		if let Some(header) = resp.headers().get("x-reddit-session") {
-			self.headers_map.insert("x-reddit-session".to_owned(), header.to_str().ok()?.to_string());
+			self.headers_map.insert("x-reddit-session".to_owned(), header.to_str().unwrap().to_string());
 		}

 		trace!("Serializing response...");

 		// Serialize response
-		let body_bytes = hyper::body::to_bytes(resp.into_body()).await.ok()?;
-		let json: serde_json::Value = serde_json::from_slice(&body_bytes).ok()?;
+		let body_bytes = hyper::body::to_bytes(resp.into_body()).await?;
+		let json: serde_json::Value = serde_json::from_slice(&body_bytes)?;

 		trace!("Accessing relevant fields...");

 		// Save token and expiry
-		self.token = json.get("access_token")?.as_str()?.to_string();
-		self.expires_in = json.get("expires_in")?.as_u64()?;
+		self.token = json
+			.get("access_token")
+			.ok_or_else(|| AuthError::Field((json.clone(), "access_token")))?
+			.as_str()
+			.ok_or_else(|| AuthError::Field((json.clone(), "access_token: as_str")))?
+			.to_string();
+		self.expires_in = json
+			.get("expires_in")
+			.ok_or_else(|| AuthError::Field((json.clone(), "expires_in")))?
+			.as_u64()
+			.ok_or_else(|| AuthError::Field((json.clone(), "expires_in: as_u64")))?;
 		self.headers_map.insert("Authorization".to_owned(), format!("Bearer {}", self.token));

 		info!("[✅] Success - Retrieved token \"{}...\", expires in {}", &self.token[..32], self.expires_in);

-		Some(())
+		Ok(())
 	}
 }

+#[derive(Debug)]
+enum AuthError {
+	Hyper(hyper::Error),
+	SerdeDeserialize(serde_json::Error),
+	Field((serde_json::Value, &'static str)),
+}
+
+impl From<hyper::Error> for AuthError {
+	fn from(err: hyper::Error) -> Self {
+		AuthError::Hyper(err)
+	}
+}
+
+impl From<serde_json::Error> for AuthError {
+	fn from(err: serde_json::Error) -> Self {
+		AuthError::SerdeDeserialize(err)
+	}
+}
@@ -185,11 +223,22 @@ impl Device {
 		let android_user_agent = format!("Reddit/{android_app_version}/Android {android_version}");

+		let qos = fastrand::u32(1000..=100_000);
+		let qos: f32 = qos as f32 / 1000.0;
+		let qos = format!("{:.3}", qos);
+
+		let codecs = TextGenerator::new().generate("available-codecs=video/avc, video/hevc{, video/x-vnd.on2.vp9|}");

 		// Android device headers
-		let headers = HashMap::from([
-			("Client-Vendor-Id".into(), uuid.clone()),
-			("X-Reddit-Device-Id".into(), uuid.clone()),
+		let headers: HashMap<String, String> = HashMap::from([
 			("User-Agent".into(), android_user_agent),
+			("x-reddit-retry".into(), "algo=no-retries".into()),
+			("x-reddit-compression".into(), "1".into()),
+			("x-reddit-qos".into(), qos),
+			("x-reddit-media-codecs".into(), codecs),
+			("Content-Type".into(), "application/json; charset=UTF-8".into()),
+			("client-vendor-id".into(), uuid.clone()),
+			("X-Reddit-Device-Id".into(), uuid.clone()),
 		]);

 		info!("[🔄] Spoofing Android client with headers: {headers:?}, uuid: \"{uuid}\", and OAuth ID \"{REDDIT_ANDROID_OAUTH_CLIENT_ID}\"");
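A note on the codec header above: as I understand `tegen`'s template syntax, `{a|b}` picks one alternative at random, and an empty alternative makes the group optional. A small hedged sketch; the pattern string is copied from the diff, and the assertion lists what I take to be the two possible expansions:

```rust
use tegen::tegen::TextGenerator;

fn main() {
	// "{, video/x-vnd.on2.vp9|}" randomly appends the VP9 codec or nothing,
	// so spoofed clients don't all advertise an identical codec list.
	let codecs = TextGenerator::new().generate("available-codecs=video/avc, video/hevc{, video/x-vnd.on2.vp9|}");
	assert!(
		codecs == "available-codecs=video/avc, video/hevc, video/x-vnd.on2.vp9" || codecs == "available-codecs=video/avc, video/hevc"
	);
	println!("{codecs}");
}
```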

src/oauth_resources.rs

@@ -2,8 +2,38 @@
 // Rerun scripts/update_oauth_resources.sh to update this file
 // Please do not edit manually
 // Filled in with real app versions
-pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[""];
-pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
+pub const _IOS_APP_VERSION_LIST: &[&str; 1] = &[""];
+pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
+	"Version 2024.22.1/Build 1652272",
+	"Version 2024.23.1/Build 1665606",
+	"Version 2024.24.1/Build 1682520",
+	"Version 2024.25.0/Build 1693595",
+	"Version 2024.25.2/Build 1700401",
+	"Version 2024.25.3/Build 1703490",
+	"Version 2024.26.0/Build 1710470",
+	"Version 2024.26.1/Build 1717435",
+	"Version 2024.28.0/Build 1737665",
+	"Version 2024.28.1/Build 1741165",
+	"Version 2024.30.0/Build 1770787",
+	"Version 2024.31.0/Build 1786202",
+	"Version 2024.32.0/Build 1809095",
+	"Version 2024.32.1/Build 1813258",
+	"Version 2024.33.0/Build 1819908",
+	"Version 2024.34.0/Build 1837909",
+	"Version 2024.35.0/Build 1861437",
+	"Version 2024.36.0/Build 1875012",
+	"Version 2024.37.0/Build 1888053",
+	"Version 2024.38.0/Build 1902791",
+	"Version 2024.39.0/Build 1916713",
+	"Version 2024.40.0/Build 1928580",
+	"Version 2024.41.0/Build 1941199",
+	"Version 2024.41.1/Build 1947805",
+	"Version 2024.42.0/Build 1952440",
+	"Version 2024.43.0/Build 1972250",
+	"Version 2024.44.0/Build 1988458",
+	"Version 2024.45.0/Build 2001943",
+	"Version 2024.46.0/Build 2012731",
+	"Version 2024.47.0/Build 2029755",
 	"Version 2023.48.0/Build 1319123",
 	"Version 2023.49.0/Build 1321715",
 	"Version 2023.49.1/Build 1322281",
@@ -31,9 +61,9 @@ pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
 	"Version 2024.20.0/Build 1612800",
 	"Version 2024.20.1/Build 1615586",
 	"Version 2024.20.2/Build 1624969",
-	"Version 2024.20.3/Build 1624970",
 	"Version 2024.21.0/Build 1631686",
 	"Version 2024.22.0/Build 1645257",
-	"Version 2024.22.1/Build 1652272",
 	"Version 2023.21.0/Build 956283",
 	"Version 2023.22.0/Build 968223",
 	"Version 2023.23.0/Build 983896",
@@ -124,35 +154,5 @@ pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[
 	"Version 2022.40.0/Build 624782",
 	"Version 2022.41.0/Build 630468",
 	"Version 2022.41.1/Build 634168",
-	"Version 2021.39.1/Build 372418",
-	"Version 2021.41.0/Build 376052",
-	"Version 2021.42.0/Build 378193",
-	"Version 2021.43.0/Build 382019",
-	"Version 2021.44.0/Build 385129",
-	"Version 2021.45.0/Build 387663",
-	"Version 2021.46.0/Build 392043",
-	"Version 2021.47.0/Build 394342",
-	"Version 2022.10.0/Build 429896",
-	"Version 2022.1.0/Build 402829",
-	"Version 2022.11.0/Build 433004",
-	"Version 2022.12.0/Build 436848",
-	"Version 2022.13.0/Build 442084",
-	"Version 2022.13.1/Build 444621",
-	"Version 2022.14.1/Build 452742",
-	"Version 2022.15.0/Build 455453",
-	"Version 2022.16.0/Build 462377",
-	"Version 2022.17.0/Build 468480",
-	"Version 2022.18.0/Build 473740",
-	"Version 2022.19.1/Build 482464",
-	"Version 2022.2.0/Build 405543",
-	"Version 2022.3.0/Build 408637",
-	"Version 2022.4.0/Build 411368",
-	"Version 2022.5.0/Build 414731",
-	"Version 2022.6.0/Build 418391",
-	"Version 2022.6.1/Build 419585",
-	"Version 2022.6.2/Build 420562",
-	"Version 2022.7.0/Build 420849",
-	"Version 2022.8.0/Build 423906",
-	"Version 2022.9.0/Build 426592",
 ];
-pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[""];
+pub const _IOS_OS_VERSION_LIST: &[&str; 1] = &[""];

src/search.rs

@@ -75,7 +75,11 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 		return Ok(redirect(&format!("/{query}")));
 	}

-	if query.starts_with("u/") {
+	if query.starts_with("R/") {
+		return Ok(redirect(&format!("/r{}", &query[1..])));
+	}
+
+	if query.starts_with("u/") || query.starts_with("U/") {
 		return Ok(redirect(&format!("/user{}", &query[1..])));
 	}

src/server.rs

@@ -25,7 +25,7 @@ use std::{
 	str::{from_utf8, Split},
 	string::ToString,
 };
-use time::Duration;
+use time::OffsetDateTime;

 use crate::dbg_msg;
@@ -170,10 +170,8 @@ impl ResponseExt for Response<Body> {
 	}

 	fn remove_cookie(&mut self, name: String) {
-		let mut cookie = Cookie::from(name);
-		cookie.set_path("/");
-		cookie.set_max_age(Duration::seconds(1));
-		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
+		let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc());
+		if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) {
 			self.headers_mut().append("Set-Cookie", val);
 		}
 	}
@@ -240,8 +238,14 @@ impl Server {
 					path.pop();
 				}

+				// Replace HEAD with GET for routing
+				let (method, is_head) = match req.method() {
+					&Method::HEAD => (&Method::GET, true),
+					method => (method, false),
+				};

 				// Match the visited path with an added route
-				match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
+				match router.recognize(&format!("/{}{}", method.as_str(), path)) {
 					// If a route was configured for this path
 					Ok(found) => {
 						let mut parammed = req;
@@ -253,17 +257,21 @@ impl Server {
 							match func.await {
 								Ok(mut res) => {
 									res.headers_mut().extend(def_headers);
-									let _ = compress_response(&req_headers, &mut res).await;
+									if is_head {
+										*res.body_mut() = Body::empty();
+									} else {
+										let _ = compress_response(&req_headers, &mut res).await;
+									}

 									Ok(res)
 								}
-								Err(msg) => new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await,
+								Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await,
 							}
 						}
 						.boxed()
 					}
 					// If there was a routing error
-					Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(),
+					Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(),
 				}
 			}))
 		}
@@ -274,8 +282,19 @@ impl Server {
 		// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
 		let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
+			#[cfg(windows)]
 			// Wait for the CTRL+C signal
 			tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
+
+			#[cfg(unix)]
+			{
+				// Wait for CTRL+C or SIGTERM signals
+				let mut signal_terminate = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()).expect("Failed to install SIGTERM signal handler");
+				tokio::select! {
+					_ = tokio::signal::ctrl_c() => (),
+					_ = signal_terminate.recv() => ()
+				}
+			}
 		});

 		server.boxed()

src/settings.rs

@@ -4,12 +4,15 @@ use std::collections::HashMap;

 // CRATES
 use crate::server::ResponseExt;
-use crate::utils::{redirect, template, Preferences};
+use crate::subreddit::join_until_size_limit;
+use crate::utils::{deflate_decompress, redirect, template, Preferences};
 use cookie::Cookie;
 use futures_lite::StreamExt;
 use hyper::{Body, Request, Response};
 use rinja::Template;
 use time::{Duration, OffsetDateTime};
+use tokio::time::timeout;
+use url::form_urlencoded;

 // STRUCTS
 #[derive(Template)]
@@ -21,7 +24,7 @@ struct SettingsTemplate {

 // CONSTANTS

-const PREFS: [&str; 22] = [
+const PREFS: [&str; 23] = [
 	"theme",
 	"mascot",
 	"redsunlib_colorway",
@@ -44,6 +47,7 @@ const PREFS: [&str; 22] = [
 	"hide_score",
 	"disable_visit_reddit_confirmation",
 	"video_quality",
+	"remove_default_feeds",
 ];

 // FUNCTIONS
@@ -140,6 +144,119 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body
 		};
 	}

+	// Get subscriptions/filters to restore from query string
+	let subscriptions = form.get("subscriptions");
+	let filters = form.get("filters");
+
+	// We can't search through the cookies directly like in subreddit.rs, so instead we have to make a string out of the request's headers to search through
+	let cookies_string = parts
+		.headers
+		.get("cookie")
+		.map(|hv| hv.to_str().unwrap_or("").to_string()) // Return String
+		.unwrap_or_else(String::new); // Return an empty string if None
+
+	// If there are subscriptions to restore, set them and delete any old subscriptions cookies; otherwise delete them all
+	if subscriptions.is_some() {
+		let sub_list: Vec<String> = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect();
+
+		// Start at 0 to keep track of what number we need to start deleting old subscription cookies from
+		let mut subscriptions_number_to_delete_from = 0;
+
+		// Starting at 0 so we handle the subscriptions cookie without a number first
+		for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() {
+			let subscriptions_cookie = if subscriptions_number == 0 {
+				"subscriptions".to_string()
+			} else {
+				format!("subscriptions{}", subscriptions_number)
+			};
+
+			response.insert_cookie(
+				Cookie::build((subscriptions_cookie, list))
+					.path("/")
+					.http_only(true)
+					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+					.into(),
+			);
+
+			subscriptions_number_to_delete_from += 1;
+		}
+
+		// While subscriptionsNUMBER= is in the string of cookies, add a response removing that cookie
+		while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+			// Remove that subscriptions cookie
+			response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+			// Increment subscriptions cookie number
+			subscriptions_number_to_delete_from += 1;
+		}
+	} else {
+		// Remove unnumbered subscriptions cookie
+		response.remove_cookie("subscriptions".to_string());
+
+		// Starts at one to deal with the first numbered subscriptions cookie and onwards
+		let mut subscriptions_number_to_delete_from = 1;
+
+		// While subscriptionsNUMBER= is in the string of cookies, add a response removing that cookie
+		while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) {
+			// Remove that subscriptions cookie
+			response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}"));
+
+			// Increment subscriptions cookie number
+			subscriptions_number_to_delete_from += 1;
+		}
+	}
+
+	// If there are filters to restore, set them and delete any old filters cookies; otherwise delete them all
+	if filters.is_some() {
+		let filters_list: Vec<String> = filters.expect("Filters").split('+').map(str::to_string).collect();
+
+		// Start at 0 to keep track of what number we need to start deleting old filters cookies from
+		let mut filters_number_to_delete_from = 0;
+
+		// Starting at 0 so we handle the filters cookie without a number first
+		for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() {
+			let filters_cookie = if filters_number == 0 {
+				"filters".to_string()
+			} else {
+				format!("filters{}", filters_number)
+			};
+
+			response.insert_cookie(
+				Cookie::build((filters_cookie, list))
+					.path("/")
+					.http_only(true)
+					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+					.into(),
+			);
+
+			filters_number_to_delete_from += 1;
+		}
+
+		// While filtersNUMBER= is in the string of cookies, add a response removing that cookie
+		while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+			// Remove that filters cookie
+			response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+			// Increment filters cookie number
+			filters_number_to_delete_from += 1;
+		}
+	} else {
+		// Remove unnumbered filters cookie
+		response.remove_cookie("filters".to_string());
+
+		// Starts at one to deal with the first numbered filters cookie and onwards
+		let mut filters_number_to_delete_from = 1;
+
+		// While filtersNUMBER= is in the string of cookies, add a response removing that cookie
+		while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) {
+			// Remove that filters cookie
+			response.remove_cookie(format!("filters{filters_number_to_delete_from}"));
+
+			// Increment filters cookie number
+			filters_number_to_delete_from += 1;
+		}
+	}
+
 	response
 }
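`join_until_size_limit` is imported from `subreddit.rs` and its body is not part of this diff; from its use here it evidently splits a list into chunks small enough to store one per cookie. A hedged sketch of that presumed behavior; the budget constant and function name are assumptions, not the real implementation:

```rust
/// Sketch: greedily join values with '+', starting a new chunk before the
/// joined string would blow the assumed per-cookie budget (~4 KB minus overhead).
fn join_until_size_limit_sketch(values: &[String]) -> Vec<String> {
	const COOKIE_BUDGET: usize = 3500; // assumption, not redlib's real constant
	let mut chunks = Vec::new();
	let mut current = String::new();
	for v in values {
		// The +1 accounts for the '+' separator when current is non-empty.
		if !current.is_empty() && current.len() + 1 + v.len() > COOKIE_BUDGET {
			chunks.push(std::mem::take(&mut current));
		}
		if !current.is_empty() {
			current.push('+');
		}
		current.push_str(v);
	}
	if !current.is_empty() {
		chunks.push(current);
	}
	chunks
}
```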
@@ -151,3 +268,35 @@ pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
 pub async fn update(req: Request<Body>) -> Result<Response<Body>, String> {
 	Ok(set_cookies_method(req, false))
 }
+
+pub async fn encoded_restore(req: Request<Body>) -> Result<Response<Body>, String> {
+	let body = hyper::body::to_bytes(req.into_body())
+		.await
+		.map_err(|e| format!("Failed to get bytes from request body: {}", e))?;
+
+	if body.len() > 1024 * 1024 {
+		return Err("Request body too large".to_string());
+	}
+
+	let encoded_prefs = form_urlencoded::parse(&body)
+		.find(|(key, _)| key == "encoded_prefs")
+		.map(|(_, value)| value)
+		.ok_or_else(|| "encoded_prefs parameter not found in request body".to_string())?;
+
+	let bytes = base2048::decode(&encoded_prefs).ok_or_else(|| "Failed to decode base2048 encoded preferences".to_string())?;
+
+	let out = timeout(std::time::Duration::from_secs(1), async { deflate_decompress(bytes) })
+		.await
+		.map_err(|e| format!("Failed to decompress bytes: {}", e))??;
+
+	let mut prefs: Preferences = timeout(std::time::Duration::from_secs(1), async { bincode::deserialize(&out) })
+		.await
+		.map_err(|e| format!("Failed to deserialize preferences: {}", e))?
+		.map_err(|e| format!("Failed to deserialize bytes into Preferences struct: {}", e))?;
+
+	prefs.available_themes = vec![];
+
+	let url = format!("/settings/restore/?{}", prefs.to_urlencoded()?);
+
+	Ok(redirect(&url))
+}
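`encoded_restore` only decodes; producing the blob is the mirror image of the same pipeline. A hedged sketch using the crates added in the `Cargo.toml` diff (`bincode`, `libflate`, `base2048`); the function name is illustrative and redlib's real encoder may differ:

```rust
use libflate::deflate::Encoder;
use std::io::Write;

/// Sketch: bincode-serialize, DEFLATE-compress, then base2048-encode into a
/// compact Unicode string that could round-trip through /settings/encoded-restore.
fn encode_prefs_sketch<T: serde::Serialize>(prefs: &T) -> Result<String, String> {
	let raw = bincode::serialize(prefs).map_err(|e| e.to_string())?;
	let mut encoder = Encoder::new(Vec::new());
	encoder.write_all(&raw).map_err(|e| e.to_string())?;
	let compressed = encoder.finish().into_result().map_err(|e| e.to_string())?;
	Ok(base2048::encode(&compressed))
}
```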

src/user.rs

@@ -5,6 +5,8 @@ use crate::client::json;
 use crate::server::RequestExt;
 use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User};
 use crate::{config, utils};
+use chrono::DateTime;
+use htmlescape::decode_html;
 use hyper::{Body, Request, Response};
 use log::trace;
 use rinja::Template;
@@ -165,9 +167,10 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
 			.into_iter()
 			.map(|post| Item {
 				title: Some(post.title.to_string()),
-				link: Some(utils::get_post_url(&post)),
+				link: Some(format_url(&utils::get_post_url(&post))),
 				author: Some(post.author.name),
-				content: Some(rewrite_urls(&post.body)),
+				pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
+				content: Some(rewrite_urls(&decode_html(&post.body).unwrap())),
 				..Default::default()
 			})
 			.collect::<Vec<_>>(),

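The new `pub_date` uses chrono's RFC 2822 formatter, the date format RSS readers expect in `<pubDate>`; `DateTime::from_timestamp` returns an `Option`, and `unwrap_or_default()` falls back to the Unix epoch. A small self-contained illustration:

```rust
use chrono::DateTime;

fn main() {
	// Timestamp 0 falls back to the Unix epoch and renders in RFC 2822 form.
	let pub_date = DateTime::from_timestamp(0, 0).unwrap_or_default().to_rfc2822();
	assert_eq!(pub_date, "Thu, 1 Jan 1970 00:00:00 +0000");
}
```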
src/utils.rs

@@ -8,16 +8,19 @@ use crate::config::{self, get_setting};
 use crate::{client::json, server::RequestExt};
 use cookie::Cookie;
 use hyper::{Body, Request, Response};
+use libflate::deflate::{Decoder, Encoder};
 use log::error;
 use once_cell::sync::Lazy;
 use regex::Regex;
+use revision::revisioned;
 use rinja::Template;
 use rust_embed::RustEmbed;
-use serde::Serialize;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::Value;
 use serde_json_path::{JsonPath, JsonPathExt};
 use std::collections::{HashMap, HashSet};
 use std::env;
+use std::io::{Read, Write};
 use std::str::FromStr;
 use std::string::ToString;
 use time::{macros::format_description, Duration, OffsetDateTime};
@@ -233,6 +236,14 @@ impl Media {
 			// If this post contains a gallery of images
 			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);

+			("gallery", &data["url"], None)
+		} else if data["crosspost_parent_list"][0]["is_gallery"].as_bool().unwrap_or_default() {
+			// If this post's crosspost parent contains a gallery of images
+			gallery = GalleryMedia::parse(
+				&data["crosspost_parent_list"][0]["gallery_data"]["items"],
+				&data["crosspost_parent_list"][0]["media_metadata"],
+			);
+
 			("gallery", &data["url"], None)
 		} else if data["is_reddit_media_domain"].as_bool().unwrap_or_default() && data["domain"] == "i.redd.it" {
 			// If this post contains a reddit media (image) URL.
@@ -542,6 +553,14 @@ pub struct ErrorTemplate {
pub url: String,
}

#[derive(Template)]
#[template(path = "info.html")]
pub struct InfoTemplate {
pub msg: String,
pub prefs: Preferences,
pub url: String,
}

/// Template for NSFW landing page. The landing page is displayed when a page's
/// content is wholly NSFW, but a user has not enabled the option to view NSFW
/// posts.
@@ -603,35 +622,86 @@ pub struct Params {
pub before: Option<String>,
}

-#[derive(Default)]
+#[derive(Default, Serialize, Deserialize, Debug, PartialEq, Eq)]
#[revisioned(revision = 1)]
pub struct Preferences {
#[revision(start = 1)]
#[serde(skip_serializing, skip_deserializing)]
pub available_themes: Vec<String>,
#[revision(start = 1)]
pub available_mascots: Vec<String>,
#[revision(start = 1)]
pub theme: String,
#[revision(start = 1)]
pub mascot: String,
#[revision(start = 1)]
pub redsunlib_colorway: String,
#[revision(start = 1)]
pub front_page: String,
#[revision(start = 1)]
pub layout: String,
#[revision(start = 1)]
pub wide: String,
#[revision(start = 1)]
pub blur_spoiler: String,
#[revision(start = 1)]
pub show_nsfw: String,
#[revision(start = 1)]
pub blur_nsfw: String,
#[revision(start = 1)]
pub hide_hls_notification: String,
#[revision(start = 1)]
pub video_quality: String,
#[revision(start = 1)]
pub hide_sidebar_and_summary: String,
#[revision(start = 1)]
pub hide_banner: String,
#[revision(start = 1)]
pub use_hls: String,
#[revision(start = 1)]
pub ffmpeg_video_downloads: String,
#[revision(start = 1)]
pub autoplay_videos: String,
#[revision(start = 1)]
pub fixed_navbar: String,
#[revision(start = 1)]
pub disable_visit_reddit_confirmation: String,
#[revision(start = 1)]
pub comment_sort: String,
#[revision(start = 1)]
pub post_sort: String,
#[revision(start = 1)]
#[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")]
pub subscriptions: Vec<String>,
#[revision(start = 1)]
pub quicklist: Vec<String>,
#[revision(start = 1)]
#[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")]
pub filters: Vec<String>,
#[revision(start = 1)]
pub hide_awards: String,
#[revision(start = 1)]
pub hide_score: String,
#[revision(start = 1)]
pub remove_default_feeds: String,
}

fn serialize_vec_with_plus<S>(vec: &[String], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&vec.join("+"))
}

fn deserialize_vec_with_plus<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: Deserializer<'de>,
{
let string = String::deserialize(deserializer)?;
if string.is_empty() {
return Ok(Vec::new());
}
Ok(string.split('+').map(|s| s.to_string()).collect())
}
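
Aside, not part of the diff: the pair of helpers above joins multi-value prefs with '+' so they stay compact in a query string, and splits them back apart on deserialization. A minimal round-trip sketch, assuming those helpers are in scope (the Subs struct here is illustrative only):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Subs {
	#[serde(serialize_with = "serialize_vec_with_plus", deserialize_with = "deserialize_vec_with_plus")]
	subscriptions: Vec<String>,
}

fn main() {
	let subs = Subs { subscriptions: vec!["memes".into(), "rust".into()] };
	// serde_urlencoded percent-encodes the joining '+' as %2B
	let qs = serde_urlencoded::to_string(&subs).unwrap();
	assert_eq!(qs, "subscriptions=memes%2Brust");
	let back: Subs = serde_urlencoded::from_str(&qs).unwrap();
	assert_eq!(back, subs);
}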
#[derive(RustEmbed)]
@@ -689,8 +759,36 @@ impl Preferences {
quicklist: setting(req, "quicklist").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
hide_awards: setting(req, "hide_awards"),
hide_score: setting(req, "hide_score"),
remove_default_feeds: setting(req, "remove_default_feeds"),
}
}

pub fn to_urlencoded(&self) -> Result<String, String> {
serde_urlencoded::to_string(self).map_err(|e| e.to_string())
}

pub fn to_bincode(&self) -> Result<Vec<u8>, String> {
bincode::serialize(self).map_err(|e| e.to_string())
}

pub fn to_compressed_bincode(&self) -> Result<Vec<u8>, String> {
deflate_compress(self.to_bincode()?)
}

pub fn to_bincode_str(&self) -> Result<String, String> {
Ok(base2048::encode(&self.to_compressed_bincode()?))
}
}

pub fn deflate_compress(i: Vec<u8>) -> Result<Vec<u8>, String> {
let mut e = Encoder::new(Vec::new());
e.write_all(&i).map_err(|e| e.to_string())?;
e.finish().into_result().map_err(|e| e.to_string())
}

pub fn deflate_decompress(i: Vec<u8>) -> Result<Vec<u8>, String> {
let mut decoder = Decoder::new(&i[..]);
let mut out = Vec::new();
decoder.read_to_end(&mut out).map_err(|e| format!("Failed to read from gzip decoder: {}", e))?;
Ok(out)
}
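
Aside, not part of the diff: the settings-export string shown in the UI chains the three steps above. A minimal sketch of the full round trip, assuming Preferences, deflate_decompress, and the base2048/bincode crates from this commit are in scope:

fn round_trip(prefs: &Preferences) -> Result<Preferences, String> {
	// Export: struct -> bincode bytes -> DEFLATE -> base2048 text for the settings box
	let exported = prefs.to_bincode_str()?;
	// Import: base2048 text -> compressed bytes -> inflate -> struct
	let compressed = base2048::decode(&exported).ok_or_else(|| "invalid base2048 input".to_string())?;
	let raw = deflate_decompress(compressed)?;
	bincode::deserialize(&raw).map_err(|e| e.to_string())
}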
/// Gets a `HashSet` of filters from the cookie in the given `Request`.
@@ -745,8 +843,16 @@ pub async fn parse_post(post: &Value) -> Post {
"<div class=\"md\"><p>[removed] — <a href=\"https://{}{permalink}\">view removed post</a></p></div>",
get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
)
} else {
let selftext = val(post, "selftext");
if selftext.contains("```") {
let mut html_output = String::new();
let parser = pulldown_cmark::Parser::new(&selftext);
pulldown_cmark::html::push_html(&mut html_output, parser);
rewrite_urls(&html_output)
} else {
rewrite_urls(&val(post, "selftext_html"))
}
};
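
Aside, not part of the diff: posts whose raw selftext contains a fenced code block are re-rendered through pulldown-cmark instead of relying on Reddit's selftext_html. A minimal sketch of that rendering step:

use pulldown_cmark::{html, Parser};

// Render Markdown to HTML the same way the branch above does
fn markdown_to_html(md: &str) -> String {
	let mut out = String::new();
	html::push_html(&mut out, Parser::new(md));
	out
}

// A fenced block such as "```rust\nfn main() {}\n```" becomes roughly
// <pre><code class="language-rust">fn main() {}\n</code></pre>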
// Build a post using data parsed from Reddit post API
@@ -837,6 +943,59 @@ pub fn param(path: &str, value: &str) -> Option<String> {
// Retrieve the value of a setting by name
pub fn setting(req: &Request<Body>, name: &str) -> String {
// Parse a cookie value from request

// If this was called with "subscriptions" and the "subscriptions" cookie has a value
if name == "subscriptions" && req.cookie("subscriptions").is_some() {
// Create subscriptions string
let mut subscriptions = String::new();

// Default subscriptions cookie
if req.cookie("subscriptions").is_some() {
subscriptions.push_str(req.cookie("subscriptions").unwrap().value());
}

// Start with first numbered subscription cookie
let mut subscriptions_number = 1;

// While whatever subscriptionsNUMBER cookie we're looking at has a value
while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() {
// Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string
subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value());

// Increment subscription cookie number
subscriptions_number += 1;
}

// Return the subscriptions cookies as one large string
subscriptions
}
// If this was called with "filters" and the "filters" cookie has a value
else if name == "filters" && req.cookie("filters").is_some() {
// Create filters string
let mut filters = String::new();

// Default filters cookie
if req.cookie("filters").is_some() {
filters.push_str(req.cookie("filters").unwrap().value());
}

// Start with first numbered filters cookie
let mut filters_number = 1;

// While whatever filtersNUMBER cookie we're looking at has a value
while req.cookie(&format!("filters{}", filters_number)).is_some() {
// Push whatever filtersNUMBER cookie we're looking at into the filters string
filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value());

// Increment filters cookie number
filters_number += 1;
}

// Return the filters cookies as one large string
filters
}
// The above two still come to this if there was no existing value
else {
req
.cookie(name)
.unwrap_or_else(|| {

@@ -849,6 +1008,7 @@ pub fn setting(req: &Request<Body>, name: &str) -> String {
})
.value()
.to_string()
}
}
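
Aside, not part of the diff: the loop above reassembles values that the settings code splits across numbered cookies (subscriptions, subscriptions1, ...) to stay under the roughly 4 KB per-cookie limit. A hypothetical sketch of the writer side, under that assumption; the actual splitting lives elsewhere in the settings handler, and CHUNK is illustrative only:

const CHUNK: usize = 3500; // illustrative size, below the ~4 KB cookie cap

fn cookie_chunks(name: &str, value: &str) -> Vec<(String, String)> {
	value
		.as_bytes()
		.chunks(CHUNK)
		.enumerate()
		.map(|(i, part)| {
			// First chunk keeps the bare name; later ones get a numeric suffix
			let key = if i == 0 { name.to_string() } else { format!("{name}{i}") };
			// Subreddit names are ASCII, so a lossy conversion is safe here
			(key, String::from_utf8_lossy(part).into_owned())
		})
		.collect()
}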
// Retrieve the value of a setting by name or the default value
@@ -864,11 +1024,12 @@ pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> String {
// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
if sub == "random" || sub == "randnsfw" {
-let new_sub = json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
+Ok(redirect(&format!(
+"/r/{}{additional}",
+json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
.as_str()
.unwrap_or_default()
-.to_string();
-Ok(redirect(&format!("/r/{new_sub}{additional}")))
+)))
} else {
Err("No redirect needed".to_string())
}
@@ -946,9 +1107,20 @@ pub fn format_url(url: &str) -> String {
}
}

static REGEX_BULLET: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap());
static REGEX_BULLET_CONSECUTIVE_LINES: Lazy<Regex> = Lazy::new(|| Regex::new(r"</ul>\n<ul>").unwrap());

pub fn render_bullet_lists(input_text: &str) -> String {
// ref: https://stackoverflow.com/a/4902622
// First enclose each bullet with <ul> <li> tags
let text1 = REGEX_BULLET.replace_all(input_text, "<ul><li>$1</li></ul>").to_string();
// Then remove any consecutive </ul> <ul> tags
REGEX_BULLET_CONSECUTIVE_LINES.replace_all(&text1, "").to_string()
}
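
Aside, not part of the diff: each "- item" line first becomes its own single-item list, then the "</ul>\n<ul>" joiner between consecutive items is deleted, merging them into one list:

// "- a\n- b"
//   -> "<ul><li>a</li></ul>\n<ul><li>b</li></ul>"  (REGEX_BULLET)
//   -> "<ul><li>a</li><li>b</li></ul>"             (consecutive tags removed)
assert_eq!(render_bullet_lists("- a\n- b"), "<ul><li>a</li><li>b</li></ul>");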
// These are links we want to replace in-body
static REDDIT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|new\.|)(reddit\.com|redd\.it)/"#).unwrap());
-static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(external-preview|preview|i)\.redd\.it(.*)[^?]").unwrap());
+static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(external-preview|preview|i)\.redd\.it(.*)").unwrap());
static REDDIT_EMOJI_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://(www|).redditstatic\.com/(.*)").unwrap());
static REDLIB_PREVIEW_LINK_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"/(img|preview/)(pre|external-pre)?/(.*?)>"#).unwrap());
static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)</a>").unwrap());
@@ -957,8 +1129,7 @@ static REDLIB_PREVIEW_TEXT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r">(.*?)</a>").unwrap());

pub fn rewrite_urls(input_text: &str) -> String {
let mut text1 =
// Rewrite Reddit links to Redlib
-REDDIT_REGEX.replace_all(input_text, r#"href="/"#)
-.to_string();
+REDDIT_REGEX.replace_all(input_text, r#"href="/"#).to_string();

loop {
if REDDIT_EMOJI_REGEX.find(&text1).is_none() {
@@ -980,49 +1151,44 @@ pub fn rewrite_urls(input_text: &str) -> String {
} else {
let formatted_url = format_url(REDDIT_PREVIEW_REGEX.find(&text1).map(|x| x.as_str()).unwrap_or_default());

-let image_url = REDLIB_PREVIEW_LINK_REGEX.find(&formatted_url).map_or("", |m| m.as_str()).to_string();
+let image_url = REDLIB_PREVIEW_LINK_REGEX.find(&formatted_url).map_or("", |m| m.as_str());
-let mut image_caption = REDLIB_PREVIEW_TEXT_REGEX.find(&formatted_url).map_or("", |m| m.as_str()).to_string();
+let mut image_caption = REDLIB_PREVIEW_TEXT_REGEX.find(&formatted_url).map_or("", |m| m.as_str());

/* As long as image_caption isn't empty remove first and last four characters of image_text to leave us with just the text in the caption without any HTML.
This makes it possible to enclose it in a <figcaption> later on without having stray HTML breaking it */
if !image_caption.is_empty() {
-image_caption = image_caption[1..image_caption.len() - 4].to_string();
+image_caption = &image_caption[1..image_caption.len() - 4];
}

// image_url contains > at the end of it, and right above this we remove image_text's front >, leaving us with just a single > between them
-let image_to_replace = format!("<a href=\"{image_url}{image_caption}</a>");
+let image_to_replace = format!("<p><a href=\"{image_url}{image_caption}</a></p>");

-// _image_replacement needs to be in scope for the replacement at the bottom of the loop
-let mut _image_replacement = String::new();

/* We don't want to show a caption that's just the image's link, so we check if we find a Reddit preview link within the image's caption.
If we don't find one we must have actual text, so we include a <figcaption> block that contains it.
Otherwise we don't include the <figcaption> block as we don't need it. */
-if REDDIT_PREVIEW_REGEX.find(&image_caption).is_none() {
+let _image_replacement = if REDDIT_PREVIEW_REGEX.find(image_caption).is_none() {
// Without this " would show as \" instead. "\&quot;" is how the quotes are formatted within image_text beforehand
-image_caption = image_caption.replace("\\&quot;", "\"");
-_image_replacement = format!("<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a><figcaption>{image_caption}</figcaption></figure>");
+format!(
+"<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a><figcaption>{}</figcaption></figure>",
+image_caption.replace("\\&quot;", "\"")
+)
} else {
-_image_replacement = format!("<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a></figure>");
+format!("<figure><a href=\"{image_url}<img loading=\"lazy\" src=\"{image_url}</a></figure>")
-}
+};

/* In order to know if we're dealing with a normal or external preview we need to take a look at the first capture group of REDDIT_PREVIEW_REGEX
if it's preview we're dealing with something that needs /preview/pre, external-preview is /preview/external-pre, and i is /img */
-let reddit_preview_regex_capture = REDDIT_PREVIEW_REGEX.captures(&text1).unwrap().get(1).map_or("", |m| m.as_str()).to_string();
+let reddit_preview_regex_capture = REDDIT_PREVIEW_REGEX.captures(&text1).unwrap().get(1).map_or("", |m| m.as_str());

-let mut _preview_type = String::new();
-if reddit_preview_regex_capture == "preview" {
-_preview_type = "/preview/pre".to_string();
-} else if reddit_preview_regex_capture == "external-preview" {
-_preview_type = "/preview/external-pre".to_string();
-} else {
-_preview_type = "/img".to_string();
-}
+let _preview_type = match reddit_preview_regex_capture {
+"preview" => "/preview/pre",
+"external-preview" => "/preview/external-pre",
+_ => "/img",
+};

text1 = REDDIT_PREVIEW_REGEX
.replace(&text1, format!("{_preview_type}$2"))
.replace(&image_to_replace, &_image_replacement)
.to_string()
}
}
}
@@ -1096,10 +1262,14 @@ pub fn rewrite_emotes(media_metadata: &Value, comment: String) -> String {
);

// Inside the comment replace the ID we found with the string that will embed the image
-comment = comment.replace(&id, &to_replace_with).to_string();
+comment = comment.replace(&id, &to_replace_with);
}
}
}

// render bullet (unordered) lists
comment = render_bullet_lists(&comment);

// Call rewrite_urls() to transform any other Reddit links
rewrite_urls(&comment)
}
@@ -1196,6 +1366,20 @@ pub async fn error(req: Request<Body>, msg: &str) -> Result<Response<Body>, String> {
Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}
/// Renders a generic info landing page.
pub async fn info(req: Request<Body>, msg: &str) -> Result<Response<Body>, String> {
let url = req.uri().to_string();
let body = InfoTemplate {
msg: msg.to_string(),
prefs: Preferences::new(&req),
url,
}
.render()
.unwrap_or_default();
Ok(Response::builder().status(200).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}
/// Returns true if the config/env variable `REDLIB_SFW_ONLY` carries the
/// value `on`.
///
@@ -1283,7 +1467,7 @@ pub fn url_path_basename(path: &str) -> String {
let mut url = url_result.unwrap();
url.path_segments_mut().unwrap().pop_if_empty();
-url.path_segments().unwrap().last().unwrap().to_string()
+url.path_segments().unwrap().next_back().unwrap().to_string()
}
}
@@ -1303,7 +1487,7 @@ pub fn get_post_url(post: &Post) -> String {
}
}

#[cfg(test)]
mod tests {
-use super::{format_num, format_url, rewrite_urls};
+use super::{format_num, format_url, rewrite_urls, Preferences};

#[test]
fn format_num_works() {
@@ -1370,6 +1554,36 @@ mod tests {
assert_eq!(format_url("nsfw"), "");
assert_eq!(format_url("spoiler"), "");
}
#[test]
fn serialize_prefs() {
let prefs = Preferences {
available_themes: vec![],
theme: "laserwave".to_owned(),
front_page: "default".to_owned(),
layout: "compact".to_owned(),
wide: "on".to_owned(),
blur_spoiler: "on".to_owned(),
show_nsfw: "off".to_owned(),
blur_nsfw: "on".to_owned(),
hide_hls_notification: "off".to_owned(),
video_quality: "best".to_owned(),
hide_sidebar_and_summary: "off".to_owned(),
use_hls: "on".to_owned(),
autoplay_videos: "on".to_owned(),
fixed_navbar: "on".to_owned(),
disable_visit_reddit_confirmation: "on".to_owned(),
comment_sort: "confidence".to_owned(),
post_sort: "top".to_owned(),
subscriptions: vec!["memes".to_owned(), "mildlyinteresting".to_owned()],
filters: vec![],
hide_awards: "off".to_owned(),
hide_score: "off".to_owned(),
remove_default_feeds: "off".to_owned(),
};
let urlencoded = serde_urlencoded::to_string(prefs).expect("Failed to serialize Prefs");
assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off&remove_default_feeds=off");
}
}
#[test]

@@ -1388,7 +1602,10 @@ async fn test_fetching_subreddit_quarantined() {

#[tokio::test(flavor = "multi_thread")]
async fn test_fetching_nsfw_subreddit() {
-let subreddit = Post::fetch("/r/randnsfw", false).await;
+// Gonwild is a place for closed, Euclidean Geometric shapes to exchange their nth terms for karma; showing off their edges in a comfortable environment without pressure.
+// Find a good sub that is tagged NSFW but that actually isn't in case my future employers are watching (they probably are)
+// switched from randnsfw as it is no longer functional.
+let subreddit = Post::fetch("/r/gonwild", false).await;
assert!(subreddit.is_ok());
assert!(!subreddit.unwrap().0.is_empty());
}
@@ -1406,7 +1623,7 @@ async fn test_fetching_ws() {

fn test_rewriting_image_links() {
let input =
r#"<p><a href="https://preview.redd.it/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc">caption 1</a></p>"#;
-let output = r#"<p><figure><a href="/preview/pre/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"><img loading="lazy" src="/preview/pre/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"></a><figcaption>caption 1</figcaption></figure></p"#;
+let output = r#"<figure><a href="/preview/pre/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"><img loading="lazy" src="/preview/pre/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"></a><figcaption>caption 1</figcaption></figure>"#;
assert_eq!(rewrite_urls(input), output);
}
@@ -1433,3 +1650,77 @@ fn test_rewriting_emotes() {
let output = r#"<div class="comment_body "><div class="md"><p><img loading="lazy" src="/emote/t5_31hpy/PW6WsOaLcd.png" width="60" height="60" style="vertical-align:text-bottom"></p></div></div>"#;
assert_eq!(rewrite_emotes(&json_input, comment_input.to_string()), output);
}
#[test]
fn test_rewriting_bullet_list() {
let input = r#"<div class="md"><p>Hi, I&#39;ve bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I&#39;ve installed its driver from the LG website and it works ok. I also used <a href="http://www.lagom.nl/lcd-test/">http://www.lagom.nl/lcd-test/</a> to calibrate it. After some good tinkering I&#39;ve found the following settings + the color profile from the driver gets me past all the tests perfectly:
- Brightness 50 (still have to settle on this one, it&#39;s personal preference, it controls the backlight, not the colors)
- Contrast 70 (which for me was the default one)
- Picture mode Custom
- Super resolution + Off (it looks horrible anyway)
- Sharpness 50 (default one I think)
- Black level High (low messes up gray colors)
- DFC Off
- Response Time Middle (personal preference, <a href="https://www.blurbusters.com/">https://www.blurbusters.com/</a> show horrible overdrive with it on high)
- Freesync doesn&#39;t matter
- Black stabilizer 50
- Gamma setting on 0
- Color Temp Medium
How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge&#39;s icon for example, the blue background is just blocky, don&#39;t know why.</p>
</div>"#;
let output = r#"<div class="md"><p>Hi, I&#39;ve bought this very same monitor and found no calibration whatsoever. I have an ICC profile that has been set up since I&#39;ve installed its driver from the LG website and it works ok. I also used <a href="http://www.lagom.nl/lcd-test/">http://www.lagom.nl/lcd-test/</a> to calibrate it. After some good tinkering I&#39;ve found the following settings + the color profile from the driver gets me past all the tests perfectly:
<ul><li>Brightness 50 (still have to settle on this one, it&#39;s personal preference, it controls the backlight, not the colors)</li><li>Contrast 70 (which for me was the default one)</li><li>Picture mode Custom</li><li>Super resolution + Off (it looks horrible anyway)</li><li>Sharpness 50 (default one I think)</li><li>Black level High (low messes up gray colors)</li><li>DFC Off </li><li>Response Time Middle (personal preference, <a href="https://www.blurbusters.com/">https://www.blurbusters.com/</a> show horrible overdrive with it on high)</li><li>Freesync doesn&#39;t matter</li><li>Black stabilizer 50</li><li>Gamma setting on 0 </li><li>Color Temp Medium</li></ul>
How`s your monitor by the way? Any IPS bleed whatsoever? I either got lucky or the panel is pretty good, 0 bleed for me, just the usual IPS glow. How about the pixels? I see the pixels even at one meter away, especially on Microsoft Edge&#39;s icon for example, the blue background is just blocky, don&#39;t know why.</p>
</div>"#;
assert_eq!(render_bullet_lists(input), output);
}
#[test]
fn test_default_prefs_serialization_loop_json() {
let prefs = Preferences::default();
let serialized = serde_json::to_string(&prefs).unwrap();
let deserialized: Preferences = serde_json::from_str(&serialized).unwrap();
assert_eq!(prefs, deserialized);
}
#[test]
fn test_default_prefs_serialization_loop_bincode() {
let prefs = Preferences::default();
test_round_trip(&prefs, false);
test_round_trip(&prefs, true);
}
static KNOWN_GOOD_CONFIGS: &[&str] = &[
"ఴӅβØØҞÉဏႢձĬ༧ȒʯऌԔӵ୮༏",
"ਧՊΥÀÃǎƱГ۸ඣമĖฤ႙ʟาúໜϾௐɥঀĜໃહཞઠѫҲɂఙ࿔DzઉƲӟӻĻฅΜδ໖ԜǗဖငƦơ৶Ą௩ԹʛใЛʃශаΏ",
"ਧԩΥÀÃΊ౭൩ඔႠϼҭöҪƸռઇԾॐნɔາǒՍҰच௨ಖມŃЉŐདƦ๙ϩএఠȝഽйʮჯඒϰळՋ௮ສ৵ऎΦѧਹಧଟƙŃ३î༦ŌပղयƟแҜ།",
];
#[test]
fn test_known_good_configs_deserialization() {
for config in KNOWN_GOOD_CONFIGS {
let bytes = base2048::decode(config).unwrap();
let decompressed = deflate_decompress(bytes).unwrap();
assert!(bincode::deserialize::<Preferences>(&decompressed).is_ok());
}
}
#[test]
fn test_known_good_configs_full_round_trip() {
for config in KNOWN_GOOD_CONFIGS {
let bytes = base2048::decode(config).unwrap();
let decompressed = deflate_decompress(bytes).unwrap();
let prefs: Preferences = bincode::deserialize(&decompressed).unwrap();
test_round_trip(&prefs, false);
test_round_trip(&prefs, true);
}
}
fn test_round_trip(input: &Preferences, compression: bool) {
let serialized = bincode::serialize(input).unwrap();
let compressed = if compression { deflate_compress(serialized).unwrap() } else { serialized };
let decompressed = if compression { deflate_decompress(compressed).unwrap() } else { compressed };
let deserialized: Preferences = bincode::deserialize(&decompressed).unwrap();
assert_eq!(*input, deserialized);
}

View File

@@ -30,7 +30,7 @@ async function checkInstanceUpdateStatus() {
document.getElementById('update-status').innerText = statusMessage;
} catch (error) {
console.error('Error fetching commits:', error);
-document.getElementById('update-status').innerText = '⚠️ Error checking update status.';
+document.getElementById('update-status').innerText = '⚠️ Error checking update status: ' + error;
}
}
@@ -45,7 +45,7 @@ async function checkOtherInstances() {
//document.getElementById('random-instance').innerText = "Visit Random Instance";
} catch (error) {
console.error('Error fetching instances:', error);
-document.getElementById('update-status').innerText = '⚠️ Error checking update status.';
+document.getElementById('update-status').innerText = '⚠️ Error checking other instances: ' + error;
}
}

9 static/copy.js Normal file
View File

@ -0,0 +1,9 @@
async function copy() {
await navigator.clipboard.writeText(document.getElementById('bincode_str').value);
}
async function set_listener() {
document.getElementById('copy').addEventListener('click', copy);
}
window.addEventListener('load', set_listener);

View File

@@ -3,9 +3,9 @@
<ShortName>Search Redlib</ShortName>
<Description>Search for whatever you want on Redlib, awesome Reddit frontend</Description>
<InputEncoding>UTF-8</InputEncoding>
-<Image width="32" height="32" type="image/x-icon">/favicon.ico</Image>
+<Image width="32" height="32" type="image/x-icon">https://localhost:8080/favicon.ico</Image>
-<Url type="text/html" template="/search">
+<Url type="text/html" template="https://localhost:8080/search">
<Param name="q" value="{searchTerms}"/>
</Url>
-<moz:SearchForm>/search</moz:SearchForm>
+<moz:SearchForm>https://localhost:8080/search</moz:SearchForm>
</OpenSearchDescription>

View File

@@ -571,14 +571,22 @@ aside {
.filter,
.unquick,
.quick,
-.unfilter {
+.unfilter,
+.copy,
+.import {
padding: 10px 20px;
border-radius: 5px;
cursor: pointer;
}

.copy,
.import {
margin: 5px;
}

.subscribe,
-.filter {
+.filter,
+.copy,
+.import {
color: var(--foreground);
background-color: var(--accent);
}
@@ -1227,6 +1235,13 @@ a.search_subreddit:hover {
overflow-wrap: anywhere;
}

.post_body pre {
background: var(--background);
overflow-x: auto;
margin: 10px 0;
padding: 10px;
}

.post_body img {
max-width: 100%;
display: block;

View File

@ -0,0 +1,14 @@
/* midnightpurple theme setting */
.midnightPurple{
--accent: #be6ede;
--green: #268F02;
--text: white;
--foreground: #222;
--background: #000000;
--outside: #1f1f1f;
--post: #000000;
--panel-border: 1px solid #4E1764;
--highlighted: #333;
--visited: #aaa;
--shadow: 0 1px 3px rgba(0, 0, 0, 0.5);
}

View File

@@ -41,7 +41,7 @@
<div class="comment_body {% if highlighted %}highlighted{% endif %}">{{ body|safe }}</div>
{% endif %}
<blockquote class="replies">{% for c in replies -%}{{ c.render().unwrap()|safe }}{%- endfor %}
-</bockquote>
+</blockquote>
</details>
</div>
{% endif %}

20 templates/info.html Normal file
View File

@ -0,0 +1,20 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}
{% block title %}Info: {{ msg }}{% endblock %}
{% block sortstyle %}{% endblock %}
{% block subscriptions %}
{% call utils::sub_list("") %}
{% endblock %}
{% block search %}
{% call utils::search("".to_owned(), "") %}
{% endblock %}
{% block content %}
<div id="error">
<h2>{{ msg }}</h2>
<br />
</div>
{% endblock %}

View File

@@ -1,7 +1,13 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}
-{% block title %}{{ post.title }} - r/{{ post.community }}{% endblock %}
+{% block title %}
+{% if single_thread %}
+{{ comments[0].author.name }} comments on {{ post.title }} - r/{{ post.community }}
+{% else %}
+{{ post.title }} - r/{{ post.community }}
+{% endif %}
+{% endblock %}

{% block search %}
{% call utils::search(["/r/", post.community.as_str()].concat(), "") %}
View File

@@ -4,15 +4,15 @@
{% block title %}Redlib Settings{% endblock %}
{% block subscriptions %}
{% call utils::sub_list("") %}
{% endblock %}
{% block search %}
{% call utils::search("".to_owned(), "") %}
{% endblock %}
{% block content %}
<div id="settings">
<form action="/settings" method="POST">
<div class="prefs">
<fieldset>
@@ -37,6 +37,12 @@
</fieldset>
<fieldset>
<legend>Interface</legend>
<div class="prefs-group">
<label for="remove_default_feeds">Remove default feeds</label>
<input type="hidden" value="off" name="remove_default_feeds">
<input type="checkbox" name="remove_default_feeds" id="remove_default_feeds" {% if prefs.remove_default_feeds=="on" %}checked{% endif %}>
</div>
<div class="prefs-group">
<label for="front_page">Front page:</label>
<select name="front_page" id="front_page">
@@ -78,33 +84,44 @@
</fieldset>
<fieldset>
<legend>Content</legend>
<div class="prefs-group">
<label for="video_quality">Video quality:</label>
<select name="video_quality" id="video_quality">
{% call utils::options(prefs.video_quality, ["best", "medium", "worst"], "best") %}
</select>
</div>
<div class="prefs-group">
<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
<select name="post_sort">
-{% call utils::options(prefs.post_sort, ["hot", "new", "top", "rising", "controversial"], "hot") %}
+{% call utils::options(prefs.post_sort, ["hot", "new", "top", "rising", "controversial"], "hot")
+%}
</select>
</div>
<div class="prefs-group">
<label for="comment_sort">Default comment sort:</label>
<select name="comment_sort" id="comment_sort">
-{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"], "confidence") %}
+{% call utils::options(prefs.comment_sort, ["confidence", "top", "new", "controversial", "old"],
+"confidence") %}
</select>
</div>
<div class="prefs-group">
<label for="blur_spoiler">Blur spoiler previews:</label>
<input type="hidden" value="off" name="blur_spoiler">
-<input type="checkbox" name="blur_spoiler" id="blur_spoiler" {% if prefs.blur_spoiler == "on" %}checked{% endif %}>
+<input type="checkbox" name="blur_spoiler" id="blur_spoiler" {% if prefs.blur_spoiler=="on"
+%}checked{% endif %}>
</div>
{% if !crate::utils::sfw_only() %}
<div class="prefs-group">
<label for="show_nsfw">Show NSFW posts:</label>
<input type="hidden" value="off" name="show_nsfw">
-<input type="checkbox" name="show_nsfw" id="show_nsfw" {% if prefs.show_nsfw == "on" %}checked{% endif %}>
+<input type="checkbox" name="show_nsfw" id="show_nsfw" {% if prefs.show_nsfw=="on" %}checked{% endif
+%}>
</div>
<div class="prefs-group">
<label for="blur_nsfw">Blur NSFW previews:</label>
<input type="hidden" value="off" name="blur_nsfw">
-<input type="checkbox" name="blur_nsfw" id="blur_nsfw" {% if prefs.blur_nsfw == "on" %}checked{% endif %}>
+<input type="checkbox" name="blur_nsfw" id="blur_nsfw" {% if prefs.blur_nsfw=="on" %}checked{% endif
+%}>
</div>
{% endif %}
<div class="prefs-group">
@@ -129,14 +146,17 @@
<div class="prefs-group">
<label for="autoplay_videos">Autoplay videos</label>
<input type="hidden" value="off" name="autoplay_videos">
-<input type="checkbox" name="autoplay_videos" id="autoplay_videos" {% if prefs.autoplay_videos == "on" %}checked{% endif %}>
+<input type="checkbox" name="autoplay_videos" id="autoplay_videos" {% if prefs.autoplay_videos=="on"
+%}checked{% endif %}>
</div>
<div class="prefs-group">
<label for="use_hls">Use HLS for videos</label>
{% if prefs.ffmpeg_video_downloads != "on" %}
<details id="feeds">
<summary>Why?</summary>
-<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled to be played with audio. Therefore, this toggle lets you either use Redlib JS-free or utilize this feature.</div>
+<div id="feed_list" class="helper">Reddit videos require JavaScript (via HLS.js) to be enabled
+to be played with audio. Therefore, this toggle lets you either use Redlib JS-free or
+utilize this feature.</div>
</details>
{% endif %}
{% if prefs.ffmpeg_video_downloads == "on" %}<u>ⓘ HLS is required for downloads</u>{% endif %}
@@ -155,7 +175,8 @@
<div class="prefs-group">
<label for="hide_hls_notification">Hide notification about possible HLS usage</label>
<input type="hidden" value="off" name="hide_hls_notification">
-<input type="checkbox" name="hide_hls_notification" id="hide_hls_notification" {% if prefs.hide_hls_notification == "on" %}checked{% endif %}>
+<input type="checkbox" name="hide_hls_notification" id="hide_hls_notification" {% if
+prefs.hide_hls_notification=="on" %}checked{% endif %}>
</div>
</fieldset>
<input id="save" type="submit" value="Save">
@@ -171,7 +192,8 @@
{% for sub in prefs.subscriptions %}
<div>
{% let feed -%}
-{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = format!("r/{}", sub) -%}{% endif -%}
+{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed =
+format!("r/{}", sub) -%}{% endif -%}
<a href="/{{ feed }}">{{ feed }}</a>
<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
<button class="unsubscribe">Unsubscribe</button>
@@ -186,7 +208,8 @@
{% for sub in prefs.filters %}
<div>
{% let feed -%}
-{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = format!("r/{}", sub) -%}{% endif -%}
+{% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed =
+format!("r/{}", sub) -%}{% endif -%}
<a href="/{{ feed }}">{{ feed }}</a>
<form action="/r/{{ sub }}/unfilter/?redirect=settings" method="POST">
<button class="unfilter">Unfilter</button>
@@ -195,6 +218,37 @@
{% endfor %}
</div>
{% endif %}
<div id="settings_note">
<p><b>Note:</b> settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.
</p>
<br>
{% match prefs.to_urlencoded() %}
{% when Ok with (encoded_prefs) %}
<p>You can restore your current settings and subscriptions after clearing your cookies using <a
href="/settings/restore/?{{ encoded_prefs }}">this link</a>.</p>
{% when Err with (err) %}
<p>There was an error creating your restore link: {{ err }}</p>
<p>Please report this issue</p>
{% endmatch %}
<br />
<div>
<script src="/copy.js"></script>
<label for="bincode_str">Or, export/import here (be sure to save first):</label>
<br />
<input type="text" id="bincode_str" name="bincode_str"
value="{% match prefs.to_bincode_str() %}{% when Ok with (bincode_str) %}{{ bincode_str }}{% when Err with (err) %}Error: {{ err }}{% endmatch %}"
readonly>
<button id="copy" class="copy">Copy</button>
<br />
<form action="/settings/encoded-restore/" method="POST">
<input type="text" id="encoded_prefs" name="encoded_prefs" value=""
placeholder="Paste your encoded settings here">
<button class="import" type="submit">Import</button>
</form>
</div>
</div>
</div>
{% endblock %}

View File

@@ -49,8 +49,10 @@
{% endif %}
<p>MAIN FEEDS</p>
<a href="/">Home</a>
{% if prefs.remove_default_feeds != "on" %}
<a href="/r/popular">Popular</a>
<a href="/r/all">All</a>
{% endif %}
{% if prefs.subscriptions.len() > 0 %}
<p>REDDIT FEEDS</p>
{% for sub in prefs.subscriptions %}