From 95ab6c53856291a4d9cebb9fa5c5045031836120 Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Wed, 20 Nov 2024 18:50:06 -0500 Subject: [PATCH 01/49] fix(oauth): update oauth resources and script --- scripts/update_oauth_resources.sh | 10 ++--- src/oauth_resources.rs | 70 ++++++++++++++++--------------- 2 files changed, 42 insertions(+), 38 deletions(-) diff --git a/scripts/update_oauth_resources.sh b/scripts/update_oauth_resources.sh index 1d6b486..a3014ae 100755 --- a/scripts/update_oauth_resources.sh +++ b/scripts/update_oauth_resources.sh @@ -39,12 +39,12 @@ done echo "];" >> "$filename" # Fetch Android app versions -page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "" -r "https://apkcombo.com\$1" | sort | uniq) +page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') # Append with pages -page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "" -r "https://apkcombo.com\$1" | sort | uniq) -page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "" -r "https://apkcombo.com\$1" | sort | uniq) -page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "" -r "https://apkcombo.com\$1" | sort | uniq) -page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "" -r "https://apkcombo.com\$1" | sort | uniq) +page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') +page_5=$(curl -s 
"https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') # Concatenate all pages versions="${page_1}" diff --git a/src/oauth_resources.rs b/src/oauth_resources.rs index 3272939..a5dc2f3 100644 --- a/src/oauth_resources.rs +++ b/src/oauth_resources.rs @@ -2,8 +2,40 @@ // Rerun scripts/update_oauth_resources.sh to update this file // Please do not edit manually // Filled in with real app versions -pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[""]; +pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[ + "", +]; pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ + "Version 2024.22.1/Build 1652272", + "Version 2024.23.1/Build 1665606", + "Version 2024.24.1/Build 1682520", + "Version 2024.25.0/Build 1693595", + "Version 2024.25.2/Build 1700401", + "Version 2024.25.3/Build 1703490", + "Version 2024.26.0/Build 1710470", + "Version 2024.26.1/Build 1717435", + "Version 2024.28.0/Build 1737665", + "Version 2024.28.1/Build 1741165", + "Version 2024.30.0/Build 1770787", + "Version 2024.31.0/Build 1786202", + "Version 2024.32.0/Build 1809095", + "Version 2024.32.1/Build 1813258", + "Version 2024.33.0/Build 1819908", + "Version 2024.34.0/Build 1837909", + "Version 2024.35.0/Build 1861437", + "Version 2024.36.0/Build 1875012", + "Version 2024.37.0/Build 1888053", + "Version 2024.38.0/Build 1902791", + "Version 2024.39.0/Build 1916713", + "Version 2024.40.0/Build 1928580", + "Version 2024.41.0/Build 1941199", + "Version 2024.41.1/Build 1947805", + "Version 2024.42.0/Build 1952440", + "Version 2024.43.0/Build 1972250", + "Version 2024.44.0/Build 1988458", + "Version 2024.45.0/Build 2001943", + "Version 2024.46.0/Build 2012731", + "Version 2024.47.0/Build 2029755", "Version 2023.48.0/Build 1319123", "Version 2023.49.0/Build 1321715", "Version 2023.49.1/Build 1322281", @@ -31,9 +63,9 @@ pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ "Version 2024.20.0/Build 1612800", "Version 2024.20.1/Build 
1615586", "Version 2024.20.2/Build 1624969", + "Version 2024.20.3/Build 1624970", "Version 2024.21.0/Build 1631686", "Version 2024.22.0/Build 1645257", - "Version 2024.22.1/Build 1652272", "Version 2023.21.0/Build 956283", "Version 2023.22.0/Build 968223", "Version 2023.23.0/Build 983896", @@ -124,35 +156,7 @@ pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ "Version 2022.40.0/Build 624782", "Version 2022.41.0/Build 630468", "Version 2022.41.1/Build 634168", - "Version 2021.39.1/Build 372418", - "Version 2021.41.0/Build 376052", - "Version 2021.42.0/Build 378193", - "Version 2021.43.0/Build 382019", - "Version 2021.44.0/Build 385129", - "Version 2021.45.0/Build 387663", - "Version 2021.46.0/Build 392043", - "Version 2021.47.0/Build 394342", - "Version 2022.10.0/Build 429896", - "Version 2022.1.0/Build 402829", - "Version 2022.11.0/Build 433004", - "Version 2022.12.0/Build 436848", - "Version 2022.13.0/Build 442084", - "Version 2022.13.1/Build 444621", - "Version 2022.14.1/Build 452742", - "Version 2022.15.0/Build 455453", - "Version 2022.16.0/Build 462377", - "Version 2022.17.0/Build 468480", - "Version 2022.18.0/Build 473740", - "Version 2022.19.1/Build 482464", - "Version 2022.2.0/Build 405543", - "Version 2022.3.0/Build 408637", - "Version 2022.4.0/Build 411368", - "Version 2022.5.0/Build 414731", - "Version 2022.6.0/Build 418391", - "Version 2022.6.1/Build 419585", - "Version 2022.6.2/Build 420562", - "Version 2022.7.0/Build 420849", - "Version 2022.8.0/Build 423906", - "Version 2022.9.0/Build 426592", ]; -pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[""]; +pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[ + "", +]; From 6be6f892a4eb159f5a27ce48f0ce615298071eac Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Wed, 20 Nov 2024 19:19:29 -0500 Subject: [PATCH 02/49] feat(oauth): better oauth client matching --- Cargo.lock | 15 +++++++++++++++ Cargo.toml | 1 + src/client.rs | 36 +++++++++++++----------------------- src/oauth.rs | 20 +++++++++++++++----- 
src/oauth_resources.rs | 8 ++------ 5 files changed, 46 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4143b80..057234f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1327,6 +1327,8 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ + "libc", + "rand_chacha", "rand_core", ] @@ -1345,6 +1347,9 @@ name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] [[package]] name = "redlib" @@ -1380,6 +1385,7 @@ dependencies = [ "serde_json", "serde_json_path", "serde_yaml", + "tegen", "time", "tokio", "toml", @@ -1895,6 +1901,15 @@ dependencies = [ "syn", ] +[[package]] +name = "tegen" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a2d5a357b7c859b410139734a875136473c3b18b1bbd8d5bdc1769d9002acd" +dependencies = [ + "rand", +] + [[package]] name = "tempfile" version = "3.14.0" diff --git a/Cargo.toml b/Cargo.toml index 7bb7e93..616d8e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -49,6 +49,7 @@ serde_json_path = "0.7.1" async-recursion = "1.1.1" common-words-all = { version = "0.0.2", default-features = false, features = ["english", "one"] } hyper-rustls = { version = "0.24.2", features = [ "http2" ] } +tegen = "0.1.4" [dev-dependencies] diff --git a/src/client.rs b/src/client.rs index 248fc88..1e1661d 100644 --- a/src/client.rs +++ b/src/client.rs @@ -218,40 +218,30 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo // Construct the hyper client from the HTTPS connector. 
let client: &Lazy> = &CLIENT; - let (token, vendor_id, device_id, user_agent, loid) = { - let client = OAUTH_CLIENT.load_full(); - ( - client.token.clone(), - client.headers_map.get("Client-Vendor-Id").cloned().unwrap_or_default(), - client.headers_map.get("X-Reddit-Device-Id").cloned().unwrap_or_default(), - client.headers_map.get("User-Agent").cloned().unwrap_or_default(), - client.headers_map.get("x-reddit-loid").cloned().unwrap_or_default(), - ) - }; - // Build request to Reddit. When making a GET, request gzip compression. // (Reddit doesn't do brotli yet.) - let mut headers = vec![ - ("User-Agent", user_agent), - ("Client-Vendor-Id", vendor_id), - ("X-Reddit-Device-Id", device_id), - ("x-reddit-loid", loid), - ("Host", host.to_string()), - ("Authorization", format!("Bearer {token}")), - ("Accept-Encoding", if method == Method::GET { "gzip".into() } else { "identity".into() }), + let mut headers: Vec<(String, String)> = vec![ + ("Host".into(), host.into()), + ("Accept-Encoding".into(), if method == Method::GET { "gzip".into() } else { "identity".into() }), ( - "Cookie", + "Cookie".into(), if quarantine { "_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D".into() } else { "".into() }, ), - ("X-Reddit-Width", fastrand::u32(300..500).to_string()), - ("X-Reddit-DPR", "2".to_owned()), - ("Device-Name", format!("Android {}", fastrand::u8(9..=14))), ]; + { + let client = OAUTH_CLIENT.load_full(); + for (key, value) in client.initial_headers.clone() { + headers.push((key, value)); + } + } + + trace!("Headers: {:#?}", headers); + // shuffle headers: https://github.com/redlib-org/redlib/issues/324 fastrand::shuffle(&mut headers); diff --git a/src/oauth.rs b/src/oauth.rs index 80bf318..576b647 100644 --- a/src/oauth.rs +++ b/src/oauth.rs @@ -7,8 +7,8 @@ use crate::{ use base64::{engine::general_purpose, Engine as _}; use hyper::{client, Body, Method, Request}; use log::{error, info, trace}; - use serde_json::json; +use 
tegen::tegen::TextGenerator; use tokio::time::{error::Elapsed, timeout}; static REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg"; @@ -84,7 +84,7 @@ impl Oauth { // Set JSON body. I couldn't tell you what this means. But that's what the client sends let json = json!({ - "scopes": ["*","email"] + "scopes": ["*","email", "pii"] }); let body = Body::from(json.to_string()); @@ -185,11 +185,21 @@ impl Device { let android_user_agent = format!("Reddit/{android_app_version}/Android {android_version}"); + let qos = fastrand::u32(1000..=100_000); + let qos: f32 = qos as f32 / 1000.0; + let qos = format!("{:.3}", qos); + + let codecs = TextGenerator::new().generate("available-codecs=video/avc, video/hevc{, video/x-vnd.on2.vp9|}"); + // Android device headers - let headers = HashMap::from([ - ("Client-Vendor-Id".into(), uuid.clone()), - ("X-Reddit-Device-Id".into(), uuid.clone()), + let headers: HashMap = HashMap::from([ ("User-Agent".into(), android_user_agent), + ("x-reddit-retry".into(), "algo=no-retries".into()), + ("x-reddit-compression".into(), "1".into()), + ("x-reddit-qos".into(), qos), + ("x-reddit-media-codecs".into(), codecs), + ("Content-Type".into(), "application/json; charset=UTF-8".into()), + ("client-vendor-id".into(), uuid.clone()), ]); info!("[🔄] Spoofing Android client with headers: {headers:?}, uuid: \"{uuid}\", and OAuth ID \"{REDDIT_ANDROID_OAUTH_CLIENT_ID}\""); diff --git a/src/oauth_resources.rs b/src/oauth_resources.rs index a5dc2f3..faf7873 100644 --- a/src/oauth_resources.rs +++ b/src/oauth_resources.rs @@ -2,9 +2,7 @@ // Rerun scripts/update_oauth_resources.sh to update this file // Please do not edit manually // Filled in with real app versions -pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[ - "", -]; +pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[""]; pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ "Version 2024.22.1/Build 1652272", "Version 2024.23.1/Build 1665606", @@ -157,6 +155,4 @@ pub static ANDROID_APP_VERSION_LIST: 
&[&str; 150] = &[ "Version 2022.41.0/Build 630468", "Version 2022.41.1/Build 634168", ]; -pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[ - "", -]; +pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[""]; From 100a7b65a6a79968cbc8548f4ce12d722dfb0cba Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Sat, 23 Nov 2024 21:17:52 -0500 Subject: [PATCH 03/49] fix(client): update headers management, add self check (fix #334, fix #318) --- src/client.rs | 41 ++++++++++++++++++++++++++++++++++++++--- src/main.rs | 12 +++++++++++- src/oauth.rs | 6 ++++-- 3 files changed, 53 insertions(+), 6 deletions(-) diff --git a/src/client.rs b/src/client.rs index 1e1661d..0e2c301 100644 --- a/src/client.rs +++ b/src/client.rs @@ -19,6 +19,7 @@ use std::{io, result::Result}; use crate::dbg_msg; use crate::oauth::{force_refresh_token, token_daemon, Oauth}; use crate::server::RequestExt; +use crate::subreddit::community; use crate::utils::format_url; const REDDIT_URL_BASE: &str = "https://oauth.reddit.com"; @@ -235,13 +236,11 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo { let client = OAUTH_CLIENT.load_full(); - for (key, value) in client.initial_headers.clone() { + for (key, value) in client.headers_map.clone() { headers.push((key, value)); } } - trace!("Headers: {:#?}", headers); - // shuffle headers: https://github.com/redlib-org/redlib/issues/324 fastrand::shuffle(&mut headers); @@ -390,6 +389,12 @@ pub async fn json(path: String, quarantine: bool) -> Result { "Ratelimit remaining: Header says {remaining}, we have {current_rate_limit}. Resets in {reset}. Rollover: {}. 
Ratelimit used: {used}", if is_rolling_over { "yes" } else { "no" }, ); + + // If can parse remaining as a float, round to a u16 and save + if let Ok(val) = remaining.parse::() { + OAUTH_RATELIMIT_REMAINING.store(val.round() as u16, Ordering::SeqCst); + } + Some(reset) } else { None @@ -474,6 +479,36 @@ pub async fn json(path: String, quarantine: bool) -> Result { } } +async fn self_check(sub: &str) -> Result<(), String> { + let request = Request::get(format!("/r/{sub}/")).body(Body::empty()).unwrap(); + + match community(request).await { + Ok(sub) if sub.status().is_success() => Ok(()), + Ok(sub) => Err(sub.status().to_string()), + Err(e) => Err(e), + } +} + +pub async fn rate_limit_check() -> Result<(), String> { + // First, check a subreddit. + self_check("reddit").await?; + // This will reduce the rate limit to 99. Assert this check. + if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 { + return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst))); + } + // Now, we switch out the OAuth client. + // This checks for the IP rate limit association. + force_refresh_token().await; + // Now, check a new sub to break cache. + self_check("rust").await?; + // Again, assert the rate limit check. 
+ if OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst) != 99 { + return Err(format!("Rate limit check failed: expected 99, got {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst))); + } + + Ok(()) +} + #[cfg(test)] static POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL"; diff --git a/src/main.rs b/src/main.rs index 7342597..abae968 100644 --- a/src/main.rs +++ b/src/main.rs @@ -11,7 +11,7 @@ use hyper::Uri; use hyper::{header::HeaderValue, Body, Request, Response}; use log::info; use once_cell::sync::Lazy; -use redlib::client::{canonical_path, proxy, CLIENT}; +use redlib::client::{canonical_path, proxy, rate_limit_check, CLIENT}; use redlib::server::{self, RequestExt}; use redlib::utils::{error, redirect, ThemeAssets}; use redlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user}; @@ -146,6 +146,16 @@ async fn main() { ) .get_matches(); + match rate_limit_check().await { + Ok(()) => { + info!("[✅] Rate limit check passed"); + }, + Err(e) => { + log::error!("[❌] Rate limit check failed: {}", e); + std::process::exit(1); + } + } + let address = matches.get_one::("address").unwrap(); let port = matches.get_one::("port").unwrap(); let hsts = matches.get_one("hsts").map(|m: &String| m.as_str()); diff --git a/src/oauth.rs b/src/oauth.rs index 576b647..12b0f37 100644 --- a/src/oauth.rs +++ b/src/oauth.rs @@ -38,12 +38,12 @@ impl Oauth { } Ok(None) => { error!("Failed to create OAuth client. Retrying in 5 seconds..."); - continue; } Err(duration) => { error!("Failed to create OAuth client in {duration:?}. 
Retrying in 5 seconds..."); } } + tokio::time::sleep(Duration::from_secs(5)).await; } } @@ -91,13 +91,14 @@ impl Oauth { // Build request let request = builder.body(body).unwrap(); - trace!("Sending token request..."); + trace!("Sending token request...\n\n{request:?}"); // Send request let client: &once_cell::sync::Lazy> = &CLIENT; let resp = client.request(request).await.ok()?; trace!("Received response with status {} and length {:?}", resp.status(), resp.headers().get("content-length")); + trace!("OAuth headers: {:#?}", resp.headers()); // Parse headers - loid header _should_ be saved sent on subsequent token refreshes. // Technically it's not needed, but it's easy for Reddit API to check for this. @@ -200,6 +201,7 @@ impl Device { ("x-reddit-media-codecs".into(), codecs), ("Content-Type".into(), "application/json; charset=UTF-8".into()), ("client-vendor-id".into(), uuid.clone()), + ("X-Reddit-Device-Id".into(), uuid.clone()), ]); info!("[🔄] Spoofing Android client with headers: {headers:?}, uuid: \"{uuid}\", and OAuth ID \"{REDDIT_ANDROID_OAUTH_CLIENT_ID}\""); From 7fe109df2267f292459c2154554c7bb40e77908d Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Sat, 23 Nov 2024 21:41:30 -0500 Subject: [PATCH 04/49] style(clippy) --- src/main.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main.rs b/src/main.rs index abae968..9c8de97 100644 --- a/src/main.rs +++ b/src/main.rs @@ -149,9 +149,10 @@ async fn main() { match rate_limit_check().await { Ok(()) => { info!("[✅] Rate limit check passed"); - }, + } Err(e) => { - log::error!("[❌] Rate limit check failed: {}", e); + log::error!(" Rate limit check failed: {}", e); + println!("[❌] Rate limit check failed: {}", e); std::process::exit(1); } } From a4f511f67e350fb4e4792416d42dbeaa9f1d544b Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Sun, 24 Nov 2024 10:50:21 -0500 Subject: [PATCH 05/49] fix(client): update rate limit self-check (fix #335) --- src/client.rs | 30 
++++++++++++++++++++++++------ src/subreddit.rs | 3 +++ 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/src/client.rs b/src/client.rs index 0e2c301..ba08531 100644 --- a/src/client.rs +++ b/src/client.rs @@ -19,8 +19,7 @@ use std::{io, result::Result}; use crate::dbg_msg; use crate::oauth::{force_refresh_token, token_daemon, Oauth}; use crate::server::RequestExt; -use crate::subreddit::community; -use crate::utils::format_url; +use crate::utils::{format_url, Post}; const REDDIT_URL_BASE: &str = "https://oauth.reddit.com"; const REDDIT_URL_BASE_HOST: &str = "oauth.reddit.com"; @@ -480,11 +479,10 @@ pub async fn json(path: String, quarantine: bool) -> Result { } async fn self_check(sub: &str) -> Result<(), String> { - let request = Request::get(format!("/r/{sub}/")).body(Body::empty()).unwrap(); + let query = format!("/r/{sub}/hot.json?&raw_json=1"); - match community(request).await { - Ok(sub) if sub.status().is_success() => Ok(()), - Ok(sub) => Err(sub.status().to_string()), + match Post::fetch(&query, true).await { + Ok(_) => Ok(()), Err(e) => Err(e), } } @@ -509,6 +507,26 @@ pub async fn rate_limit_check() -> Result<(), String> { Ok(()) } +#[cfg(test)] +use {crate::config::get_setting, sealed_test::prelude::*}; + +#[tokio::test(flavor = "multi_thread")] +async fn test_rate_limit_check() { + rate_limit_check().await.unwrap(); +} + +#[test] +#[sealed_test(env = [("REDLIB_DEFAULT_SUBSCRIPTIONS", "rust")])] +fn test_default_subscriptions() { + tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on(async { + let subscriptions = get_setting("REDLIB_DEFAULT_SUBSCRIPTIONS"); + assert!(subscriptions.is_some()); + + // check rate limit + rate_limit_check().await.unwrap(); + }); +} + #[cfg(test)] static POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL"; diff --git a/src/subreddit.rs b/src/subreddit.rs index 3a07bdc..88aa542 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -8,6 +8,7 @@ use 
crate::utils::{ use crate::{client::json, server::RequestExt, server::ResponseExt}; use cookie::Cookie; use hyper::{Body, Request, Response}; +use log::{debug, trace}; use rinja::Template; use once_cell::sync::Lazy; @@ -62,6 +63,7 @@ pub async fn community(req: Request) -> Result, String> { // Build Reddit API path let root = req.uri().path() == "/"; let query = req.uri().query().unwrap_or_default().to_string(); + trace!("query: {}", query); let subscribed = setting(&req, "subscriptions"); let front_page = setting(&req, "front_page"); let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string()); @@ -123,6 +125,7 @@ pub async fn community(req: Request) -> Result, String> { } let path = format!("/r/{}/{sort}.json?{}{params}", sub_name.replace('+', "%2B"), req.uri().query().unwrap_or_default()); + debug!("Path: {}", path); let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str())); let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B"); let filters = get_filters(&req); From 9f6b08cbb2d0f43644a34f5d0210ac32b9add30c Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Tue, 26 Nov 2024 22:55:48 -0500 Subject: [PATCH 06/49] fix(main): reduce rate limit check fail to warned error --- src/main.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/main.rs b/src/main.rs index 9c8de97..8732d20 100644 --- a/src/main.rs +++ b/src/main.rs @@ -9,7 +9,7 @@ use std::str::FromStr; use futures_lite::FutureExt; use hyper::Uri; use hyper::{header::HeaderValue, Body, Request, Response}; -use log::info; +use log::{info, warn}; use once_cell::sync::Lazy; use redlib::client::{canonical_path, proxy, rate_limit_check, CLIENT}; use redlib::server::{self, RequestExt}; @@ -151,9 +151,12 @@ async fn main() { info!("[✅] Rate limit check passed"); } Err(e) => { - log::error!(" Rate limit check failed: {}", e); - println!("[❌] Rate limit check failed: {}", e); - 
std::process::exit(1); + let mut message = format!("Rate limit check failed: {}", e); + message += "\nThis may cause issues with the rate limit."; + message += "\nPlease report this error with the above information."; + message += "\nhttps://github.com/redlib-org/redlib/issues/new?assignees=sigaloid&labels=bug&title=%F0%9F%90%9B+Bug+Report%3A+Rate+limit+mismatch"; + warn!("{}", message); + eprintln!("{}", message); } } From e4fc22cf906b7e8213e0b96108106a9ad34c0e29 Mon Sep 17 00:00:00 2001 From: Integral Date: Tue, 3 Dec 2024 00:28:31 +0800 Subject: [PATCH 07/49] refactor: replace static with const for global constants (#340) --- scripts/update_oauth_resources.sh | 6 +++--- src/client.rs | 6 +++--- src/oauth.rs | 4 ++-- src/oauth_resources.rs | 6 +++--- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/update_oauth_resources.sh b/scripts/update_oauth_resources.sh index a3014ae..7eeb959 100755 --- a/scripts/update_oauth_resources.sh +++ b/scripts/update_oauth_resources.sh @@ -24,7 +24,7 @@ echo "// Please do not edit manually" >> "$filename" echo "// Filled in with real app versions" >> "$filename" # Open the array in the source file -echo "pub static _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename" +echo "pub const _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename" num=0 @@ -63,7 +63,7 @@ android_count=$(echo "$versions" | wc -l) echo -e "Fetching \e[32m$android_count Android app versions...\e[0m" # Append to the source file -echo "pub static ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename" +echo "pub const ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename" num=0 @@ -89,7 +89,7 @@ ios_count=$(echo "$table" | wc -l) echo -e "Fetching \e[34m$ios_count iOS versions...\e[0m" # Append to the source file -echo "pub static _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename" +echo "pub const _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename" num=0 diff 
--git a/src/client.rs b/src/client.rs index ba08531..fa32fc0 100644 --- a/src/client.rs +++ b/src/client.rs @@ -45,7 +45,7 @@ pub static OAUTH_RATELIMIT_REMAINING: AtomicU16 = AtomicU16::new(99); pub static OAUTH_IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false); -static URL_PAIRS: [(&str, &str); 2] = [ +const URL_PAIRS: [(&str, &str); 2] = [ (ALTERNATIVE_REDDIT_URL_BASE, ALTERNATIVE_REDDIT_URL_BASE_HOST), (REDDIT_SHORT_URL_BASE, REDDIT_SHORT_URL_BASE_HOST), ]; @@ -262,7 +262,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo return Ok(response); }; let location_header = response.headers().get(header::LOCATION); - if location_header == Some(&HeaderValue::from_static("https://www.reddit.com/")) { + if location_header == Some(&HeaderValue::from_static(ALTERNATIVE_REDDIT_URL_BASE)) { return Err("Reddit response was invalid".to_string()); } return request( @@ -528,7 +528,7 @@ fn test_default_subscriptions() { } #[cfg(test)] -static POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL"; +const POPULAR_URL: &str = "/r/popular/hot.json?&raw_json=1&geo_filter=GLOBAL"; #[tokio::test(flavor = "multi_thread")] async fn test_localization_popular() { diff --git a/src/oauth.rs b/src/oauth.rs index 12b0f37..5627900 100644 --- a/src/oauth.rs +++ b/src/oauth.rs @@ -11,9 +11,9 @@ use serde_json::json; use tegen::tegen::TextGenerator; use tokio::time::{error::Elapsed, timeout}; -static REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg"; +const REDDIT_ANDROID_OAUTH_CLIENT_ID: &str = "ohXpoqrZYub1kg"; -static AUTH_ENDPOINT: &str = "https://www.reddit.com"; +const AUTH_ENDPOINT: &str = "https://www.reddit.com"; // Spoofed client for Android devices #[derive(Debug, Clone, Default)] diff --git a/src/oauth_resources.rs b/src/oauth_resources.rs index faf7873..01928c3 100644 --- a/src/oauth_resources.rs +++ b/src/oauth_resources.rs @@ -2,8 +2,8 @@ // Rerun scripts/update_oauth_resources.sh to update this file // Please do not edit 
manually // Filled in with real app versions -pub static _IOS_APP_VERSION_LIST: &[&str; 1] = &[""]; -pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ +pub const _IOS_APP_VERSION_LIST: &[&str; 1] = &[""]; +pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ "Version 2024.22.1/Build 1652272", "Version 2024.23.1/Build 1665606", "Version 2024.24.1/Build 1682520", @@ -155,4 +155,4 @@ pub static ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ "Version 2022.41.0/Build 630468", "Version 2022.41.1/Build 634168", ]; -pub static _IOS_OS_VERSION_LIST: &[&str; 1] = &[""]; +pub const _IOS_OS_VERSION_LIST: &[&str; 1] = &[""]; From d7ec07cd0d713fc308e1004663b0053db8f00a0f Mon Sep 17 00:00:00 2001 From: Jeidnx Date: Mon, 2 Dec 2024 17:29:57 +0100 Subject: [PATCH 08/49] Implement a serializer for user preferences (#336) --- Cargo.lock | 13 +++++++++++ Cargo.toml | 1 + src/utils.rs | 49 ++++++++++++++++++++++++++++++++++++++--- templates/settings.html | 12 ++++++++-- 4 files changed, 70 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 057234f..819d4bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1384,6 +1384,7 @@ dependencies = [ "serde", "serde_json", "serde_json_path", + "serde_urlencoded", "serde_yaml", "tegen", "time", @@ -1797,6 +1798,18 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + [[package]] name = "serde_yaml" version = "0.9.34+deprecated" diff --git a/Cargo.toml b/Cargo.toml index 616d8e9..a1d3ec0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -50,6 +50,7 @@ async-recursion = "1.1.1" common-words-all = { version = "0.0.2", default-features = false, features = ["english", "one"] } hyper-rustls = { version = "0.24.2", features = [ "http2" ] } tegen = "0.1.4" +serde_urlencoded = "0.7.1" 
[dev-dependencies] diff --git a/src/utils.rs b/src/utils.rs index 1edb528..c15dcea 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -13,7 +13,7 @@ use once_cell::sync::Lazy; use regex::Regex; use rinja::Template; use rust_embed::RustEmbed; -use serde::Serialize; +use serde::{Serialize, Serializer}; use serde_json::Value; use serde_json_path::{JsonPath, JsonPathExt}; use std::collections::{HashMap, HashSet}; @@ -601,8 +601,9 @@ pub struct Params { pub before: Option, } -#[derive(Default)] +#[derive(Default, Serialize)] pub struct Preferences { + #[serde(skip)] pub available_themes: Vec, pub theme: String, pub front_page: String, @@ -620,12 +621,21 @@ pub struct Preferences { pub disable_visit_reddit_confirmation: String, pub comment_sort: String, pub post_sort: String, + #[serde(serialize_with = "serialize_vec_with_plus")] pub subscriptions: Vec, + #[serde(serialize_with = "serialize_vec_with_plus")] pub filters: Vec, pub hide_awards: String, pub hide_score: String, } +fn serialize_vec_with_plus(vec: &Vec, serializer: S) -> Result +where + S: Serializer, +{ + serializer.serialize_str(&vec.join("+")) +} + #[derive(RustEmbed)] #[folder = "static/themes/"] #[include = "*.css"] @@ -665,6 +675,10 @@ impl Preferences { hide_score: setting(req, "hide_score"), } } + + pub fn to_urlencoded(&self) -> Result { + serde_urlencoded::to_string(self).map_err(|e| e.to_string()) + } } /// Gets a `HashSet` of filters from the cookie in the given `Request`. 
@@ -1277,7 +1291,7 @@ pub fn get_post_url(post: &Post) -> String { #[cfg(test)] mod tests { - use super::{format_num, format_url, rewrite_urls}; + use super::{format_num, format_url, rewrite_urls, Preferences}; #[test] fn format_num_works() { @@ -1344,6 +1358,35 @@ mod tests { assert_eq!(format_url("nsfw"), ""); assert_eq!(format_url("spoiler"), ""); } + #[test] + fn serialize_prefs() { + let prefs = Preferences { + available_themes: vec![], + theme: "laserwave".to_owned(), + front_page: "default".to_owned(), + layout: "compact".to_owned(), + wide: "on".to_owned(), + blur_spoiler: "on".to_owned(), + show_nsfw: "off".to_owned(), + blur_nsfw: "on".to_owned(), + hide_hls_notification: "off".to_owned(), + video_quality: "best".to_owned(), + hide_sidebar_and_summary: "off".to_owned(), + use_hls: "on".to_owned(), + autoplay_videos: "on".to_owned(), + fixed_navbar: "on".to_owned(), + disable_visit_reddit_confirmation: "on".to_owned(), + comment_sort: "confidence".to_owned(), + post_sort: "top".to_owned(), + subscriptions: vec!["memes".to_owned(), "mildlyinteresting".to_owned()], + filters: vec![], + hide_awards: "off".to_owned(), + hide_score: "off".to_owned(), + }; + let urlencoded = serde_urlencoded::to_string(prefs).expect("Failed to serialize Prefs"); + + assert_eq!(urlencoded, "theme=laserwave&front_page=default&layout=compact&wide=on&blur_spoiler=on&show_nsfw=off&blur_nsfw=on&hide_hls_notification=off&video_quality=best&hide_sidebar_and_summary=off&use_hls=on&autoplay_videos=on&fixed_navbar=on&disable_visit_reddit_confirmation=on&comment_sort=confidence&post_sort=top&subscriptions=memes%2Bmildlyinteresting&filters=&hide_awards=off&hide_score=off") + } } #[test] diff --git a/templates/settings.html b/templates/settings.html index a7d6615..fef91cf 100644 --- a/templates/settings.html +++ b/templates/settings.html @@ -161,8 +161,16 @@ {% endif %} From 5c1e15c359e7b01316baab27d410130f1557cdf8 Mon Sep 17 00:00:00 2001 From: Butter Cat Date: Sun, 2 Feb 2025 21:48:46 -0500 
Subject: [PATCH 09/49] Make subscription and filter cookies split into multiple cookies if they're too large (#288) * Split subscriptions and filters cookies into multiple cookies and make old cookies properly delete * Cleanup * Fix mispelling for removing subscription cookies * Fix many subscription misspellings * Fix subreddits and filters that were at the end and beginning of the cookies getting merged * Make join_until_size_limit take the +'s into account when calculating length * Start cookies without number to be backwards compatible * Fix old split cookies not being removed and subreddits/filters between cookies occasionally getting merged * Make updating subscription/filters cookies safer * Small cleanup * Make restore properly add new subscriptions/filters cookies and delete old unused subscriptions/filters cookies * Fix misspellings on variable name --- src/server.rs | 8 ++- src/settings.rs | 116 ++++++++++++++++++++++++++++++++++++++- src/subreddit.rs | 138 +++++++++++++++++++++++++++++++++++++++++------ src/utils.rs | 78 ++++++++++++++++++++++----- 4 files changed, 307 insertions(+), 33 deletions(-) diff --git a/src/server.rs b/src/server.rs index 15c56ad..e1f464d 100644 --- a/src/server.rs +++ b/src/server.rs @@ -25,7 +25,7 @@ use std::{ str::{from_utf8, Split}, string::ToString, }; -use time::Duration; +use time::OffsetDateTime; use crate::dbg_msg; @@ -170,10 +170,8 @@ impl ResponseExt for Response { } fn remove_cookie(&mut self, name: String) { - let mut cookie = Cookie::from(name); - cookie.set_path("/"); - cookie.set_max_age(Duration::seconds(1)); - if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) { + let removal_cookie = Cookie::build(name).path("/").http_only(true).expires(OffsetDateTime::now_utc()); + if let Ok(val) = header::HeaderValue::from_str(&removal_cookie.to_string()) { self.headers_mut().append("Set-Cookie", val); } } diff --git a/src/settings.rs b/src/settings.rs index 4404912..34718c2 100644 --- a/src/settings.rs 
+++ b/src/settings.rs @@ -4,6 +4,7 @@ use std::collections::HashMap; // CRATES use crate::server::ResponseExt; +use crate::subreddit::join_until_size_limit; use crate::utils::{redirect, template, Preferences}; use cookie::Cookie; use futures_lite::StreamExt; @@ -119,7 +120,7 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response response.insert_cookie( Cookie::build((name.to_owned(), value.clone())) @@ -136,6 +137,119 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response = subscriptions.expect("Subscriptions").split('+').map(str::to_string).collect(); + + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut subscriptions_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { + let subscriptions_cookie = if subscriptions_number == 0 { + "subscriptions".to_string() + } else { + format!("subscriptions{}", subscriptions_number) + }; + + response.insert_cookie( + Cookie::build((subscriptions_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + subscriptions_number_to_delete_from += 1; + } + + // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } + } else { + // Remove unnumbered subscriptions cookie + response.remove_cookie("subscriptions".to_string()); + + // Starts at one to deal with the first numbered subscription cookie and onwards + let mut subscriptions_number_to_delete_from = 1; + + // While 
subscriptionsNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); + + // Increment subscriptions cookie number + subscriptions_number_to_delete_from += 1; + } + } + + // If there are filters to restore set them and delete any old filters cookies, otherwise delete them all + if filters.is_some() { + let filters_list: Vec = filters.expect("Filters").split('+').map(str::to_string).collect(); + + // Start at 0 to keep track of what number we need to start deleting old filters cookies from + let mut filters_number_to_delete_from = 0; + + // Starting at 0 so we handle the filters cookie without a number first + for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() { + let filters_cookie = if filters_number == 0 { + "filters".to_string() + } else { + format!("filters{}", filters_number) + }; + + response.insert_cookie( + Cookie::build((filters_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + filters_number_to_delete_from += 1; + } + + // While filtersNUMBER= is in the string of cookies add a response removing that cookie + while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { + // Remove that filters cookie + response.remove_cookie(format!("filters{filters_number_to_delete_from}")); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } + } else { + // Remove unnumbered filters cookie + response.remove_cookie("filters".to_string()); + + // Starts at one to deal with the first numbered filters cookie and onwards + let mut filters_number_to_delete_from = 1; + + // While filtersNUMBER= is in the string of cookies add a response removing that cookie + 
while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { + // Remove that filters cookie + response.remove_cookie(format!("filters{filters_number_to_delete_from}")); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } + } + + response } diff --git a/src/subreddit.rs b/src/subreddit.rs index 88aa542..2362a12 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -214,6 +214,41 @@ pub fn can_access_quarantine(req: &Request, sub: &str) -> bool { setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default() } +// Join items in chunks of 4000 bytes in length for cookies +pub fn join_until_size_limit(vec: &[T]) -> Vec { + let mut result = Vec::new(); + let mut list = String::new(); + let mut current_size = 0; + + for item in vec { + // Size in bytes + let item_size = item.to_string().len(); + // Use 4000 bytes to leave us some headroom because the name and options of the cookie count towards the 4096 byte cap + if current_size + item_size > 4000 { + // If last item add a separator on the end of the list so it's interpreted properly in tandem with the next cookie + list.push('+'); + + // Push current list to result vector + result.push(list); + + // Reset the list variable so we can continue with only new items + list = String::new(); + } + // Add separator if not the first item + if !list.is_empty() { + list.push('+'); + } + // Add current item to list + list.push_str(&item.to_string()); + current_size = list.len() + item_size; + } + // Make sure to push whatever remaining items there are into the result vector + result.push(list); + + // Return resulting vector + result +} + // Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header pub async fn subscriptions_filters(req: Request) -> Result, String> { let sub = req.param("sub").unwrap_or_default(); @@ -306,28 +341,101 @@ pub async fn subscriptions_filters(req: Request) -> Result, let 
mut response = redirect(&path); - // Delete cookie if empty, else set + // If sub_list is empty remove all subscriptions cookies, otherwise update them and remove old ones if sub_list.is_empty() { + // Remove subscriptions cookie response.remove_cookie("subscriptions".to_string()); + + // Start with first numbered subscriptions cookie + let mut subscriptions_number = 1; + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{}", subscriptions_number)); + + // Increment subscriptions cookie number + subscriptions_number += 1; + } } else { - response.insert_cookie( - Cookie::build(("subscriptions", sub_list.join("+"))) - .path("/") - .http_only(true) - .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .into(), - ); + // Start at 0 to keep track of what number we need to start deleting old subscription cookies from + let mut subscriptions_number_to_delete_from = 0; + + // Starting at 0 so we handle the subscription cookie without a number first + for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { + let subscriptions_cookie = if subscriptions_number == 0 { + "subscriptions".to_string() + } else { + format!("subscriptions{}", subscriptions_number) + }; + + response.insert_cookie( + Cookie::build((subscriptions_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + subscriptions_number_to_delete_from += 1; + } + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number_to_delete_from)).is_some() { + // Remove that subscriptions cookie + response.remove_cookie(format!("subscriptions{}", subscriptions_number_to_delete_from)); + + // Increment subscriptions cookie number + 
subscriptions_number_to_delete_from += 1; + } } + + // If filters is empty remove all filters cookies, otherwise update them and remove old ones if filters.is_empty() { + // Remove filters cookie response.remove_cookie("filters".to_string()); + + // Start with first numbered filters cookie + let mut filters_number = 1; + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number)).is_some() { + // Remove that filters cookie + response.remove_cookie(format!("filters{}", filters_number)); + + // Increment filters cookie number + filters_number += 1; + } } else { - response.insert_cookie( - Cookie::build(("filters", filters.join("+"))) - .path("/") - .http_only(true) - .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) - .into(), - ); + // Start at 0 to keep track of what number we need to start deleting old filters cookies from + let mut filters_number_to_delete_from = 0; + + for (filters_number, list) in join_until_size_limit(&filters).into_iter().enumerate() { + let filters_cookie = if filters_number == 0 { + "filters".to_string() + } else { + format!("filters{}", filters_number) + }; + + response.insert_cookie( + Cookie::build((filters_cookie, list)) + .path("/") + .http_only(true) + .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) + .into(), + ); + + filters_number_to_delete_from += 1; + } + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number_to_delete_from)).is_some() { + // Remove that filters cookie + response.remove_cookie(format!("filters{}", filters_number_to_delete_from)); + + // Increment filters cookie number + filters_number_to_delete_from += 1; + } } Ok(response) diff --git a/src/utils.rs b/src/utils.rs index c15dcea..e2cefd1 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -825,18 +825,72 @@ pub fn param(path: &str, value: &str) -> Option { // Retrieve the value of a setting by name pub fn 
setting(req: &Request, name: &str) -> String { // Parse a cookie value from request - req - .cookie(name) - .unwrap_or_else(|| { - // If there is no cookie for this setting, try receiving a default from the config - if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) { - Cookie::new(name, default) - } else { - Cookie::from(name) - } - }) - .value() - .to_string() + + // If this was called with "subscriptions" and the "subscriptions" cookie has a value + if name == "subscriptions" && req.cookie("subscriptions").is_some() { + // Create subscriptions string + let mut subscriptions = String::new(); + + // Default subscriptions cookie + if req.cookie("subscriptions").is_some() { + subscriptions.push_str(req.cookie("subscriptions").unwrap().value()); + } + + // Start with first numbered subscription cookie + let mut subscriptions_number = 1; + + // While whatever subscriptionsNUMBER cookie we're looking at has a value + while req.cookie(&format!("subscriptions{}", subscriptions_number)).is_some() { + // Push whatever subscriptionsNUMBER cookie we're looking at into the subscriptions string + subscriptions.push_str(req.cookie(&format!("subscriptions{}", subscriptions_number)).unwrap().value()); + + // Increment subscription cookie number + subscriptions_number += 1; + } + + // Return the subscriptions cookies as one large string + subscriptions + } + // If this was called with "filters" and the "filters" cookie has a value + else if name == "filters" && req.cookie("filters").is_some() { + // Create filters string + let mut filters = String::new(); + + // Default filters cookie + if req.cookie("filters").is_some() { + filters.push_str(req.cookie("filters").unwrap().value()); + } + + // Start with first numbered filters cookie + let mut filters_number = 1; + + // While whatever filtersNUMBER cookie we're looking at has a value + while req.cookie(&format!("filters{}", filters_number)).is_some() { + // Push whatever filtersNUMBER cookie 
we're looking at into the filters string + filters.push_str(req.cookie(&format!("filters{}", filters_number)).unwrap().value()); + + // Increment filters cookie number + filters_number += 1; + } + + // Return the filters cookies as one large string + filters + } + // The above two still come to this if there was no existing value + else { + req + .cookie(name) + .unwrap_or_else(|| { + // If there is no cookie for this setting, try receiving a default from the config + if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) { + Cookie::new(name, default) + } else { + Cookie::from(name) + } + }) + .value() + .to_string() + } } // Retrieve the value of a setting by name or the default value From 9e47bc37c7e3d1b5b929926d84459d5ca4a244a9 Mon Sep 17 00:00:00 2001 From: Kot C Date: Sun, 2 Feb 2025 20:49:46 -0600 Subject: [PATCH 10/49] Support HEAD requests (resolves #292) (#363) * Support HEAD requests * Remove body from error responses too --- src/server.rs | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/server.rs b/src/server.rs index e1f464d..5297c22 100644 --- a/src/server.rs +++ b/src/server.rs @@ -238,8 +238,14 @@ impl Server { path.pop(); } + // Replace HEAD with GET for routing + let (method, is_head) = match req.method() { + &Method::HEAD => (&Method::GET, true), + method => (method, false), + }; + // Match the visited path with an added route - match router.recognize(&format!("/{}{}", req.method().as_str(), path)) { + match router.recognize(&format!("/{}{}", method.as_str(), path)) { // If a route was configured for this path Ok(found) => { let mut parammed = req; @@ -251,17 +257,21 @@ impl Server { match func.await { Ok(mut res) => { res.headers_mut().extend(def_headers); - let _ = compress_response(&req_headers, &mut res).await; + if is_head { + *res.body_mut() = Body::empty(); + } else { + let _ = compress_response(&req_headers, &mut res).await; + } Ok(res) } - Err(msg) => 
new_boilerplate(def_headers, req_headers, 500, Body::from(msg)).await, + Err(msg) => new_boilerplate(def_headers, req_headers, 500, if is_head { Body::empty() } else { Body::from(msg) }).await, } } .boxed() } // If there was a routing error - Err(e) => new_boilerplate(def_headers, req_headers, 404, e.into()).boxed(), + Err(e) => new_boilerplate(def_headers, req_headers, 404, if is_head { Body::empty() } else { e.into() }).boxed(), } })) } From adf25cb15b61984581422ac798cc7c1364ad8e75 Mon Sep 17 00:00:00 2001 From: Martin Lindhe Date: Mon, 3 Feb 2025 03:56:47 +0100 Subject: [PATCH 11/49] unescape selftext_html from json api, fixes #354 (#357) * unescape selftext_html from json api, fixes #354 * fix(fmt) --------- Co-authored-by: Matthew Esposito --- Cargo.lock | 7 +++++++ Cargo.toml | 1 + src/utils.rs | 3 ++- 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 819d4bc..20d528b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -770,6 +770,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +[[package]] +name = "htmlescape" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9025058dae765dee5070ec375f591e2ba14638c63feff74f13805a72e523163" + [[package]] name = "http" version = "0.2.12" @@ -1367,6 +1373,7 @@ dependencies = [ "dotenvy", "fastrand", "futures-lite", + "htmlescape", "hyper", "hyper-rustls", "libflate", diff --git a/Cargo.toml b/Cargo.toml index a1d3ec0..a4d0170 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,6 +51,7 @@ common-words-all = { version = "0.0.2", default-features = false, features = ["e hyper-rustls = { version = "0.24.2", features = [ "http2" ] } tegen = "0.1.4" serde_urlencoded = "0.7.1" +htmlescape = "0.3.1" [dev-dependencies] diff --git a/src/utils.rs b/src/utils.rs index e2cefd1..ea14dac 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -7,6 +7,7 @@ 
use crate::config::{self, get_setting}; // use crate::{client::json, server::RequestExt}; use cookie::Cookie; +use htmlescape::decode_html; use hyper::{Body, Request, Response}; use log::error; use once_cell::sync::Lazy; @@ -376,7 +377,7 @@ impl Post { let awards = Awards::parse(&data["all_awardings"]); // selftext_html is set for text posts when browsing. - let mut body = rewrite_urls(&val(post, "selftext_html")); + let mut body = rewrite_urls(&decode_html(&val(post, "selftext_html")).unwrap()); if body.is_empty() { body = rewrite_urls(&val(post, "body_html")); } From fd1c32f5552cc116e1cb4c95fcd7cc7a7b069335 Mon Sep 17 00:00:00 2001 From: Martin Lindhe Date: Mon, 3 Feb 2025 04:00:44 +0100 Subject: [PATCH 12/49] rss: add field, fixes #356 (#358) * rss: add field, fixes #356 * rss: also add pub_date on user feed * fix(fmt) --------- Co-authored-by: Matthew Esposito --- Cargo.lock | 5 +++-- Cargo.toml | 1 + src/subreddit.rs | 2 ++ src/user.rs | 2 ++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 20d528b..24791b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,9 +274,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "num-traits", ] @@ -1367,6 +1367,7 @@ dependencies = [ "brotli", "build_html", "cached", + "chrono", "clap", "common-words-all", "cookie", diff --git a/Cargo.toml b/Cargo.toml index a4d0170..843b9c9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,6 +51,7 @@ common-words-all = { version = "0.0.2", default-features = false, features = ["e hyper-rustls = { version = "0.24.2", features = [ "http2" ] } tegen = "0.1.4" serde_urlencoded = "0.7.1" +chrono = { version = "0.4.39", 
default-features = false, features = [ "std" ] } htmlescape = "0.3.1" diff --git a/src/subreddit.rs b/src/subreddit.rs index 2362a12..d5d5196 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -11,6 +11,7 @@ use hyper::{Body, Request, Response}; use log::{debug, trace}; use rinja::Template; +use chrono::DateTime; use once_cell::sync::Lazy; use regex::Regex; use time::{Duration, OffsetDateTime}; @@ -607,6 +608,7 @@ pub async fn rss(req: Request) -> Result, String> { link: Some(utils::get_post_url(&post)), author: Some(post.author.name), content: Some(rewrite_urls(&post.body)), + pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), description: Some(format!( "Comments", config::get_setting("REDLIB_FULL_URL").unwrap_or_default(), diff --git a/src/user.rs b/src/user.rs index 50a4daa..2fb8b0d 100644 --- a/src/user.rs +++ b/src/user.rs @@ -5,6 +5,7 @@ use crate::client::json; use crate::server::RequestExt; use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User}; use crate::{config, utils}; +use chrono::DateTime; use hyper::{Body, Request, Response}; use rinja::Template; use time::{macros::format_description, OffsetDateTime}; @@ -165,6 +166,7 @@ pub async fn rss(req: Request) -> Result, String> { title: Some(post.title.to_string()), link: Some(utils::get_post_url(&post)), author: Some(post.author.name), + pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), content: Some(rewrite_urls(&post.body)), ..Default::default() }) From cb659cc8a3fa8e85e7fcd6c5e96d67fa83081c7b Mon Sep 17 00:00:00 2001 From: Martin Lindhe Date: Mon, 3 Feb 2025 04:00:58 +0100 Subject: [PATCH 13/49] rss: proxy links in users and subreddit feeds, fixes #359 (#361) --- src/subreddit.rs | 2 +- src/user.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/subreddit.rs b/src/subreddit.rs index d5d5196..0db4f77 
100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -605,7 +605,7 @@ pub async fn rss(req: Request) -> Result, String> { .into_iter() .map(|post| Item { title: Some(post.title.to_string()), - link: Some(utils::get_post_url(&post)), + link: Some(format_url(&utils::get_post_url(&post))), author: Some(post.author.name), content: Some(rewrite_urls(&post.body)), pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), diff --git a/src/user.rs b/src/user.rs index 2fb8b0d..818f368 100644 --- a/src/user.rs +++ b/src/user.rs @@ -164,7 +164,7 @@ pub async fn rss(req: Request) -> Result, String> { .into_iter() .map(|post| Item { title: Some(post.title.to_string()), - link: Some(utils::get_post_url(&post)), + link: Some(format_url(&utils::get_post_url(&post))), author: Some(post.author.name), pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), content: Some(rewrite_urls(&post.body)), From 0703fa103611786637328bf569928c0619aff759 Mon Sep 17 00:00:00 2001 From: Vivek Date: Sun, 2 Feb 2025 19:10:12 -0800 Subject: [PATCH 14/49] [build] add new dockerfiles for building from source (#244) * add new dockerfiles * update default ubuntu base images * updates * update comment * update cargo command Co-authored-by: Pim * update cargo command Co-authored-by: Pim * specify binary * use label instead of maintainer --------- Co-authored-by: Pim --- Dockerfile.alpine | 45 +++++++++++++++++++++++++++++++++++++++++ Dockerfile.ubuntu | 51 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 96 insertions(+) create mode 100644 Dockerfile.alpine create mode 100644 Dockerfile.ubuntu diff --git a/Dockerfile.alpine b/Dockerfile.alpine new file mode 100644 index 0000000..051476a --- /dev/null +++ b/Dockerfile.alpine @@ -0,0 +1,45 @@ +# supported versions here: https://hub.docker.com/_/rust +ARG ALPINE_VERSION=3.20 + +######################## +## builder image +######################## +FROM 
rust:alpine${ALPINE_VERSION} AS builder + +RUN apk add --no-cache musl-dev + +WORKDIR /redlib + +# download (most) dependencies in their own layer +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs +RUN cargo build --release --locked --bin redlib +RUN rm ./src/main.rs && rmdir ./src + +# copy the source and build the redlib binary +COPY . ./ +RUN cargo build --release --locked --bin redlib +RUN echo "finished building redlib!" + +######################## +## release image +######################## +FROM alpine:${ALPINE_VERSION} AS release + +# Import redlib binary from builder +COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib + +# Add non-root user for running redlib +RUN adduser --home /nonexistent --no-create-home --disabled-password redlib +USER redlib + +# Document that we intend to expose port 8080 to whoever runs the container +EXPOSE 8080 + +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 + +# Add container metadata +LABEL org.opencontainers.image.authors="sigaloid" + +CMD ["redlib"] diff --git a/Dockerfile.ubuntu b/Dockerfile.ubuntu new file mode 100644 index 0000000..2e277c5 --- /dev/null +++ b/Dockerfile.ubuntu @@ -0,0 +1,51 @@ +# supported versions here: https://hub.docker.com/_/rust +ARG RUST_BUILDER_VERSION=slim-bookworm +ARG UBUNTU_RELEASE_VERSION=noble + +######################## +## builder image +######################## +FROM rust:${RUST_BUILDER_VERSION} AS builder + +WORKDIR /redlib + +# download (most) dependencies in their own layer +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs +RUN cargo build --release --locked --bin redlib +RUN rm ./src/main.rs && rmdir ./src + +# copy the source and build the redlib binary +COPY . 
./ +RUN cargo build --release --locked --bin redlib +RUN echo "finished building redlib!" + +######################## +## release image +######################## +FROM ubuntu:${UBUNTU_RELEASE_VERSION} AS release + +# Install ca-certificates +RUN apt-get update && apt-get install -y ca-certificates + +# Import redlib binary from builder +COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib + +# Add non-root user for running redlib +RUN useradd \ + --no-create-home \ + --password "!" \ + --comment "user for running redlib" \ + redlib +USER redlib + +# Document that we intend to expose port 8080 to whoever runs the container +EXPOSE 8080 + +# Run a healthcheck every minute to make sure redlib is functional +HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider --q http://localhost:8080/settings || exit 1 + +# Add container metadata +LABEL org.opencontainers.image.authors="sigaloid" + +CMD ["redlib"] From 9e39a75e82cbf0c83b09e051c13073fa4a5e3f5a Mon Sep 17 00:00:00 2001 From: Joel Koen Date: Mon, 3 Feb 2025 14:16:59 +1000 Subject: [PATCH 15/49] build(nix): update deps (#331) --- flake.lock | 32 ++++++++++++-------------------- flake.nix | 10 ++-------- 2 files changed, 14 insertions(+), 28 deletions(-) diff --git a/flake.lock b/flake.lock index 4569244..2b0b585 100644 --- a/flake.lock +++ b/flake.lock @@ -1,17 +1,12 @@ { "nodes": { "crane": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, "locked": { - "lastModified": 1717025063, - "narHash": "sha256-dIubLa56W9sNNz0e8jGxrX3CAkPXsq7snuFA/Ie6dn8=", + "lastModified": 1731974733, + "narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=", "owner": "ipetkov", "repo": "crane", - "rev": "480dff0be03dac0e51a8dfc26e882b0d123a450e", + "rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c", "type": "github" }, "original": { @@ -25,11 +20,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 
1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -40,11 +35,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1717112898, - "narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=", + "lastModified": 1731890469, + "narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0", + "rev": "5083ec887760adfe12af64830a66807423a859a7", "type": "github" }, "original": { @@ -64,19 +59,16 @@ }, "rust-overlay": { "inputs": { - "flake-utils": [ - "flake-utils" - ], "nixpkgs": [ "nixpkgs" ] }, "locked": { - "lastModified": 1717121863, - "narHash": "sha256-/3sxIe7MZqF/jw1RTQCSmgTjwVod43mmrk84m50MJQ4=", + "lastModified": 1732069891, + "narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "2a7b53172ed08f856b8382d7dcfd36a4e0cbd866", + "rev": "8509a51241c407d583b1963d5079585a992506e8", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 8bcacf6..0180c8d 100644 --- a/flake.nix +++ b/flake.nix @@ -4,19 +4,13 @@ inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - crane = { - url = "github:ipetkov/crane"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + crane.url = "github:ipetkov/crane"; flake-utils.url = "github:numtide/flake-utils"; rust-overlay = { url = "github:oxalica/rust-overlay"; - inputs = { - nixpkgs.follows = "nixpkgs"; - flake-utils.follows = "flake-utils"; - }; + inputs.nixpkgs.follows = "nixpkgs"; }; }; From 96ad7bf1632ef4bafeba148288422ffe4cf9bfb2 Mon Sep 17 00:00:00 2001 From: mooons <10822203+mooons@users.noreply.github.com> Date: Sun, 2 Feb 2025 20:26:36 -0800 Subject: [PATCH 16/49] feat: render bullet lists (#321) * feat: render bullet lists * tests: add 
tests --------- Co-authored-by: Matthew Esposito --- src/utils.rs | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/src/utils.rs b/src/utils.rs index ea14dac..1bc70b9 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -989,6 +989,17 @@ pub fn format_url(url: &str) -> String { } } +static REGEX_BULLET: Lazy = Lazy::new(|| Regex::new(r"(?m)^- (.*)$").unwrap()); +static REGEX_BULLET_CONSECUTIVE_LINES: Lazy = Lazy::new(|| Regex::new(r"\n
-

Note: settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.


-

You can restore your current settings and subscriptions after clearing your cookies using this link.

+

Note: settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.

+
+ {% match prefs.to_urlencoded() %} + {% when Ok with (encoded_prefs) %} +

You can restore your current settings and subscriptions after clearing your cookies using this link.

+ {% when Err with (err) %} +

There was an error creating your restore link: {{ err }}

+

Please report this issue

+ {% endmatch %}