Compare commits


20 Commits

7e85ae25b1 · 2024-07-07 17:19:28 +12:00
    Merge remote-tracking branch 'upstream/main'
    + repository link update

Pim · 4f21388643 · 2024-07-05 16:33:06 -04:00
    fix: also use hls if possible for gifs in post_in_list macro (#177)

Pim · 8a917fcde3 · 2024-07-04 21:32:12 -04:00
    feat: add download button on image/gif/video posts (#173)
    * feat: add download button on image/gif/video posts
    * chore: fix formatting
    * chore: dont create reference

Matthew Esposito · 67a890cab3 · 2024-07-02 08:04:27 -04:00
    fix(posts): fix sort call on new (#171)

Pim · 366bc17f97 · 2024-07-01 17:15:50 -04:00
    feat: show post link title for comments on user page (#169)

Matthew Esposito · d9e7681004 · 2024-06-29 13:28:18 -04:00
    v0.35.1

Matthew Esposito · f74d1affb6 · 2024-06-29 13:26:09 -04:00
    fix(posts): manually sort by flags (#168)
    * fix(posts): manually sort by flags
    * fix(posts): shorten sort call

Matthew Esposito · f44638a2cb · 2024-06-29 12:00:34 -04:00
    v0.35.0

Matthew Esposito · beb4cf193b · 2024-06-29 11:48:42 -04:00
    fix(posts): manually sort by created date (#166)

Matthew Esposito · c565ebfb01 · 2024-06-29 10:44:33 -04:00
    refactor(log): update some logs

Matthew Esposito · 459a8e1245 · 2024-06-29 00:20:19 -04:00
    refactor(log): shorten some logs

Matthew Esposito · 0f7eba717e · 2024-06-28 22:41:36 -04:00
    fix(client): Handle invalid reddit response of base URL location

Matthew Esposito · ea87ec33a1 · 2024-06-28 22:28:58 -04:00
    fix(subreddit): handle plus-encoding errors even better (#163)
    * fix(subreddit): handle plus-encoding errors even better
    * chore(clippy): fix lint

Matthew Esposito · 102cd2f23f · 2024-06-28 18:17:00 -04:00
    Merge pull request #162 from redlib-org/oauth_arc_swap
    fix(oauth): arc_swap

Matthew Esposito · 3b2ad212d5 · 2024-06-28 18:14:47 -04:00
    fix(oauth): arc_swap

Matthew Esposito · 4dc7ff8165 · 2024-06-27 23:35:51 -04:00
    Merge pull request #160 from redlib-org/oauth_oppenheimer
    fix(oauth): even more atomics to avoid simultaneous token rollover

Matthew Esposito · 2f8a38d8c7 · 2024-06-27 23:34:27 -04:00
    chore(clippy): fix lint

Matthew Esposito · 13083e999c · 2024-06-27 23:32:17 -04:00
    fix(oauth): handle extremely rare race condition by atomically compare_exchanging

Matthew Esposito · 4e2ec3fbc9 · 2024-06-27 23:29:55 -04:00
    fix(oauth): handle case where a rate limit sneaks in

Matthew Esposito · 89313f73e6 · 2024-06-27 23:26:31 -04:00
    fix(oauth): atomics to avoid simultaneous token rollover

9 changed files with 171 additions and 77 deletions

Cargo.lock (generated)

@@ -71,6 +71,12 @@ version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
 
+[[package]]
+name = "arc-swap"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+
 [[package]]
 name = "askama"
 version = "0.12.1"
@@ -1043,6 +1049,7 @@ dependencies = [
 name = "redsunlib"
 version = "0.35.1"
 dependencies = [
+ "arc-swap",
  "askama",
  "base64",
  "brotli",

Cargo.toml

@@ -2,7 +2,7 @@
 name = "redsunlib"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
-repository = "https://git.stardust.wtf/iridium/redlib"
+repository = "https://git.stardust.wtf/iridium/redsunlib"
 version = "0.35.1"
 authors = [
     "Matthew Esposito <matt+cargo@matthew.science>",
@@ -42,6 +42,7 @@ fastrand = "2.0.1"
 log = "0.4.20"
 pretty_env_logger = "0.5.0"
 dotenvy = "0.15.7"
+arc-swap = "1.7.1"
 
 [dev-dependencies]
 lipsum = "0.9.0"

src/client.rs

@@ -1,7 +1,9 @@
+use arc_swap::ArcSwap;
 use cached::proc_macro::cached;
 use futures_lite::future::block_on;
 use futures_lite::{future::Boxed, FutureExt};
 use hyper::client::HttpConnector;
+use hyper::header::HeaderValue;
 use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri};
 use hyper_rustls::HttpsConnector;
 use libflate::gzip;
@@ -11,9 +13,8 @@ use percent_encoding::{percent_encode, CONTROLS};
 use serde_json::Value;
 use std::sync::atomic::Ordering;
-use std::sync::atomic::{AtomicU16, Ordering::SeqCst};
+use std::sync::atomic::{AtomicBool, AtomicU16};
 use std::{io, result::Result};
-use tokio::sync::RwLock;
 
 use crate::dbg_msg;
 use crate::oauth::{force_refresh_token, token_daemon, Oauth};
@@ -21,6 +22,7 @@ use crate::server::RequestExt;
 use crate::utils::format_url;
 
 const REDDIT_URL_BASE: &str = "https://oauth.reddit.com";
+const ALTERNATIVE_REDDIT_URL_BASE: &str = "https://www.reddit.com";
 
 pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
     let https = hyper_rustls::HttpsConnectorBuilder::new()
@@ -32,14 +34,16 @@ pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
     client::Client::builder().build(https)
 });
 
-pub static OAUTH_CLIENT: Lazy<RwLock<Oauth>> = Lazy::new(|| {
+pub static OAUTH_CLIENT: Lazy<ArcSwap<Oauth>> = Lazy::new(|| {
     let client = block_on(Oauth::new());
     tokio::spawn(token_daemon());
-    RwLock::new(client)
+    ArcSwap::new(client.into())
 });
 
 pub static OAUTH_RATELIMIT_REMAINING: AtomicU16 = AtomicU16::new(99);
+pub static OAUTH_IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false);
 
 /// Gets the canonical path for a resource on Reddit. This is accomplished by
 /// making a `HEAD` request to Reddit at the path given in `path`.
 ///
@@ -175,7 +179,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
     let client: Client<_, Body> = CLIENT.clone();
 
     let (token, vendor_id, device_id, user_agent, loid) = {
-        let client = block_on(OAUTH_CLIENT.read());
+        let client = OAUTH_CLIENT.load_full();
         (
             client.token.clone(),
             client.headers_map.get("Client-Vendor-Id").cloned().unwrap_or_default(),
@@ -219,12 +223,13 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
             if !redirect {
                 return Ok(response);
             };
+            let location_header = response.headers().get(header::LOCATION);
+            if location_header == Some(&HeaderValue::from_static("https://www.reddit.com/")) {
+                return Err("Reddit response was invalid".to_string());
+            }
             return request(
                 method,
-                response
-                    .headers()
-                    .get(header::LOCATION)
+                location_header
                     .map(|val| {
                         // We need to make adjustments to the URI
                         // we get back from Reddit. Namely, we
@@ -237,7 +242,11 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
                         // required.
                         //
                         // 2. Percent-encode the path.
-                        let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string();
+                        let new_path = percent_encode(val.as_bytes(), CONTROLS)
+                            .to_string()
+                            .trim_start_matches(REDDIT_URL_BASE)
+                            .trim_start_matches(ALTERNATIVE_REDDIT_URL_BASE)
+                            .to_string();
                         format!("{new_path}{}raw_json=1", if new_path.contains('?') { "&" } else { "?" })
                     })
                     .unwrap_or_default()
@@ -296,7 +305,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
             }
         }
         Err(e) => {
-            dbg_msg!("{} {}: {}", method, path, e);
+            dbg_msg!("{method} {REDDIT_URL_BASE}{path}: {}", e);
 
             Err(e.to_string())
         }
@@ -318,36 +327,28 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
     // First, handle rolling over the OAUTH_CLIENT if need be.
     let current_rate_limit = OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst);
-    if current_rate_limit < 10 {
+    let is_rolling_over = OAUTH_IS_ROLLING_OVER.load(Ordering::SeqCst);
+    if current_rate_limit < 10 && !is_rolling_over {
         warn!("Rate limit {current_rate_limit} is low. Spawning force_refresh_token()");
-        OAUTH_RATELIMIT_REMAINING.store(99, Ordering::SeqCst);
         tokio::spawn(force_refresh_token());
     }
     OAUTH_RATELIMIT_REMAINING.fetch_sub(1, Ordering::SeqCst);
 
     // Fetch the url...
     match reddit_get(path.clone(), quarantine).await {
         Ok(response) => {
             let status = response.status();
 
-            // Ratelimit remaining
-            if let Some(Ok(remaining)) = response.headers().get("x-ratelimit-remaining").map(|val| val.to_str()) {
-                trace!("Ratelimit remaining: {}", remaining);
-                if let Ok(remaining) = remaining.parse::<f32>().map(|f| f.round() as u16) {
-                    OAUTH_RATELIMIT_REMAINING.store(remaining, SeqCst);
-                } else {
-                    warn!("Failed to parse rate limit {remaining} from header.");
-                }
-            }
-
-            // Ratelimit used
-            if let Some(Ok(used)) = response.headers().get("x-ratelimit-used").map(|val| val.to_str()) {
-                trace!("Ratelimit used: {}", used);
-            }
-
-            // Ratelimit reset
-            let reset = if let Some(Ok(reset)) = response.headers().get("x-ratelimit-reset").map(|val| val.to_str()) {
-                trace!("Ratelimit reset: {}", reset);
-                Some(reset.to_string())
+            let reset: Option<String> = if let (Some(remaining), Some(reset), Some(used)) = (
+                response.headers().get("x-ratelimit-remaining").and_then(|val| val.to_str().ok().map(|s| s.to_string())),
+                response.headers().get("x-ratelimit-reset").and_then(|val| val.to_str().ok().map(|s| s.to_string())),
+                response.headers().get("x-ratelimit-used").and_then(|val| val.to_str().ok().map(|s| s.to_string())),
+            ) {
+                trace!(
+                    "Ratelimit remaining: Header says {remaining}, we have {current_rate_limit}. Resets in {reset}. Rollover: {}. Ratelimit used: {used}",
+                    if is_rolling_over { "yes" } else { "no" },
+                );
+                Some(reset)
             } else {
                 None
             };
@@ -358,8 +359,13 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
                 let has_remaining = body.has_remaining();
 
                 if !has_remaining {
+                    // Rate limited, so spawn a force_refresh_token()
+                    tokio::spawn(force_refresh_token());
                     return match reset {
-                        Some(val) => Err(format!("Reddit rate limit exceeded. Will reset in: {val}")),
+                        Some(val) => Err(format!(
+                            "Reddit rate limit exceeded. Try refreshing in a few seconds.\
+                             Rate limit will reset in: {val}"
+                        )),
                        None => Err("Reddit rate limit exceeded".to_string()),
                    };
                }
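
The change that threads through this file is swapping `tokio::sync::RwLock<Oauth>` for `arc_swap::ArcSwap<Oauth>`, so the hot request path reads the OAuth client without taking any lock. A minimal standalone sketch of that pattern, assuming arc-swap 1.7 and once_cell (the `Token` type and names are illustrative, not from the codebase):

```rust
use std::sync::Arc;

use arc_swap::ArcSwap;
use once_cell::sync::Lazy;

// Hypothetical stand-in for the real `Oauth` struct.
struct Token {
    value: String,
}

// Readers take a cheap snapshot; a writer publishes a whole new Arc.
static CURRENT: Lazy<ArcSwap<Token>> = Lazy::new(|| ArcSwap::new(Arc::new(Token { value: "initial".into() })));

fn read_path() -> String {
    // load_full() returns an Arc clone (a refcount bump) and never blocks,
    // which is why the diff can drop block_on(OAUTH_CLIENT.read()).
    let snapshot = CURRENT.load_full();
    snapshot.value.clone()
}

fn refresh() {
    // Build the replacement off to the side, then swap it in atomically.
    // Readers still holding the old snapshot keep it until they drop it.
    CURRENT.swap(Arc::new(Token { value: "refreshed".into() }));
}

fn main() {
    assert_eq!(read_path(), "initial");
    refresh();
    assert_eq!(read_path(), "refreshed");
}
```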

src/oauth.rs

@@ -1,7 +1,7 @@
 use std::{collections::HashMap, sync::atomic::Ordering, time::Duration};
 
 use crate::{
-    client::{CLIENT, OAUTH_CLIENT, OAUTH_RATELIMIT_REMAINING},
+    client::{CLIENT, OAUTH_CLIENT, OAUTH_IS_ROLLING_OVER, OAUTH_RATELIMIT_REMAINING},
     oauth_resources::ANDROID_APP_VERSION_LIST,
 };
 use base64::{engine::general_purpose, Engine as _};
@@ -98,21 +98,13 @@ impl Oauth {
 
         Some(())
     }
-
-    async fn refresh(&mut self) -> Option<()> {
-        // Refresh is actually just a subsequent login with the same headers (without the old token
-        // or anything). This logic is handled in login, so we just call login again.
-        let refresh = self.login().await;
-        info!("Refreshing OAuth token... {}", if refresh.is_some() { "success" } else { "failed" });
-        refresh
-    }
 }
 
 pub async fn token_daemon() {
     // Monitor for refreshing token
     loop {
         // Get expiry time - be sure to not hold the read lock
-        let expires_in = { OAUTH_CLIENT.read().await.expires_in };
+        let expires_in = { OAUTH_CLIENT.load_full().expires_in };
 
         // sleep for the expiry time minus 2 minutes
         let duration = Duration::from_secs(expires_in - 120);
@@ -125,14 +117,22 @@ pub async fn token_daemon() {
 
         // Refresh token - in its own scope
         {
-            OAUTH_CLIENT.write().await.refresh().await;
+            force_refresh_token().await;
         }
     }
 }
 
 pub async fn force_refresh_token() {
+    if OAUTH_IS_ROLLING_OVER.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_err() {
+        trace!("Skipping refresh token roll over, already in progress");
+        return;
+    }
+
     trace!("Rolling over refresh token. Current rate limit: {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst));
-    OAUTH_CLIENT.write().await.refresh().await;
+    let new_client = Oauth::new().await;
+    OAUTH_CLIENT.swap(new_client.into());
+    OAUTH_RATELIMIT_REMAINING.store(99, Ordering::SeqCst);
+    OAUTH_IS_ROLLING_OVER.store(false, Ordering::SeqCst);
 }
 
@@ -180,21 +180,21 @@ fn choose<T: Copy>(list: &[T]) -> T {
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_client() {
-    assert!(!OAUTH_CLIENT.read().await.token.is_empty());
+    assert!(!OAUTH_CLIENT.load_full().token.is_empty());
 }
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_client_refresh() {
-    OAUTH_CLIENT.write().await.refresh().await.unwrap();
+    force_refresh_token().await;
 }
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_token_exists() {
-    assert!(!OAUTH_CLIENT.read().await.token.is_empty());
+    assert!(!OAUTH_CLIENT.load_full().token.is_empty());
 }
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_headers_len() {
-    assert!(OAUTH_CLIENT.read().await.headers_map.len() >= 3);
+    assert!(OAUTH_CLIENT.load_full().headers_map.len() >= 3);
 }
 
 #[test]
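
The `compare_exchange` gate above is what serializes rollovers: only the caller that flips `OAUTH_IS_ROLLING_OVER` from `false` to `true` proceeds, and every concurrent caller returns immediately. A reduced sketch of the idiom, with illustrative names:

```rust
use std::sync::atomic::{AtomicBool, Ordering};

static ROLLING_OVER: AtomicBool = AtomicBool::new(false);

fn try_rollover() {
    // compare_exchange(expected, new, ..) returns Err if the current value
    // was not `expected`, i.e. another task already claimed the rollover.
    if ROLLING_OVER.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_err() {
        return; // rollover already in progress; skip
    }

    // ... build the new client and publish it here ...

    // Clear the flag so the next low-rate-limit event can refresh again.
    ROLLING_OVER.store(false, Ordering::SeqCst);
}

fn main() {
    try_rollover(); // the first caller wins the exchange
    try_rollover(); // runs again here only because the flag was cleared
}
```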

src/subreddit.rs

@@ -64,7 +64,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
     let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
     let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
 
-    let mut sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
+    let sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
         if subscribed.is_empty() {
             "popular".to_string()
         } else {
@@ -84,11 +84,6 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
         return Ok(redirect(&["/user/", &sub_name[2..]].concat()));
     }
 
-    // If multi-sub, replace + with url encoded +
-    if sub_name.contains('+') {
-        sub_name = sub_name.replace('+', "%2B");
-    }
-
     // Request subreddit metadata
     let sub = if !sub_name.contains('+') && sub_name != subscribed && sub_name != "popular" && sub_name != "all" {
         // Regular subreddit
@@ -124,7 +119,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
         params.push_str(&format!("&geo_filter={geo_filter}"));
     }
 
-    let path = format!("/r/{sub_name}/{sort}.json?{}{params}", req.uri().query().unwrap_or_default());
+    let path = format!("/r/{}/{sort}.json?{}{params}", sub_name.replace('+', "%2B"), req.uri().query().unwrap_or_default());
     let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
     let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
     let filters = get_filters(&req);
@@ -150,6 +145,10 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
             let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
             let no_posts = posts.is_empty();
             let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
+            if sort == "new" {
+                posts.sort_by(|a, b| b.created_ts.cmp(&a.created_ts));
+                posts.sort_by(|a, b| b.flags.stickied.cmp(&a.flags.stickied));
+            }
             Ok(template(&SubredditTemplate {
                 sub,
                 posts,
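
The two consecutive `sort_by` calls added to the `sort == "new"` branch rely on `Vec::sort_by` being a stable sort: the first pass orders posts newest-first, and the second pass only moves stickied posts ahead, preserving the date order inside each group. A small self-contained example of the same trick, with made-up data:

```rust
fn main() {
    // (id, created_ts, stickied) -- hypothetical posts
    let mut posts = vec![("a", 200, false), ("sticky", 100, true), ("c", 300, false)];

    // Pass 1: newest first.
    posts.sort_by(|a, b| b.1.cmp(&a.1));
    // Pass 2: stickied first; stability keeps each group newest-first.
    posts.sort_by(|a, b| b.2.cmp(&a.2));

    assert_eq!(posts, vec![("sticky", 100, true), ("c", 300, false), ("a", 200, false)]);
}
```

The same file also stops mutating `sub_name` for multireddits and instead applies the `+` to `%2B` replacement only at the moment the request path is built, so the un-encoded name stays available for the checks above it.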

src/utils.rs

@@ -169,6 +169,7 @@ pub struct Media {
     pub width: i64,
     pub height: i64,
     pub poster: String,
+    pub download_name: String,
 }
 
 impl Media {
@@ -235,6 +236,15 @@ impl Media {
 
         let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));
 
+        let download_name = if post_type == "image" || post_type == "gif" || post_type == "video" {
+            let permalink_base = url_path_basename(data["permalink"].as_str().unwrap_or_default());
+            let media_url_base = url_path_basename(url_val.as_str().unwrap_or_default());
+
+            format!("redlib_{permalink_base}_{media_url_base}")
+        } else {
+            String::new()
+        };
+
         (
             post_type.to_string(),
             Self {
@@ -245,6 +255,7 @@ impl Media {
                 width: source["width"].as_i64().unwrap_or_default(),
                 height: source["height"].as_i64().unwrap_or_default(),
                 poster: format_url(source["url"].as_str().unwrap_or_default()),
+                download_name,
             },
             gallery,
         )
@@ -298,6 +309,7 @@ pub struct Post {
     pub body: String,
     pub author: Author,
     pub permalink: String,
+    pub link_title: String,
     pub poll: Option<Poll>,
     pub score: (String, String),
     pub upvote_ratio: i64,
@@ -309,6 +321,7 @@ pub struct Post {
     pub domain: String,
     pub rel_time: String,
     pub created: String,
+    pub created_ts: u64,
     pub num_duplicates: u64,
     pub comments: (String, String),
     pub gallery: Vec<GalleryMedia>,
@@ -340,6 +353,7 @@ impl Post {
             let data = &post["data"];
 
             let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
+            let created_ts = data["created_utc"].as_f64().unwrap_or_default().round() as u64;
             let score = data["score"].as_i64().unwrap_or_default();
             let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
             let title = val(post, "title");
@@ -386,6 +400,7 @@ impl Post {
                     width: data["thumbnail_width"].as_i64().unwrap_or_default(),
                     height: data["thumbnail_height"].as_i64().unwrap_or_default(),
                     poster: String::new(),
+                    download_name: String::new(),
                 },
                 media,
                 domain: val(post, "domain"),
@@ -409,9 +424,11 @@ impl Post {
                     stickied: data["stickied"].as_bool().unwrap_or_default() || data["pinned"].as_bool().unwrap_or_default(),
                 },
                 permalink: val(post, "permalink"),
+                link_title: val(post, "link_title"),
                 poll: Poll::parse(&data["poll_data"]),
                 rel_time,
                 created,
+                created_ts,
                 num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
                 comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
                 gallery,
@@ -420,7 +437,6 @@ impl Post {
                 ws_url: val(post, "websocket_url"),
             });
         }
-
         Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
     }
 }
@@ -691,6 +707,8 @@ pub async fn parse_post(post: &Value) -> Post {
     // Determine the type of media along with the media URL
     let (post_type, media, gallery) = Media::parse(&post["data"]).await;
 
+    let created_ts = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as u64;
+
     let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
 
     let permalink = val(post, "permalink");
@@ -727,6 +745,7 @@ pub async fn parse_post(post: &Value) -> Post {
             distinguished: val(post, "distinguished"),
         },
         permalink,
+        link_title: val(post, "link_title"),
         poll,
         score: format_num(score),
         upvote_ratio: ratio as i64,
@@ -738,6 +757,7 @@ pub async fn parse_post(post: &Value) -> Post {
             width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
             height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
             poster: String::new(),
+            download_name: String::new(),
         },
         flair: Flair {
             flair_parts: FlairPart::parse(
@@ -761,6 +781,7 @@ pub async fn parse_post(post: &Value) -> Post {
         domain: val(post, "domain"),
         rel_time,
         created,
+        created_ts,
         num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
         comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
         gallery,
@@ -1120,6 +1141,20 @@ pub async fn nsfw_landing(req: Request<Body>, req_url: String) -> Result<Respons
 
     Ok(Response::builder().status(403).header("content-type", "text/html").body(body.into()).unwrap_or_default())
 }
+
+// Returns the last (non-empty) segment of a path string
+pub fn url_path_basename(path: &str) -> String {
+    let url_result = Url::parse(format!("https://libredd.it/{path}").as_str());
+
+    if url_result.is_err() {
+        path.to_string()
+    } else {
+        let mut url = url_result.unwrap();
+        url.path_segments_mut().unwrap().pop_if_empty();
+
+        url.path_segments().unwrap().last().unwrap().to_string()
+    }
+}
 
 #[cfg(test)]
 mod tests {
     use super::{format_num, format_url, rewrite_urls};
@@ -1228,3 +1263,19 @@ fn test_rewriting_image_links() {
     let output = r#"<p><figure><a href="/preview/pre/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"><img loading="lazy" src="/preview/pre/6awags382xo31.png?width=2560&amp;format=png&amp;auto=webp&amp;s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"></a><figcaption>caption 1</figcaption></figure></p"#;
     assert_eq!(rewrite_urls(input), output);
 }
+
+#[test]
+fn test_url_path_basename() {
+    // without trailing slash
+    assert_eq!(url_path_basename("/first/last"), "last");
+    // with trailing slash
+    assert_eq!(url_path_basename("/first/last/"), "last");
+    // with query parameters
+    assert_eq!(url_path_basename("/first/last/?some=query"), "last");
+    // file path
+    assert_eq!(url_path_basename("/cdn/image.jpg"), "image.jpg");
+    // when a full url is passed instead of just a path
+    assert_eq!(url_path_basename("https://doma.in/first/last"), "last");
+    // empty path
+    assert_eq!(url_path_basename("/"), "");
+}
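
The new `url_path_basename` helper leans on the `url` crate: it anchors the (possibly relative) path to a dummy base so `Url::parse` accepts it, pops a trailing empty segment, and returns the last one. Assuming that helper is in scope, a hypothetical permalink/media pair shows how `Media::parse` assembles `download_name`:

```rust
fn main() {
    // Inputs are made up; the format string matches the one in Media::parse.
    let permalink_base = url_path_basename("/r/rust/comments/abc123/some_post_title/");
    let media_url_base = url_path_basename("https://i.redd.it/xyz789.jpg");

    assert_eq!(format!("redlib_{permalink_base}_{media_url_base}"), "redlib_some_post_title_xyz789.jpg");
}
```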

static/style.css

@@ -1110,12 +1110,12 @@ a.search_subreddit:hover {
     margin-right: 15px;
 }
 
-#post_links > li.desktop_item {
+.desktop_item {
     display: auto;
 }
 
 @media screen and (min-width: 481px) {
-    #post_links > li.mobile_item {
+    .mobile_item {
         display: none;
     }
 }
@@ -1255,10 +1255,28 @@ a.search_subreddit:hover {
     min-width: 0;
 }
 
-.comment_data > * {
+.comment:has([id]) .comment_data > * {
     margin-right: 5px;
 }
 
+.comment:not([id]) .comment_data {
+    display: inline-flex;
+    max-width: 100%;
+}
+
+.comment:not([id]) .comment_data > * {
+    flex: 0 0 auto;
+}
+
+.comment:not([id]) .comment_data > .comment_link {
+    display: -webkit-box;
+    -webkit-line-clamp: 1;
+    -webkit-box-orient: vertical;
+    word-break: break-all;
+    overflow: hidden;
+    flex: 0 1 auto;
+}
+
 .comment_image {
     max-width: 500px;
     align-self: center;
@@ -1766,10 +1784,11 @@ td, th {
     }
 
     #post_links > li { margin-right: 10px }
-    #post_links > li.desktop_item { display: none }
-    #post_links > li.mobile_item { display: auto }
     .post_footer > p > span#upvoted { display: none }
 
+    .desktop_item { display: none }
+    .mobile_item { display: auto }
+
     .popup {
         width: auto;
     }

templates/user.html

@@ -63,8 +63,10 @@
                 </div>
                 <details class="comment_right" open>
                     <summary class="comment_data">
-                        <a class="comment_link" href="{{ post.permalink }}">Comment on r/{{ post.community }}</a>
-                        <span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
+                        <a class="comment_link" href="{{ post.permalink }}" title="{{ post.link_title }}">{{ post.link_title }}</a>
+                        <span class="created">&nbsp;in&nbsp;</span>
+                        <a href="/r/{{ post.community }}">r/{{ post.community }}</a>
+                        <span class="created" title="{{ post.created }}">&nbsp;{{ post.rel_time }}</span>
                     </summary>
                     <p class="comment_body">{{ post.body|safe }}</p>
                 </details>

templates/utils.html

@@ -168,13 +168,28 @@
         <span class="label"> Upvotes</span></div>
         <div class="post_footer">
             <ul id="post_links">
-                <li class="desktop_item"><a href="{{ post.permalink }}">permalink</a></li>
-                <li class="mobile_item"><a href="{{ post.permalink }}">link</a></li>
+                <li>
+                    <a href="{{ post.permalink }}">
+                        <span class="desktop_item">perma</span>link
+                    </a>
+                </li>
                 {% if post.num_duplicates > 0 %}
-                <li class="desktop_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">duplicates</a></li>
-                <li class="mobile_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">dupes</a></li>
+                <li>
+                    <a href="/r/{{ post.community }}/duplicates/{{ post.id }}">
+                        dup<span class="desktop_item">licat</span>es
+                    </a>
+                </li>
                 {% endif %}
                 {% call external_reddit_link(post.permalink) %}
+
+                {% if post.media.download_name != "" %}
+                <li>
+                    <a href="{{ post.media.url }}" download="{{ post.media.download_name }}">
+                        <span class="mobile_item">dl</span>
+                        <span class="desktop_item">download</span>
+                    </a>
+                </li>
+                {% endif %}
             </ul>
             <p>{{ post.upvote_ratio }}%<span id="upvoted"> Upvoted</span></p>
         </div>
@@ -182,8 +197,7 @@
 {%- endmacro %}
 
 {% macro external_reddit_link(permalink) %}
-{% for dev_type in ["desktop", "mobile"] %}
-<li class="{{ dev_type }}_item">
+<li>
     <a
     {% if prefs.disable_visit_reddit_confirmation != "on" %}
         href="#popup"
@@ -197,7 +211,6 @@
     {% call visit_reddit_confirmation(permalink) %}
     {% endif %}
     </li>
-{% endfor %}
 {% endmacro %}
 
 {% macro post_in_list(post) -%}
@@ -253,11 +266,7 @@
             {% endif %}
             </a>
         </div>
-        {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
-        <div class="post_media_content">
-            <video class="post_media_video short {%if post_should_be_blurred %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
-        </div>
-        {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
+        {% else if (prefs.layout.is_empty() || prefs.layout == "card") && (post.post_type == "gif" || post.post_type == "video") %}
         {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() || prefs.ffmpeg_video_downloads == "on" && !post.media.alt_url.is_empty() %}
             <div class="post_media_content">
                 <video class="post_media_video short {%if post_should_be_blurred %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" controls preload="none">