Compare commits
No commits in common. "7e85ae25b16227d845083f0565e48c96a899d421" and "1644e68e43a52a229fbc4ef18c8bb97e78bd4f8e" have entirely different histories.
7e85ae25b1 ... 1644e68e43
Cargo.lock (generated): 7 changes
@@ -71,12 +71,6 @@ version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
 
-[[package]]
-name = "arc-swap"
-version = "1.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
-
 [[package]]
 name = "askama"
 version = "0.12.1"
@@ -1049,7 +1043,6 @@ dependencies = [
 name = "redsunlib"
 version = "0.35.1"
 dependencies = [
-"arc-swap",
 "askama",
 "base64",
 "brotli",
Cargo.toml
@@ -2,7 +2,7 @@
 name = "redsunlib"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
-repository = "https://git.stardust.wtf/iridium/redsunlib"
+repository = "https://git.stardust.wtf/iridium/redlib"
 version = "0.35.1"
 authors = [
 "Matthew Esposito <matt+cargo@matthew.science>",
@@ -42,7 +42,6 @@ fastrand = "2.0.1"
 log = "0.4.20"
 pretty_env_logger = "0.5.0"
 dotenvy = "0.15.7"
-arc-swap = "1.7.1"
 
 [dev-dependencies]
 lipsum = "0.9.0"
src/client.rs
@@ -1,9 +1,7 @@
-use arc_swap::ArcSwap;
 use cached::proc_macro::cached;
 use futures_lite::future::block_on;
 use futures_lite::{future::Boxed, FutureExt};
 use hyper::client::HttpConnector;
-use hyper::header::HeaderValue;
 use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri};
 use hyper_rustls::HttpsConnector;
 use libflate::gzip;
@@ -13,8 +11,9 @@ use percent_encoding::{percent_encode, CONTROLS};
 use serde_json::Value;
 
 use std::sync::atomic::Ordering;
-use std::sync::atomic::{AtomicBool, AtomicU16};
+use std::sync::atomic::{AtomicU16, Ordering::SeqCst};
 use std::{io, result::Result};
+use tokio::sync::RwLock;
 
 use crate::dbg_msg;
 use crate::oauth::{force_refresh_token, token_daemon, Oauth};
@@ -22,7 +21,6 @@ use crate::server::RequestExt;
 use crate::utils::format_url;
 
 const REDDIT_URL_BASE: &str = "https://oauth.reddit.com";
-const ALTERNATIVE_REDDIT_URL_BASE: &str = "https://www.reddit.com";
 
 pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
 let https = hyper_rustls::HttpsConnectorBuilder::new()
@@ -34,16 +32,14 @@ pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
 client::Client::builder().build(https)
 });
 
-pub static OAUTH_CLIENT: Lazy<ArcSwap<Oauth>> = Lazy::new(|| {
+pub static OAUTH_CLIENT: Lazy<RwLock<Oauth>> = Lazy::new(|| {
 let client = block_on(Oauth::new());
 tokio::spawn(token_daemon());
-ArcSwap::new(client.into())
+RwLock::new(client)
 });
 
 pub static OAUTH_RATELIMIT_REMAINING: AtomicU16 = AtomicU16::new(99);
 
-pub static OAUTH_IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false);
-
 /// Gets the canonical path for a resource on Reddit. This is accomplished by
 /// making a `HEAD` request to Reddit at the path given in `path`.
 ///
@@ -179,7 +175,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 let client: Client<_, Body> = CLIENT.clone();
 
 let (token, vendor_id, device_id, user_agent, loid) = {
-let client = OAUTH_CLIENT.load_full();
+let client = block_on(OAUTH_CLIENT.read());
 (
 client.token.clone(),
 client.headers_map.get("Client-Vendor-Id").cloned().unwrap_or_default(),
@@ -223,13 +219,12 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 if !redirect {
 return Ok(response);
 };
-let location_header = response.headers().get(header::LOCATION);
-if location_header == Some(&HeaderValue::from_static("https://www.reddit.com/")) {
-return Err("Reddit response was invalid".to_string());
-}
 return request(
 method,
-location_header
+response
+.headers()
+.get(header::LOCATION)
 .map(|val| {
 // We need to make adjustments to the URI
 // we get back from Reddit. Namely, we
@@ -242,11 +237,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 // required.
 //
 // 2. Percent-encode the path.
-let new_path = percent_encode(val.as_bytes(), CONTROLS)
-.to_string()
-.trim_start_matches(REDDIT_URL_BASE)
-.trim_start_matches(ALTERNATIVE_REDDIT_URL_BASE)
-.to_string();
+let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string();
 format!("{new_path}{}raw_json=1", if new_path.contains('?') { "&" } else { "?" })
 })
 .unwrap_or_default()
@@ -305,7 +296,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 }
 }
 Err(e) => {
-dbg_msg!("{method} {REDDIT_URL_BASE}{path}: {}", e);
+dbg_msg!("{} {}: {}", method, path, e);
 
 Err(e.to_string())
 }
@@ -327,28 +318,36 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 
 // First, handle rolling over the OAUTH_CLIENT if need be.
 let current_rate_limit = OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst);
-let is_rolling_over = OAUTH_IS_ROLLING_OVER.load(Ordering::SeqCst);
-if current_rate_limit < 10 && !is_rolling_over {
+if current_rate_limit < 10 {
 warn!("Rate limit {current_rate_limit} is low. Spawning force_refresh_token()");
+OAUTH_RATELIMIT_REMAINING.store(99, Ordering::SeqCst);
 tokio::spawn(force_refresh_token());
 }
-OAUTH_RATELIMIT_REMAINING.fetch_sub(1, Ordering::SeqCst);
 
 // Fetch the url...
 match reddit_get(path.clone(), quarantine).await {
 Ok(response) => {
 let status = response.status();
 
-let reset: Option<String> = if let (Some(remaining), Some(reset), Some(used)) = (
-response.headers().get("x-ratelimit-remaining").and_then(|val| val.to_str().ok().map(|s| s.to_string())),
-response.headers().get("x-ratelimit-reset").and_then(|val| val.to_str().ok().map(|s| s.to_string())),
-response.headers().get("x-ratelimit-used").and_then(|val| val.to_str().ok().map(|s| s.to_string())),
-) {
-trace!(
-"Ratelimit remaining: Header says {remaining}, we have {current_rate_limit}. Resets in {reset}. Rollover: {}. Ratelimit used: {used}",
-if is_rolling_over { "yes" } else { "no" },
-);
-Some(reset)
+// Ratelimit remaining
+if let Some(Ok(remaining)) = response.headers().get("x-ratelimit-remaining").map(|val| val.to_str()) {
+trace!("Ratelimit remaining: {}", remaining);
+if let Ok(remaining) = remaining.parse::<f32>().map(|f| f.round() as u16) {
+OAUTH_RATELIMIT_REMAINING.store(remaining, SeqCst);
+} else {
+warn!("Failed to parse rate limit {remaining} from header.");
+}
+}
+
+// Ratelimit used
+if let Some(Ok(used)) = response.headers().get("x-ratelimit-used").map(|val| val.to_str()) {
+trace!("Ratelimit used: {}", used);
+}
+
+// Ratelimit reset
+let reset = if let Some(Ok(reset)) = response.headers().get("x-ratelimit-reset").map(|val| val.to_str()) {
+trace!("Ratelimit reset: {}", reset);
+Some(reset.to_string())
 } else {
 None
 };
@@ -359,13 +358,8 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 let has_remaining = body.has_remaining();
 
 if !has_remaining {
-// Rate limited, so spawn a force_refresh_token()
-tokio::spawn(force_refresh_token());
 return match reset {
-Some(val) => Err(format!(
-"Reddit rate limit exceeded. Try refreshing in a few seconds.\
-Rate limit will reset in: {val}"
-)),
+Some(val) => Err(format!("Reddit rate limit exceeded. Will reset in: {val}")),
 None => Err("Reddit rate limit exceeded".to_string()),
 };
 }
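Note on the client change above: the two commits share the global OAuth client in different ways. On the 7e85ae25b1 side, OAUTH_CLIENT is an ArcSwap, so request() reads a snapshot lock-free with load_full() and a refresh swaps in a whole new client; on the 1644e68e43 side it is a Tokio RwLock that readers access via block_on(OAUTH_CLIENT.read()) and the refresh path mutates under write(). The following is a minimal standalone sketch of the two access patterns, not code from either commit; the Token struct is hypothetical, and only the arc_swap, once_cell, and tokio crates are assumed.

use std::sync::Arc;

use arc_swap::ArcSwap;
use once_cell::sync::Lazy;
use tokio::sync::RwLock;

// Hypothetical stand-in for the Oauth struct.
#[derive(Clone, Default)]
struct Token(String);

// ArcSwap variant (7e85ae25b1 side): readers take an Arc snapshot without locking,
// and a refresh replaces the whole value atomically.
static TOKEN_SWAP: Lazy<ArcSwap<Token>> = Lazy::new(|| ArcSwap::from_pointee(Token::default()));

// RwLock variant (1644e68e43 side): readers await a shared lock,
// and a refresh mutates the value under the write lock.
static TOKEN_LOCK: Lazy<RwLock<Token>> = Lazy::new(|| RwLock::new(Token::default()));

#[tokio::main]
async fn main() {
    // Lock-free read, then whole-value replacement.
    let snapshot: Arc<Token> = TOKEN_SWAP.load_full();
    println!("swap token: {}", snapshot.0);
    TOKEN_SWAP.swap(Arc::new(Token("refreshed".into())));

    // Awaited shared read, then in-place mutation under the write lock.
    println!("lock token: {}", TOKEN_LOCK.read().await.0);
    TOKEN_LOCK.write().await.0 = "refreshed".into();
}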
src/oauth.rs: 32 changes
@@ -1,7 +1,7 @@
 use std::{collections::HashMap, sync::atomic::Ordering, time::Duration};
 
 use crate::{
-client::{CLIENT, OAUTH_CLIENT, OAUTH_IS_ROLLING_OVER, OAUTH_RATELIMIT_REMAINING},
+client::{CLIENT, OAUTH_CLIENT, OAUTH_RATELIMIT_REMAINING},
 oauth_resources::ANDROID_APP_VERSION_LIST,
 };
 use base64::{engine::general_purpose, Engine as _};
@@ -98,13 +98,21 @@ impl Oauth {
 
 Some(())
 }
+
+async fn refresh(&mut self) -> Option<()> {
+// Refresh is actually just a subsequent login with the same headers (without the old token
+// or anything). This logic is handled in login, so we just call login again.
+let refresh = self.login().await;
+info!("Refreshing OAuth token... {}", if refresh.is_some() { "success" } else { "failed" });
+refresh
+}
 }
 
 pub async fn token_daemon() {
 // Monitor for refreshing token
 loop {
 // Get expiry time - be sure to not hold the read lock
-let expires_in = { OAUTH_CLIENT.load_full().expires_in };
+let expires_in = { OAUTH_CLIENT.read().await.expires_in };
 
 // sleep for the expiry time minus 2 minutes
 let duration = Duration::from_secs(expires_in - 120);
@@ -117,22 +125,14 @@ pub async fn token_daemon() {
 
 // Refresh token - in its own scope
 {
-force_refresh_token().await;
+OAUTH_CLIENT.write().await.refresh().await;
 }
 }
 }
 
 pub async fn force_refresh_token() {
-if OAUTH_IS_ROLLING_OVER.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_err() {
-trace!("Skipping refresh token roll over, already in progress");
-return;
-}
-
 trace!("Rolling over refresh token. Current rate limit: {}", OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst));
-let new_client = Oauth::new().await;
-OAUTH_CLIENT.swap(new_client.into());
-OAUTH_RATELIMIT_REMAINING.store(99, Ordering::SeqCst);
-OAUTH_IS_ROLLING_OVER.store(false, Ordering::SeqCst);
+OAUTH_CLIENT.write().await.refresh().await;
 }
 
 #[derive(Debug, Clone, Default)]
@@ -180,21 +180,21 @@ fn choose<T: Copy>(list: &[T]) -> T {
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_client() {
-assert!(!OAUTH_CLIENT.load_full().token.is_empty());
+assert!(!OAUTH_CLIENT.read().await.token.is_empty());
 }
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_client_refresh() {
-force_refresh_token().await;
+OAUTH_CLIENT.write().await.refresh().await.unwrap();
 }
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_token_exists() {
-assert!(!OAUTH_CLIENT.load_full().token.is_empty());
+assert!(!OAUTH_CLIENT.read().await.token.is_empty());
 }
 
 #[tokio::test(flavor = "multi_thread")]
 async fn test_oauth_headers_len() {
-assert!(OAUTH_CLIENT.load_full().headers_map.len() >= 3);
+assert!(OAUTH_CLIENT.read().await.headers_map.len() >= 3);
 }
 
 #[test]
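Note on the force_refresh_token() change above: the 7e85ae25b1 side guards the rollover with an AtomicBool (OAUTH_IS_ROLLING_OVER) and compare_exchange so that concurrent callers cannot start overlapping refreshes, while the 1644e68e43 side simply takes the write lock and calls refresh(). A standalone sketch of that compare_exchange guard follows; it is not code from either commit and the refresh body is elided.

use std::sync::atomic::{AtomicBool, Ordering};

// Mirrors the role of OAUTH_IS_ROLLING_OVER on the 7e85ae25b1 side.
static IS_ROLLING_OVER: AtomicBool = AtomicBool::new(false);

fn force_refresh() {
    // Only the first caller flips false -> true; every concurrent caller gets Err
    // and bails, so at most one rollover runs at a time.
    if IS_ROLLING_OVER.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_err() {
        println!("refresh already in progress, skipping");
        return;
    }

    // ... build and swap in a new OAuth client here ...

    // Clear the flag so a later low-rate-limit event can start another rollover.
    IS_ROLLING_OVER.store(false, Ordering::SeqCst);
}

fn main() {
    force_refresh();
    force_refresh(); // skipped only if the first call were still in progress
}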
src/subreddit.rs
@@ -64,7 +64,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
 let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
 let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
 
-let sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
+let mut sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
 if subscribed.is_empty() {
 "popular".to_string()
 } else {
@@ -84,6 +84,11 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
 return Ok(redirect(&["/user/", &sub_name[2..]].concat()));
 }
 
+// If multi-sub, replace + with url encoded +
+if sub_name.contains('+') {
+sub_name = sub_name.replace('+', "%2B");
+}
+
 // Request subreddit metadata
 let sub = if !sub_name.contains('+') && sub_name != subscribed && sub_name != "popular" && sub_name != "all" {
 // Regular subreddit
@@ -119,7 +124,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
 params.push_str(&format!("&geo_filter={geo_filter}"));
 }
 
-let path = format!("/r/{}/{sort}.json?{}{params}", sub_name.replace('+', "%2B"), req.uri().query().unwrap_or_default());
+let path = format!("/r/{sub_name}/{sort}.json?{}{params}", req.uri().query().unwrap_or_default());
 let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
 let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B");
 let filters = get_filters(&req);
@@ -145,10 +150,6 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
 let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
 let no_posts = posts.is_empty();
 let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
-if sort == "new" {
-posts.sort_by(|a, b| b.created_ts.cmp(&a.created_ts));
-posts.sort_by(|a, b| b.flags.stickied.cmp(&a.flags.stickied));
-}
 Ok(template(&SubredditTemplate {
 sub,
 posts,
src/utils.rs: 53 changes
@@ -169,7 +169,6 @@ pub struct Media {
 pub width: i64,
 pub height: i64,
 pub poster: String,
-pub download_name: String,
 }
 
 impl Media {
@@ -236,15 +235,6 @@ impl Media {
 
 let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));
 
-let download_name = if post_type == "image" || post_type == "gif" || post_type == "video" {
-let permalink_base = url_path_basename(data["permalink"].as_str().unwrap_or_default());
-let media_url_base = url_path_basename(url_val.as_str().unwrap_or_default());
-
-format!("redlib_{permalink_base}_{media_url_base}")
-} else {
-String::new()
-};
-
 (
 post_type.to_string(),
 Self {
@@ -255,7 +245,6 @@ impl Media {
 width: source["width"].as_i64().unwrap_or_default(),
 height: source["height"].as_i64().unwrap_or_default(),
 poster: format_url(source["url"].as_str().unwrap_or_default()),
-download_name,
 },
 gallery,
 )
@@ -309,7 +298,6 @@ pub struct Post {
 pub body: String,
 pub author: Author,
 pub permalink: String,
-pub link_title: String,
 pub poll: Option<Poll>,
 pub score: (String, String),
 pub upvote_ratio: i64,
@@ -321,7 +309,6 @@ pub struct Post {
 pub domain: String,
 pub rel_time: String,
 pub created: String,
-pub created_ts: u64,
 pub num_duplicates: u64,
 pub comments: (String, String),
 pub gallery: Vec<GalleryMedia>,
@@ -353,7 +340,6 @@ impl Post {
 let data = &post["data"];
 
 let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
-let created_ts = data["created_utc"].as_f64().unwrap_or_default().round() as u64;
 let score = data["score"].as_i64().unwrap_or_default();
 let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
 let title = val(post, "title");
@@ -400,7 +386,6 @@ impl Post {
 width: data["thumbnail_width"].as_i64().unwrap_or_default(),
 height: data["thumbnail_height"].as_i64().unwrap_or_default(),
 poster: String::new(),
-download_name: String::new(),
 },
 media,
 domain: val(post, "domain"),
@@ -424,11 +409,9 @@ impl Post {
 stickied: data["stickied"].as_bool().unwrap_or_default() || data["pinned"].as_bool().unwrap_or_default(),
 },
 permalink: val(post, "permalink"),
-link_title: val(post, "link_title"),
 poll: Poll::parse(&data["poll_data"]),
 rel_time,
 created,
-created_ts,
 num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
 comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
 gallery,
@@ -437,6 +420,7 @@ impl Post {
 ws_url: val(post, "websocket_url"),
 });
 }
+
 Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
 }
 }
@@ -707,8 +691,6 @@ pub async fn parse_post(post: &Value) -> Post {
 // Determine the type of media along with the media URL
 let (post_type, media, gallery) = Media::parse(&post["data"]).await;
 
-let created_ts = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as u64;
-
 let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
 
 let permalink = val(post, "permalink");
@@ -745,7 +727,6 @@ pub async fn parse_post(post: &Value) -> Post {
 distinguished: val(post, "distinguished"),
 },
 permalink,
-link_title: val(post, "link_title"),
 poll,
 score: format_num(score),
 upvote_ratio: ratio as i64,
@@ -757,7 +738,6 @@ pub async fn parse_post(post: &Value) -> Post {
 width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
 height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
 poster: String::new(),
-download_name: String::new(),
 },
 flair: Flair {
 flair_parts: FlairPart::parse(
@@ -781,7 +761,6 @@ pub async fn parse_post(post: &Value) -> Post {
 domain: val(post, "domain"),
 rel_time,
 created,
-created_ts,
 num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
 comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
 gallery,
@@ -1141,20 +1120,6 @@ pub async fn nsfw_landing(req: Request<Body>, req_url: String) -> Result<Respons
 Ok(Response::builder().status(403).header("content-type", "text/html").body(body.into()).unwrap_or_default())
 }
 
-// Returns the last (non-empty) segment of a path string
-pub fn url_path_basename(path: &str) -> String {
-let url_result = Url::parse(format!("https://libredd.it/{path}").as_str());
-
-if url_result.is_err() {
-path.to_string()
-} else {
-let mut url = url_result.unwrap();
-url.path_segments_mut().unwrap().pop_if_empty();
-
-url.path_segments().unwrap().last().unwrap().to_string()
-}
-}
-
 #[cfg(test)]
 mod tests {
 use super::{format_num, format_url, rewrite_urls};
@@ -1263,19 +1228,3 @@ fn test_rewriting_image_links() {
 let output = r#"<p><figure><a href="/preview/pre/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"><img loading="lazy" src="/preview/pre/6awags382xo31.png?width=2560&format=png&auto=webp&s=9c563aed4f07a91bdd249b5a3cea43a79710dcfc"></a><figcaption>caption 1</figcaption></figure></p"#;
 assert_eq!(rewrite_urls(input), output);
 }
-
-#[test]
-fn test_url_path_basename() {
-// without trailing slash
-assert_eq!(url_path_basename("/first/last"), "last");
-// with trailing slash
-assert_eq!(url_path_basename("/first/last/"), "last");
-// with query parameters
-assert_eq!(url_path_basename("/first/last/?some=query"), "last");
-// file path
-assert_eq!(url_path_basename("/cdn/image.jpg"), "image.jpg");
-// when a full url is passed instead of just a path
-assert_eq!(url_path_basename("https://doma.in/first/last"), "last");
-// empty path
-assert_eq!(url_path_basename("/"), "");
-}
static/style.css
@@ -1110,12 +1110,12 @@ a.search_subreddit:hover {
 margin-right: 15px;
 }
 
-.desktop_item {
+#post_links > li.desktop_item {
 display: auto;
 }
 
 @media screen and (min-width: 481px) {
-.mobile_item {
+#post_links > li.mobile_item {
 display: none;
 }
 }
@@ -1255,28 +1255,10 @@ a.search_subreddit:hover {
 min-width: 0;
 }
 
-.comment:has([id]) .comment_data > * {
+.comment_data > * {
 margin-right: 5px;
 }
 
-.comment:not([id]) .comment_data {
-display: inline-flex;
-max-width: 100%;
-}
-
-.comment:not([id]) .comment_data > * {
-flex: 0 0 auto;
-}
-
-.comment:not([id]) .comment_data > .comment_link {
-display: -webkit-box;
--webkit-line-clamp: 1;
--webkit-box-orient: vertical;
-word-break: break-all;
-overflow: hidden;
-flex: 0 1 auto;
-}
-
 .comment_image {
 max-width: 500px;
 align-self: center;
@@ -1784,11 +1766,10 @@ td, th {
 }
 
 #post_links > li { margin-right: 10px }
+#post_links > li.desktop_item { display: none }
+#post_links > li.mobile_item { display: auto }
 .post_footer > p > span#upvoted { display: none }
 
-.desktop_item { display: none }
-.mobile_item { display: auto }
-
 .popup {
 width: auto;
 }
@@ -63,10 +63,8 @@
 </div>
 <details class="comment_right" open>
 <summary class="comment_data">
-<a class="comment_link" href="{{ post.permalink }}" title="{{ post.link_title }}">{{ post.link_title }}</a>
-<span class="created"> in </span>
-<a href="/r/{{ post.community }}">r/{{ post.community }}</a>
-<span class="created" title="{{ post.created }}"> {{ post.rel_time }}</span>
+<a class="comment_link" href="{{ post.permalink }}">Comment on r/{{ post.community }}</a>
+<span class="created" title="{{ post.created }}">{{ post.rel_time }}</span>
 </summary>
 <p class="comment_body">{{ post.body|safe }}</p>
 </details>
templates/utils.html
@@ -168,28 +168,13 @@
 <span class="label"> Upvotes</span></div>
 <div class="post_footer">
 <ul id="post_links">
-<li>
-<a href="{{ post.permalink }}">
-<span class="desktop_item">perma</span>link
-</a>
-</li>
+<li class="desktop_item"><a href="{{ post.permalink }}">permalink</a></li>
+<li class="mobile_item"><a href="{{ post.permalink }}">link</a></li>
 {% if post.num_duplicates > 0 %}
-<li>
-<a href="/r/{{ post.community }}/duplicates/{{ post.id }}">
-dup<span class="desktop_item">licat</span>es
-</a>
-</li>
+<li class="desktop_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">duplicates</a></li>
+<li class="mobile_item"><a href="/r/{{ post.community }}/duplicates/{{ post.id }}">dupes</a></li>
 {% endif %}
 {% call external_reddit_link(post.permalink) %}
-
-{% if post.media.download_name != "" %}
-<li>
-<a href="{{ post.media.url }}" download="{{ post.media.download_name }}">
-<span class="mobile_item">dl</span>
-<span class="desktop_item">download</span>
-</a>
-</li>
-{% endif %}
 </ul>
 <p>{{ post.upvote_ratio }}%<span id="upvoted"> Upvoted</span></p>
 </div>
@@ -197,7 +182,8 @@
 {%- endmacro %}
 
 {% macro external_reddit_link(permalink) %}
-<li>
+{% for dev_type in ["desktop", "mobile"] %}
+<li class="{{ dev_type }}_item">
 <a
 {% if prefs.disable_visit_reddit_confirmation != "on" %}
 href="#popup"
@@ -211,6 +197,7 @@
 {% call visit_reddit_confirmation(permalink) %}
 {% endif %}
 </li>
+{% endfor %}
 {% endmacro %}
 
 {% macro post_in_list(post) -%}
@@ -266,7 +253,11 @@
 {% endif %}
 </a>
 </div>
-{% else if (prefs.layout.is_empty() || prefs.layout == "card") && (post.post_type == "gif" || post.post_type == "video") %}
+{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
+<div class="post_media_content">
+<video class="post_media_video short {%if post_should_be_blurred %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+</div>
+{% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
 {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() || prefs.ffmpeg_video_downloads == "on" && !post.media.alt_url.is_empty() %}
 <div class="post_media_content">
 <video class="post_media_video short {%if post_should_be_blurred %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" controls preload="none">