Commit 9f9ae45f6e ("Add many Clippy's, fix many Clippy's")
Parent: 3e459f5415
@@ -90,12 +90,12 @@ pub async fn canonical_path(path: String) -> Result<Option<String>, String> {
 }
 
 pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
-	let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());
+	let mut url = format!("{format}?{}", req.uri().query().unwrap_or_default());
 
 	// For each parameter in request
-	for (name, value) in req.params().iter() {
+	for (name, value) in &req.params() {
 		// Fill the parameter value in the url
-		url = url.replace(&format!("{{{}}}", name), value);
+		url = url.replace(&format!("{{{name}}}"), value);
 	}
 
 	stream(&url, &req).await
@@ -103,12 +103,12 @@ pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, S
 
 async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
 	// First parameter is target URL (mandatory).
-	let uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
+	let parsed_uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
 
 	// Build the hyper client from the HTTPS connector.
-	let client: client::Client<_, hyper::Body> = CLIENT.clone();
+	let client: Client<_, Body> = CLIENT.clone();
 
-	let mut builder = Request::get(uri);
+	let mut builder = Request::get(parsed_uri);
 
 	// Copy useful headers from original request
 	for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
@@ -154,15 +154,15 @@ fn reddit_head(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, S
 	request(&Method::HEAD, path, false, quarantine)
 }
 
-/// Makes a request to Reddit. If `redirect` is `true`, request_with_redirect
+/// Makes a request to Reddit. If `redirect` is `true`, `request_with_redirect`
 /// will recurse on the URL that Reddit provides in the Location HTTP header
 /// in its response.
 fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
 	// Build Reddit URL from path.
-	let url = format!("{}{}", REDDIT_URL_BASE, path);
+	let url = format!("{REDDIT_URL_BASE}{path}");
 
 	// Construct the hyper client from the HTTPS connector.
-	let client: client::Client<_, hyper::Body> = CLIENT.clone();
+	let client: Client<_, Body> = CLIENT.clone();
 
 	let (token, vendor_id, device_id, user_agent, loid) = {
 		let client = block_on(OAUTH_CLIENT.read());
@@ -184,7 +184,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 			.header("X-Reddit-Device-Id", device_id)
 			.header("x-reddit-loid", loid)
 			.header("Host", "oauth.reddit.com")
-			.header("Authorization", &format!("Bearer {}", token))
+			.header("Authorization", &format!("Bearer {token}"))
 			.header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" })
 			.header("Accept-Language", "en-US,en;q=0.5")
 			.header("Connection", "keep-alive")
@@ -227,7 +227,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
 					//
 					// 2. Percent-encode the path.
 					let new_path = percent_encode(val.as_bytes(), CONTROLS).to_string().trim_start_matches(REDDIT_URL_BASE).to_string();
-					format!("{}{}raw_json=1", new_path, if new_path.contains('?') { "&" } else { "?" })
+					format!("{new_path}{}raw_json=1", if new_path.contains('?') { "&" } else { "?" })
 				})
 				.unwrap_or_default()
 				.to_string(),
@@ -302,7 +302,7 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 	// Closure to quickly build errors
 	let err = |msg: &str, e: String| -> Result<Value, String> {
 		// eprintln!("{} - {}: {}", url, msg, e);
-		Err(format!("{}: {}", msg, e))
+		Err(format!("{msg}: {e}"))
 	};
 
 	// Fetch the url...
@@ -324,7 +324,7 @@ pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
 				.as_str()
 				.unwrap_or_else(|| {
 					json["message"].as_str().unwrap_or_else(|| {
-						eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path);
+						eprintln!("{REDDIT_URL_BASE}{path} - Error parsing reddit error");
 						"Error parsing reddit error"
 					})
 				})
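The bulk of this file's changes inline variables directly into format strings, which is what Clippy's `uninlined_format_args` lint asks for. A minimal sketch of the pattern, with made-up values standing in for the request data:

```rust
fn main() {
	let name = "sub";
	let id = 42;

	// Before: positional arguments outside the string.
	let old = format!("{}/{}", name, id);
	// After: identifiers captured inside the braces (Rust 2021).
	let new = format!("{name}/{id}");
	assert_eq!(old, new);

	// Only plain identifiers can be inlined; expressions keep a positional
	// slot, which is why `format!("{format}?{}", req.uri().query()...)`
	// above still has one `{}`.
	let query: Option<&str> = None;
	println!("{name}?{}", query.unwrap_or_default());
}
```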
@@ -17,7 +17,7 @@ pub const DEFAULT_PUSHSHIFT_FRONTEND: &str = "www.unddit.com";
 /// config file. `Config::Default()` contains None for each setting.
 /// When adding more config settings, add it to `Config::load`,
 /// `get_setting_from_config`, both below, as well as
-/// instance_info::InstanceInfo.to_string(), README.md and app.json.
+/// `instance_info::InstanceInfo.to_string`(), README.md and app.json.
 #[derive(Default, Serialize, Deserialize, Clone, Debug)]
 pub struct Config {
 	#[serde(rename = "REDLIB_SFW_ONLY")]
@@ -103,7 +103,7 @@ impl Config {
 			new_file.ok().and_then(|new_file| toml::from_str::<Self>(&new_file).ok())
 		};
 
-		let config = load_config("redlib.toml").or(load_config("libreddit.toml")).unwrap_or_default();
+		let config = load_config("redlib.toml").or_else(|| load_config("libreddit.toml")).unwrap_or_default();
 
 		// This function defines the order of preference - first check for
 		// environment variables with "REDLIB", then check the legacy LIBREDDIT
@@ -112,7 +112,7 @@ impl Config {
 			// Return the first non-`None` value
 			// If all are `None`, return `None`
 			let legacy_key = key.replace("REDLIB_", "LIBREDDIT_");
-			var(key).ok().or(var(legacy_key).ok()).or(get_setting_from_config(key, &config))
+			var(key).ok().or_else(|| var(legacy_key).ok()).or_else(|| get_setting_from_config(key, &config))
 		};
 		Self {
 			sfw_only: parse("REDLIB_SFW_ONLY"),
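Both hunks in this file swap `or` for `or_else`, matching Clippy's `or_fun_call` lint: `Option::or` evaluates its argument eagerly, so the fallback lookup ran even when the first one succeeded. A reduced sketch (the real `load_config` is a closure returning `Option<Self>`; this stand-in just reads a file):

```rust
use std::fs;

fn load_config(path: &str) -> Option<String> {
	println!("reading {path}"); // visible side effect for the demo
	fs::read_to_string(path).ok()
}

fn main() {
	// Eager: the second load_config call runs even if the first succeeded.
	let _eager = load_config("redlib.toml").or(load_config("libreddit.toml"));

	// Lazy: the closure only runs when the first call returned None.
	let _lazy = load_config("redlib.toml").or_else(|| load_config("libreddit.toml"));
}
```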
@@ -12,14 +12,14 @@ use std::borrow::ToOwned;
 use std::collections::HashSet;
 use std::vec::Vec;
 
-/// DuplicatesParams contains the parameters in the URL.
+/// `DuplicatesParams` contains the parameters in the URL.
 struct DuplicatesParams {
 	before: String,
 	after: String,
 	sort: String,
 }
 
-/// DuplicatesTemplate defines an Askama template for rendering duplicate
+/// `DuplicatesTemplate` defines an Askama template for rendering duplicate
 /// posts.
 #[derive(Template)]
 #[template(path = "duplicates.html")]
@@ -59,7 +59,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 
 	// Log the request in debugging mode
 	#[cfg(debug_assertions)]
-	dbg!(req.param("id").unwrap_or_default());
+	req.param("id").unwrap_or_default();
 
 	// Send the GET, and await JSON.
 	match json(path, quarantined).await {
@@ -189,7 +189,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 					Err(msg) => {
 						// Abort entirely if we couldn't get the previous
 						// batch.
-						return error(req, msg).await;
+						return error(req, &msg).await;
 					}
 				}
 			} else {
@@ -197,7 +197,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 				}
 			}
 
-			template(DuplicatesTemplate {
+			Ok(template(&DuplicatesTemplate {
 				params: DuplicatesParams { before, after, sort },
 				post,
 				duplicates,
@@ -205,28 +205,28 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 				url: req_url,
 				num_posts_filtered,
 				all_posts_filtered,
-			})
+			}))
 		}
 
 		// Process error.
 		Err(msg) => {
 			if msg == "quarantined" || msg == "gated" {
 				let sub = req.param("sub").unwrap_or_default();
-				quarantine(req, sub, msg)
+				Ok(quarantine(&req, sub, &msg))
 			} else {
-				error(req, msg).await
+				error(req, &msg).await
 			}
 		}
 	}
 }
 
 // DUPLICATES
-async fn parse_duplicates(json: &serde_json::Value, filters: &HashSet<String>) -> (Vec<Post>, u64, bool) {
+async fn parse_duplicates(json: &Value, filters: &HashSet<String>) -> (Vec<Post>, u64, bool) {
 	let post_duplicates: &Vec<Value> = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned);
 	let mut duplicates: Vec<Post> = Vec::new();
 
 	// Process each post and place them in the Vec<Post>.
-	for val in post_duplicates.iter() {
+	for val in post_duplicates {
 		let post: Post = parse_post(val).await;
 		duplicates.push(post);
 	}
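Dropping the `dbg!` wrapper lines up with the `clippy::dbg_macro` warning enabled in src/main.rs: `dbg!` prints its argument to stderr and returns it, so removing it leaves the value unchanged. A sketch with a hypothetical `param` helper standing in for `req.param`:

```rust
fn param(name: &str) -> Option<String> {
	// Hypothetical stand-in for req.param() in this demo.
	Some(name.to_owned())
}

fn main() {
	// Before: prints something like `[src/main.rs:9] param("id")... = "id"`.
	let noisy = dbg!(param("id").unwrap_or_default());

	// After: same value, no stderr output.
	let quiet = param("id").unwrap_or_default();

	assert_eq!(noisy, quiet);
}
```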
@@ -24,7 +24,7 @@ pub async fn instance_info(req: Request<Body>) -> Result<Response<Body>, String>
 		"yaml" | "yml" => info_yaml(),
 		"txt" => info_txt(),
 		"json" => info_json(),
-		"html" | "" => info_html(req),
+		"html" | "" => info_html(&req),
 		_ => {
 			let error = ErrorTemplate {
 				msg: "Error: Invalid info extension".into(),
@@ -68,13 +68,13 @@ fn info_txt() -> Result<Response<Body>, Error> {
 	Response::builder()
 		.status(200)
 		.header("content-type", "text/plain")
-		.body(Body::from(INSTANCE_INFO.to_string(StringType::Raw)))
+		.body(Body::from(INSTANCE_INFO.to_string(&StringType::Raw)))
 }
-fn info_html(req: Request<Body>) -> Result<Response<Body>, Error> {
+fn info_html(req: &Request<Body>) -> Result<Response<Body>, Error> {
 	let message = MessageTemplate {
 		title: String::from("Instance information"),
-		body: INSTANCE_INFO.to_string(StringType::Html),
-		prefs: Preferences::new(&req),
+		body: INSTANCE_INFO.to_string(&StringType::Html),
+		prefs: Preferences::new(req),
 		url: req.uri().to_string(),
 	}
 	.render()
@@ -109,7 +109,7 @@ impl InstanceInfo {
 	}
 	fn to_table(&self) -> String {
 		let mut container = Container::default();
-		let convert = |o: &Option<String>| -> String { o.clone().unwrap_or("<span class=\"unset\"><i>Unset</i></span>".to_owned()) };
+		let convert = |o: &Option<String>| -> String { o.clone().unwrap_or_else(|| "<span class=\"unset\"><i>Unset</i></span>".to_owned()) };
 		if let Some(banner) = &self.config.banner {
 			container.add_header(3, "Instance banner");
 			container.add_raw("<br />");
@@ -151,7 +151,7 @@ impl InstanceInfo {
 		);
 		container.to_html_string().replace("<th>", "<th colspan=\"2\">")
 	}
-	fn to_string(&self, string_type: StringType) -> String {
+	fn to_string(&self, string_type: &StringType) -> String {
 		match string_type {
 			StringType::Raw => {
 				format!(
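The signature changes here (`req: &Request<Body>`, `string_type: &StringType`) are the usual fix for Clippy's `needless_pass_by_value` lint: a function that only reads its argument should borrow it, so callers keep ownership. A minimal sketch with a stand-in enum:

```rust
#[derive(Debug)]
enum StringType {
	Raw,
	Html,
}

// Before: consumed its argument even though it only reads it.
fn render_owned(string_type: StringType) -> String {
	format!("{string_type:?}")
}

// After: borrows, so the caller can keep using its value.
fn render_borrowed(string_type: &StringType) -> String {
	format!("{string_type:?}")
}

fn main() {
	let t = StringType::Html;
	let _ = render_borrowed(&t);
	let _ = render_borrowed(&t); // can call repeatedly
	let _ = render_owned(t); // `t` is moved and unusable afterwards
}
```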

src/main.rs (80 changes)
@@ -1,6 +1,42 @@
 // Global specifiers
 #![forbid(unsafe_code)]
-#![allow(clippy::cmp_owned)]
+#![deny(
+	anonymous_parameters,
+	clippy::all,
+	illegal_floating_point_literal_pattern,
+	late_bound_lifetime_arguments,
+	path_statements,
+	patterns_in_fns_without_body,
+	rust_2018_idioms,
+	trivial_numeric_casts,
+	unused_extern_crates
+)]
+#![warn(
+	clippy::dbg_macro,
+	clippy::decimal_literal_representation,
+	clippy::get_unwrap,
+	clippy::nursery,
+	clippy::pedantic,
+	clippy::todo,
+	clippy::unimplemented,
+	clippy::use_debug,
+	clippy::all,
+	unused_qualifications,
+	variant_size_differences
+)]
+#![allow(
+	clippy::cmp_owned,
+	clippy::unused_async,
+	clippy::option_if_let_else,
+	clippy::items_after_statements,
+	clippy::cast_possible_truncation,
+	clippy::cast_possible_wrap,
+	clippy::cast_precision_loss,
+	clippy::struct_field_names,
+	clippy::struct_excessive_bools,
+	clippy::useless_let_if_seq,
+	clippy::collection_is_never_read
+)]
 
 // Reference local files
 mod config;
@@ -193,7 +229,7 @@ async fn main() {
 	};
 
 	if let Some(expire_time) = hsts {
-		if let Ok(val) = HeaderValue::from_str(&format!("max-age={}", expire_time)) {
+		if let Ok(val) = HeaderValue::from_str(&format!("max-age={expire_time}")) {
 			app.default_headers.insert("Strict-Transport-Security", val);
 		}
 	}
@@ -249,11 +285,11 @@ async fn main() {
 	// Browse user profile
 	app
 		.at("/u/:name")
-		.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
+		.get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
 	app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed());
 	app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
 
-	app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account".to_string()).boxed());
+	app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account").boxed());
 	app.at("/user/:name").get(|r| user::profile(r).boxed());
 	app.at("/user/:name/:listing").get(|r| user::profile(r).boxed());
 	app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed());
@@ -273,7 +309,7 @@ async fn main() {
 
 	app
 		.at("/r/u_:name")
-		.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
+		.get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
 
 	app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
 	app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
@@ -298,10 +334,10 @@ async fn main() {
 
 	app
 		.at("/r/:sub/w")
-		.get(|r| async move { Ok(redirect(format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed());
+		.get(|r| async move { Ok(redirect(&format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed());
 	app
 		.at("/r/:sub/w/*page")
-		.get(|r| async move { Ok(redirect(format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed());
+		.get(|r| async move { Ok(redirect(&format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed());
 	app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed());
 	app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed());
 
@@ -313,10 +349,10 @@ async fn main() {
 	app.at("/").get(|r| subreddit::community(r).boxed());
 
 	// View Reddit wiki
-	app.at("/w").get(|_| async { Ok(redirect("/wiki".to_string())) }.boxed());
+	app.at("/w").get(|_| async { Ok(redirect("/wiki")) }.boxed());
 	app
 		.at("/w/*page")
-		.get(|r| async move { Ok(redirect(format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed());
+		.get(|r| async move { Ok(redirect(&format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed());
 	app.at("/wiki").get(|r| subreddit::wiki(r).boxed());
 	app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed());
 
@@ -324,7 +360,7 @@ async fn main() {
 	app.at("/search").get(|r| search::find(r).boxed());
 
 	// Handle about pages
-	app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());
+	app.at("/about").get(|req| error(req, "About pages aren't added yet").boxed());
 
 	// Instance info page
 	app.at("/info").get(|r| instance_info::instance_info(r).boxed());
@@ -337,14 +373,14 @@ async fn main() {
 			let sub = req.param("sub").unwrap_or_default();
 			match req.param("id").as_deref() {
 				// Share link
-				Some(id) if (8..12).contains(&id.len()) => match canonical_path(format!("/r/{}/s/{}", sub, id)).await {
-					Ok(Some(path)) => Ok(redirect(path)),
+				Some(id) if (8..12).contains(&id.len()) => match canonical_path(format!("/r/{sub}/s/{id}")).await {
+					Ok(Some(path)) => Ok(redirect(&path)),
 					Ok(None) => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
-					Err(e) => error(req, e).await,
+					Err(e) => error(req, &e).await,
 				},
 
 				// Error message for unknown pages
-				_ => error(req, "Nothing here".to_string()).await,
+				_ => error(req, "Nothing here").await,
 			}
 		})
 	});
@@ -356,29 +392,29 @@ async fn main() {
 			Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await,
 
 			// Short link for post
-			Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{}", id)).await {
+			Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/{id}")).await {
 				Ok(path_opt) => match path_opt {
-					Some(path) => Ok(redirect(path)),
+					Some(path) => Ok(redirect(&path)),
 					None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await,
 				},
-				Err(e) => error(req, e).await,
+				Err(e) => error(req, &e).await,
 			},
 
 			// Error message for unknown pages
-			_ => error(req, "Nothing here".to_string()).await,
+			_ => error(req, "Nothing here").await,
 		}
 		})
 	});
 
 	// Default service in case no routes match
-	app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed());
+	app.at("/*").get(|req| error(req, "Nothing here").boxed());
 
-	println!("Running Redlib v{} on {}!", env!("CARGO_PKG_VERSION"), listener);
+	println!("Running Redlib v{} on {listener}!", env!("CARGO_PKG_VERSION"));
 
-	let server = app.listen(listener);
+	let server = app.listen(&listener);
 
 	// Run this server for... forever!
 	if let Err(e) = server.await {
-		eprintln!("Server error: {}", e);
+		eprintln!("Server error: {e}");
 	}
 }
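The new crate-level attributes layer three severities: `deny` makes a lint a hard compile error, `warn` merely reports it, and `allow` silences it; an item-level attribute can still override the crate default case by case. A compressed sketch of the scheme:

```rust
#![deny(clippy::all)]
#![warn(clippy::pedantic)]
#![allow(clippy::cast_possible_truncation)]

struct Config;

impl Config {
	// Opt a single item back out of a warned lint where it misfires.
	#[allow(clippy::unused_self)]
	fn name(&self) -> &'static str {
		"redlib"
	}
}

fn main() {
	println!("{}", Config.name());
}
```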
@@ -46,11 +46,11 @@ impl Oauth {
 	}
 	async fn login(&mut self) -> Option<()> {
 		// Construct URL for OAuth token
-		let url = format!("{}/api/access_token", AUTH_ENDPOINT);
+		let url = format!("{AUTH_ENDPOINT}/api/access_token");
 		let mut builder = Request::builder().method(Method::POST).uri(&url);
 
 		// Add headers from spoofed client
-		for (key, value) in self.initial_headers.iter() {
+		for (key, value) in &self.initial_headers {
 			builder = builder.header(key, value);
 		}
 		// Set up HTTP Basic Auth - basically just the const OAuth ID's with no password,
@@ -70,7 +70,7 @@ impl Oauth {
 		let request = builder.body(body).unwrap();
 
 		// Send request
-		let client: client::Client<_, hyper::Body> = CLIENT.clone();
+		let client: client::Client<_, Body> = CLIENT.clone();
 		let resp = client.request(request).await.ok()?;
 
 		// Parse headers - loid header _should_ be saved sent on subsequent token refreshes.

src/post.rs (24 changes)
@@ -27,7 +27,7 @@ struct PostTemplate {
 	comment_query: String,
 }
 
-static COMMENT_SEARCH_CAPTURE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\?q=(.*)&type=comment"#).unwrap());
+static COMMENT_SEARCH_CAPTURE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\?q=(.*)&type=comment").unwrap());
 
 pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 	// Build Reddit API path
@@ -52,7 +52,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 
 	// Log the post ID being fetched in debug mode
 	#[cfg(debug_assertions)]
-	dbg!(req.param("id").unwrap_or_default());
+	req.param("id").unwrap_or_default();
 
 	let single_thread = req.param("comment_id").is_some();
 	let highlighted_comment = &req.param("comment_id").unwrap_or_default();
@@ -83,7 +83,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 			};
 
 			// Use the Post and Comment structs to generate a website to show users
-			template(PostTemplate {
+			Ok(template(&PostTemplate {
 				comments,
 				post,
 				url_without_query: url.clone().trim_end_matches(&format!("?q={query}&type=comment")).to_string(),
@@ -92,15 +92,15 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
 				single_thread,
 				url: req_url,
 				comment_query: query,
-			})
+			}))
 		}
 		// If the Reddit API returns an error, exit and send error page to user
 		Err(msg) => {
 			if msg == "quarantined" || msg == "gated" {
 				let sub = req.param("sub").unwrap_or_default();
-				quarantine(req, sub, msg)
+				Ok(quarantine(&req, sub, &msg))
 			} else {
-				error(req, msg).await
+				error(req, &msg).await
 			}
 		}
 	}
@@ -139,19 +139,19 @@ fn query_comments(
 	let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
 	let mut results = Vec::new();
 
-	comments.into_iter().for_each(|comment| {
+	for comment in comments {
 		let data = &comment["data"];
 
 		// If this comment contains replies, handle those too
 		if data["replies"].is_object() {
-			results.append(&mut query_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, query, req))
+			results.append(&mut query_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, query, req));
 		}
 
 		let c = build_comment(&comment, data, Vec::new(), post_link, post_author, highlighted_comment, filters, req);
 		if c.body.to_lowercase().contains(&query.to_lowercase()) {
 			results.push(c);
 		}
-	});
+	}
 
 	results
 }
@@ -170,10 +170,8 @@ fn build_comment(
 
 	let body = if (val(comment, "author") == "[deleted]" && val(comment, "body") == "[removed]") || val(comment, "body") == "[ Removed by Reddit ]" {
 		format!(
-			"<div class=\"md\"><p>[removed] — <a href=\"https://{}{}{}\">view removed comment</a></p></div>",
-			get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or(String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
-			post_link,
-			id
+			"<div class=\"md\"><p>[removed] — <a href=\"https://{}{post_link}{id}\">view removed comment</a></p></div>",
+			get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
 		)
 	} else {
 		rewrite_urls(&val(comment, "body_html"))
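In `query_comments`, `comments.into_iter().for_each(..)` becomes a plain `for` loop, as Clippy's `needless_for_each` suggests: a `for` loop reads more directly and, unlike a closure, supports `continue`, `break`, and `?`. Sketch:

```rust
fn main() {
	let comments = ["first", "second", "third"];
	let mut results = Vec::new();

	// Before:
	// comments.iter().for_each(|comment| {
	// 	if comment.contains('i') {
	// 		results.push(*comment);
	// 	}
	// });

	// After:
	for comment in &comments {
		if comment.contains('i') {
			results.push(*comment);
		}
	}

	assert_eq!(results, vec!["first", "third"]);
}
```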
@@ -65,11 +65,11 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 	query = REDDIT_URL_MATCH.replace(&query, "").to_string();
 
 	if query.is_empty() {
-		return Ok(redirect("/".to_string()));
+		return Ok(redirect("/"));
 	}
 
 	if query.starts_with("r/") {
-		return Ok(redirect(format!("/{}", query)));
+		return Ok(redirect(&format!("/{query}")));
 	}
 
 	let sub = req.param("sub").unwrap_or_default();
@@ -97,7 +97,7 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 
 	// If all requested subs are filtered, we don't need to fetch posts.
 	if sub.split('+').all(|s| filters.contains(s)) {
-		template(SearchTemplate {
+		Ok(template(&SearchTemplate {
 			posts: Vec::new(),
 			subreddits,
 			sub,
@@ -106,7 +106,7 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 				sort,
 				t: param(&path, "t").unwrap_or_default(),
 				before: param(&path, "after").unwrap_or_default(),
-				after: "".to_string(),
+				after: String::new(),
 				restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
 				typed,
 			},
@@ -116,14 +116,14 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 			all_posts_filtered: false,
 			all_posts_hidden_nsfw: false,
 			no_posts: false,
-		})
+		}))
 	} else {
 		match Post::fetch(&path, quarantined).await {
 			Ok((mut posts, after)) => {
 				let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
 				let no_posts = posts.is_empty();
 				let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
-				template(SearchTemplate {
+				Ok(template(&SearchTemplate {
 					posts,
 					subreddits,
 					sub,
@@ -142,14 +142,14 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 					all_posts_filtered,
 					all_posts_hidden_nsfw,
 					no_posts,
-				})
+				}))
 			}
 			Err(msg) => {
 				if msg == "quarantined" || msg == "gated" {
 					let sub = req.param("sub").unwrap_or_default();
-					quarantine(req, sub, msg)
+					Ok(quarantine(&req, sub, &msg))
 				} else {
-					error(req, msg).await
+					error(req, &msg).await
 				}
 			}
 		}
@@ -158,7 +158,7 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
 
 async fn search_subreddits(q: &str, typed: &str) -> Vec<Subreddit> {
 	let limit = if typed == "sr_user" { "50" } else { "3" };
-	let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={}", q.replace(' ', "+"), limit);
+	let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={limit}", q.replace(' ', "+"));
 
 	// Send a request to the url
 	json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"]
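Two small patterns recur in this file: `"".to_string()` becomes `String::new()`, which states the intent of "empty string" directly, and `redirect` now takes `&str`, so call sites pass `"/"` or `&format!(..)` instead of allocating an owned `String` for the parameter. A sketch, with a hypothetical `redirect` mirroring only the signature change:

```rust
fn redirect(path: &str) -> String {
	format!("302 -> {path}")
}

fn main() {
	assert_eq!(String::new(), "".to_string()); // same contents, clearer intent

	// Borrowing call sites: no owned String needed just to pass a path.
	let _ = redirect("/");
	let q = "cats";
	let _ = redirect(&format!("/{q}"));
}
```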
@@ -70,7 +70,7 @@ impl ToString for CompressionType {
 		match self {
 			Self::Gzip => "gzip".to_string(),
 			Self::Brotli => "br".to_string(),
-			_ => String::new(),
+			Self::Passthrough => String::new(),
 		}
 	}
 }
@@ -104,13 +104,13 @@ pub trait RequestExt {
 	fn params(&self) -> Params;
 	fn param(&self, name: &str) -> Option<String>;
 	fn set_params(&mut self, params: Params) -> Option<Params>;
-	fn cookies(&self) -> Vec<Cookie>;
-	fn cookie(&self, name: &str) -> Option<Cookie>;
+	fn cookies(&self) -> Vec<Cookie<'_>>;
+	fn cookie(&self, name: &str) -> Option<Cookie<'_>>;
 }
 
 pub trait ResponseExt {
-	fn cookies(&self) -> Vec<Cookie>;
-	fn insert_cookie(&mut self, cookie: Cookie);
+	fn cookies(&self) -> Vec<Cookie<'_>>;
+	fn insert_cookie(&mut self, cookie: Cookie<'_>);
 	fn remove_cookie(&mut self, name: String);
 }
 
@@ -131,7 +131,7 @@ impl RequestExt for Request<Body> {
 		self.extensions_mut().insert(params)
 	}
 
-	fn cookies(&self) -> Vec<Cookie> {
+	fn cookies(&self) -> Vec<Cookie<'_>> {
 		self.headers().get("Cookie").map_or(Vec::new(), |header| {
 			header
 				.to_str()
@@ -142,13 +142,13 @@ impl RequestExt for Request<Body> {
 		})
 	}
 
-	fn cookie(&self, name: &str) -> Option<Cookie> {
+	fn cookie(&self, name: &str) -> Option<Cookie<'_>> {
 		self.cookies().into_iter().find(|c| c.name() == name)
 	}
 }
 
 impl ResponseExt for Response<Body> {
-	fn cookies(&self) -> Vec<Cookie> {
+	fn cookies(&self) -> Vec<Cookie<'_>> {
 		self.headers().get("Cookie").map_or(Vec::new(), |header| {
 			header
 				.to_str()
@@ -159,7 +159,7 @@ impl ResponseExt for Response<Body> {
 		})
 	}
 
-	fn insert_cookie(&mut self, cookie: Cookie) {
+	fn insert_cookie(&mut self, cookie: Cookie<'_>) {
 		if let Ok(val) = header::HeaderValue::from_str(&cookie.to_string()) {
 			self.headers_mut().append("Set-Cookie", val);
 		}
@@ -176,19 +176,19 @@ impl ResponseExt for Response<Body> {
 }
 
 impl Route<'_> {
-	fn method(&mut self, method: Method, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
+	fn method(&mut self, method: &Method, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
 		self.router.add(&format!("/{}{}", method.as_str(), self.path), dest);
 		self
 	}
 
 	/// Add an endpoint for `GET` requests
 	pub fn get(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
-		self.method(Method::GET, dest)
+		self.method(&Method::GET, dest)
 	}
 
 	/// Add an endpoint for `POST` requests
 	pub fn post(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
-		self.method(Method::POST, dest)
+		self.method(&Method::POST, dest)
 	}
 }
 
@@ -200,14 +200,14 @@ impl Server {
 		}
 	}
 
-	pub fn at(&mut self, path: &str) -> Route {
+	pub fn at(&mut self, path: &str) -> Route<'_> {
 		Route {
 			path: path.to_owned(),
 			router: &mut self.router,
 		}
 	}
 
-	pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
+	pub fn listen(self, addr: &str) -> Boxed<Result<(), hyper::Error>> {
 		let make_svc = make_service_fn(move |_conn| {
 			// For correct borrowing, these values need to be borrowed
 			let router = self.router.clone();
@@ -260,7 +260,7 @@ impl Server {
 		});
 
 		// Build SocketAddr from provided address
-		let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));
+		let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {addr} as address (example format: 0.0.0.0:8080)"));
 
 		// Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
 		let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
@@ -376,7 +376,7 @@ fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
 
 		// The compressor and q-value (if the latter is defined)
 		// will be delimited by semicolons.
-		let mut spl: Split<char> = val.split(';');
+		let mut spl: Split<'_, char> = val.split(';');
 
 		// Get the compressor. For example, in
 		// gzip;q=0.8
@@ -438,10 +438,10 @@ fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
 		};
 	}
 
-	if cur_candidate.q != f64::NEG_INFINITY {
-		Some(cur_candidate.alg)
-	} else {
+	if cur_candidate.q == f64::NEG_INFINITY {
 		None
+	} else {
+		Some(cur_candidate.alg)
 	}
 }
 
@@ -453,16 +453,16 @@ fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
 /// conditions are met:
 ///
 /// 1. the HTTP client requests a compression encoding in the Content-Encoding
-/// header (hence the need for the req_headers);
+/// header (hence the need for the `req_headers`);
 ///
 /// 2. the content encoding corresponds to a compression algorithm we support;
 ///
 /// 3. the Media type in the Content-Type response header is text with any
 /// subtype (e.g. text/plain) or application/json.
 ///
-/// compress_response returns Ok on successful compression, or if not all three
+/// `compress_response` returns Ok on successful compression, or if not all three
 /// conditions above are met. It returns Err if there was a problem decoding
-/// any header in either req_headers or res, but res will remain intact.
+/// any header in either `req_headers` or res, but res will remain intact.
 ///
 /// This function logs errors to stderr, but only in debug mode. No information
 /// is logged in release builds.
@@ -601,7 +601,7 @@ fn compress_body(compressor: CompressionType, body_bytes: Vec<u8>) -> Result<Vec
 
 		// This arm is for any requested compressor for which we don't yet
 		// have an implementation.
-		_ => {
+		CompressionType::Passthrough => {
 			let msg = "unsupported compressor".to_string();
 			return Err(msg);
 		}
@@ -677,7 +677,7 @@ mod tests {
 
 		// Perform the compression.
 		if let Err(e) = block_on(compress_response(&req_headers, &mut res)) {
-			panic!("compress_response(&req_headers, &mut res) => Err(\"{}\")", e);
+			panic!("compress_response(&req_headers, &mut res) => Err(\"{e}\")");
 		};
 
 		// If the content was compressed, we expect the Content-Encoding
@@ -699,7 +699,7 @@ mod tests {
 		// the Response is the same as what with which we start.
 		let body_vec = match block_on(body::to_bytes(res.body_mut())) {
 			Ok(b) => b.to_vec(),
-			Err(e) => panic!("{}", e),
+			Err(e) => panic!("{e}"),
 		};
 
 		if expected_encoding == CompressionType::Passthrough {
@@ -715,7 +715,7 @@ mod tests {
 		let mut decoder: Box<dyn io::Read> = match expected_encoding {
 			CompressionType::Gzip => match gzip::Decoder::new(&mut body_cursor) {
 				Ok(dgz) => Box::new(dgz),
-				Err(e) => panic!("{}", e),
+				Err(e) => panic!("{e}"),
 			},
 
 			CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),
@@ -725,7 +725,7 @@ mod tests {
 
 		let mut decompressed = Vec::<u8>::new();
 		if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
-			panic!("{}", e);
+			panic!("{e}");
 		};
 
 		assert!(decompressed.eq(&expected_lorem_ipsum));
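The repeated `Cookie` to `Cookie<'_>` edits satisfy `elided_lifetimes_in_paths`, part of the `rust_2018_idioms` group denied in src/main.rs: the anonymous lifetime makes it visible at a glance that the returned cookie borrows from the request. A sketch with a stand-in type:

```rust
struct Cookie<'a> {
	raw: &'a str,
}

// Before the lint fix this would be written `-> Option<Cookie>`, hiding
// the fact that the result borrows from `header`.
fn first_cookie(header: &str) -> Option<Cookie<'_>> {
	header.split(';').next().map(|raw| Cookie { raw: raw.trim() })
}

fn main() {
	let cookie = first_cookie("session=abc; theme=dark").unwrap();
	assert_eq!(cookie.raw, "session=abc");
}
```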
@@ -42,10 +42,10 @@ const PREFS: [&str; 15] = [
 // Retrieve cookies from request "Cookie" header
 pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
 	let url = req.uri().to_string();
-	template(SettingsTemplate {
+	Ok(template(&SettingsTemplate {
 		prefs: Preferences::new(&req),
 		url,
-	})
+	}))
 }
 
 // Set cookies using response "Set-Cookie" header
@@ -54,7 +54,7 @@ pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
 	let (parts, mut body) = req.into_parts();
 
 	// Grab existing cookies
-	let _cookies: Vec<Cookie> = parts
+	let _cookies: Vec<Cookie<'_>> = parts
 		.headers
 		.get_all("Cookie")
 		.iter()
@@ -73,7 +73,7 @@ pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
 
 	let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
 
-	let mut response = redirect("/settings".to_string());
+	let mut response = redirect("/settings");
 
 	for &name in &PREFS {
 		match form.get(name) {
@@ -96,7 +96,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body
 	let (parts, _) = req.into_parts();
 
 	// Grab existing cookies
-	let _cookies: Vec<Cookie> = parts
+	let _cookies: Vec<Cookie<'_>> = parts
 		.headers
 		.get_all("Cookie")
 		.iter()
@@ -112,7 +112,7 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body
 		None => "/".to_string(),
 	};
 
-	let mut response = redirect(path);
+	let mut response = redirect(&path);
 
 	for name in [PREFS.to_vec(), vec!["subscriptions", "filters"]].concat() {
 		match form.get(name) {
@ -76,7 +76,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if req.param("sub").is_some() && sub_name.starts_with("u_") {
|
if req.param("sub").is_some() && sub_name.starts_with("u_") {
|
||||||
return Ok(redirect(["/user/", &sub_name[2..]].concat()));
|
return Ok(redirect(&["/user/", &sub_name[2..]].concat()));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Request subreddit metadata
|
// Request subreddit metadata
|
||||||
@ -117,11 +117,11 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
|||||||
|
|
||||||
// If all requested subs are filtered, we don't need to fetch posts.
|
// If all requested subs are filtered, we don't need to fetch posts.
|
||||||
if sub_name.split('+').all(|s| filters.contains(s)) {
|
if sub_name.split('+').all(|s| filters.contains(s)) {
|
||||||
template(SubredditTemplate {
|
Ok(template(&SubredditTemplate {
|
||||||
sub,
|
sub,
|
||||||
posts: Vec::new(),
|
posts: Vec::new(),
|
||||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||||
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
|
ends: (param(&path, "after").unwrap_or_default(), String::new()),
|
||||||
prefs: Preferences::new(&req),
|
prefs: Preferences::new(&req),
|
||||||
url,
|
url,
|
||||||
redirect_url,
|
redirect_url,
|
||||||
@ -129,14 +129,14 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
|||||||
all_posts_filtered: false,
|
all_posts_filtered: false,
|
||||||
all_posts_hidden_nsfw: false,
|
all_posts_hidden_nsfw: false,
|
||||||
no_posts: false,
|
no_posts: false,
|
||||||
})
|
}))
|
||||||
} else {
|
} else {
|
||||||
match Post::fetch(&path, quarantined).await {
|
match Post::fetch(&path, quarantined).await {
|
||||||
Ok((mut posts, after)) => {
|
Ok((mut posts, after)) => {
|
||||||
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
|
||||||
let no_posts = posts.is_empty();
|
let no_posts = posts.is_empty();
|
||||||
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
|
||||||
template(SubredditTemplate {
|
Ok(template(&SubredditTemplate {
|
||||||
sub,
|
sub,
|
||||||
posts,
|
posts,
|
||||||
sort: (sort, param(&path, "t").unwrap_or_default()),
|
sort: (sort, param(&path, "t").unwrap_or_default()),
|
||||||
@ -148,40 +148,38 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
|
|||||||
all_posts_filtered,
|
all_posts_filtered,
|
||||||
all_posts_hidden_nsfw,
|
all_posts_hidden_nsfw,
|
||||||
no_posts,
|
no_posts,
|
||||||
})
|
}))
|
||||||
}
|
}
|
||||||
Err(msg) => match msg.as_str() {
|
Err(msg) => match msg.as_str() {
|
||||||
"quarantined" | "gated" => quarantine(req, sub_name, msg),
|
"quarantined" | "gated" => Ok(quarantine(&req, sub_name, &msg)),
|
||||||
"private" => error(req, format!("r/{} is a private community", sub_name)).await,
|
"private" => error(req, &format!("r/{sub_name} is a private community")).await,
|
||||||
"banned" => error(req, format!("r/{} has been banned from Reddit", sub_name)).await,
|
"banned" => error(req, &format!("r/{sub_name} has been banned from Reddit")).await,
|
||||||
_ => error(req, msg).await,
|
_ => error(req, &msg).await,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn quarantine(req: Request<Body>, sub: String, restriction: String) -> Result<Response<Body>, String> {
|
pub fn quarantine(req: &Request<Body>, sub: String, restriction: &str) -> Response<Body> {
|
||||||
let wall = WallTemplate {
|
let wall = WallTemplate {
|
||||||
title: format!("r/{} is {}", sub, restriction),
|
title: format!("r/{sub} is {restriction}"),
|
||||||
msg: "Please click the button below to continue to this subreddit.".to_string(),
|
msg: "Please click the button below to continue to this subreddit.".to_string(),
|
||||||
url: req.uri().to_string(),
|
url: req.uri().to_string(),
|
||||||
sub,
|
sub,
|
||||||
prefs: Preferences::new(&req),
|
prefs: Preferences::new(req),
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(
|
Response::builder()
|
||||||
Response::builder()
|
.status(403)
|
||||||
.status(403)
|
.header("content-type", "text/html")
|
||||||
.header("content-type", "text/html")
|
.body(wall.render().unwrap_or_default().into())
|
||||||
.body(wall.render().unwrap_or_default().into())
|
.unwrap_or_default()
|
||||||
.unwrap_or_default(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||

pub async fn add_quarantine_exception(req: Request<Body>) -> Result<Response<Body>, String> {
let subreddit = req.param("sub").ok_or("Invalid URL")?;
let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?;
let mut response = redirect(redir);
let mut response = redirect(&redir);
response.insert_cookie(
Cookie::build((&format!("allow_quaran_{}", subreddit.to_lowercase()), "true"))
.path("/")
@ -206,9 +204,8 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,
if sub == "random" || sub == "randnsfw" {
if action.contains(&"filter".to_string()) || action.contains(&"unfilter".to_string()) {
return Err("Can't filter random subreddit!".to_string());
} else {
return Err("Can't subscribe to random subreddit!".to_string());
}
}
return Err("Can't subscribe to random subreddit!".to_string());
}
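Because the `if` branch ends in a `return`, the `else` adds nothing but nesting; Clippy's `redundant_else` lint asks for the flattened early-return shape above. A standalone illustration:

fn check(filtering: bool) -> Result<(), String> {
	if filtering {
		return Err("Can't filter random subreddit!".to_string());
	}
	// Only reached when `filtering` is false, so no `else` is needed.
	Err("Can't subscribe to random subreddit!".to_string())
}

fn main() {
	assert!(check(true).is_err());
}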

let query = req.uri().query().unwrap_or_default().to_string();
@ -219,7 +216,7 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,

// Retrieve list of posts for these subreddits to extract display names

let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await;
let posts = json(format!("/r/{sub}/hot.json?raw_json=1"), true).await;
let display_lookup: Vec<(String, &str)> = match &posts {
Ok(posts) => posts["data"]["children"]
.as_array()
@ -247,7 +244,7 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,
display
} else {
// This subreddit display name isn't known, retrieve it
let path: String = format!("/r/{}/about.json?raw_json=1", part);
let path: String = format!("/r/{part}/about.json?raw_json=1");
display = json(path, true).await;
match &display {
Ok(display) => display["data"]["display_name"].as_str(),
@ -282,13 +279,13 @@ pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>,

// Redirect back to subreddit
// check for redirect parameter if unsubscribing/unfiltering from outside sidebar
let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") {
let path = if let Some(redirect_path) = param(&format!("?{query}"), "redirect") {
format!("/{}", redirect_path)
format!("/{redirect_path}")
} else {
format!("/r/{}", sub)
format!("/r/{sub}")
};

let mut response = redirect(path);
let mut response = redirect(&path);

// Delete cookie if empty, else set
if sub_list.is_empty() {
@ -326,22 +323,22 @@ pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
}

let page = req.param("page").unwrap_or_else(|| "index".to_string());
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
let path: String = format!("/r/{sub}/wiki/{page}.json?raw_json=1");
let url = req.uri().to_string();

match json(path, quarantined).await {
Ok(response) => template(WikiTemplate {
Ok(response) => Ok(template(&WikiTemplate {
sub,
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("<h3>Wiki not found</h3>")),
page,
prefs: Preferences::new(&req),
url,
}),
})),
Err(msg) => {
if msg == "quarantined" || msg == "gated" {
quarantine(req, sub, msg)
Ok(quarantine(&req, sub, &msg))
} else {
error(req, msg).await
error(req, &msg).await
}
}
}
@ -357,13 +354,13 @@ pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
}

// Build the Reddit JSON API url
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
let path: String = format!("/r/{sub}/about.json?raw_json=1");
let url = req.uri().to_string();

// Send a request to the url
match json(path, quarantined).await {
// If success, receive JSON in response
Ok(response) => template(WikiTemplate {
Ok(response) => Ok(template(&WikiTemplate {
wiki: rewrite_urls(&val(&response, "description_html")),
// wiki: format!(
// "{}<hr><h1>Moderators</h1><br><ul>{}</ul>",
@ -374,12 +371,12 @@ pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
page: "Sidebar".to_string(),
prefs: Preferences::new(&req),
url,
}),
})),
Err(msg) => {
if msg == "quarantined" || msg == "gated" {
quarantine(req, sub, msg)
Ok(quarantine(&req, sub, &msg))
} else {
error(req, msg).await
error(req, &msg).await
}
}
}
@ -422,7 +419,7 @@ pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
// SUBREDDIT
async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
// Build the Reddit JSON API url
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
let path: String = format!("/r/{sub}/about.json?raw_json=1");

// Send a request to the url
let res = json(path, quarantined).await?;
17
src/user.rs
@ -35,9 +35,8 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {

// Build the Reddit JSON API path
let path = format!(
"/user/{}/{}.json?{}&raw_json=1",
"/user/{}/{listing}.json?{}&raw_json=1",
req.param("name").unwrap_or_else(|| "reddit".to_string()),
listing,
req.uri().query().unwrap_or_default(),
);
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
@ -60,11 +59,11 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {

let filters = get_filters(&req);
if filters.contains(&["u_", &username].concat()) {
template(UserTemplate {
Ok(template(&UserTemplate {
user,
posts: Vec::new(),
sort: (sort, param(&path, "t").unwrap_or_default()),
ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
ends: (param(&path, "after").unwrap_or_default(), String::new()),
listing,
prefs: Preferences::new(&req),
url,
@ -73,7 +72,7 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
all_posts_filtered: false,
all_posts_hidden_nsfw: false,
no_posts: false,
})
}))
} else {
// Request user posts/comments from Reddit
match Post::fetch(&path, false).await {
@ -81,7 +80,7 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
let (_, all_posts_filtered) = filter_posts(&mut posts, &filters);
let no_posts = posts.is_empty();
let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on");
template(UserTemplate {
Ok(template(&UserTemplate {
user,
posts,
sort: (sort, param(&path, "t").unwrap_or_default()),
@ -94,10 +93,10 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
all_posts_filtered,
all_posts_hidden_nsfw,
no_posts,
})
}))
}
// If there is an error show error page
Err(msg) => error(req, msg).await,
Err(msg) => error(req, &msg).await,
}
}
}
@ -105,7 +104,7 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
// USER
async fn user(name: &str) -> Result<User, String> {
// Build the Reddit JSON API path
let path: String = format!("/user/{}/about.json?raw_json=1", name);
let path: String = format!("/user/{name}/about.json?raw_json=1");

// Send a request to the url
json(path, false).await.map(|res| {
68
src/utils.rs
@ -116,8 +116,8 @@ impl Poll {

Some(Self {
poll_options,
total_vote_count,
voting_end_timestamp,
total_vote_count,
})
}

@ -327,9 +327,8 @@ impl Post {
};

// Fetch the list of posts from the JSON response
let post_list = match res["data"]["children"].as_array() {
let Some(post_list) = res["data"]["children"].as_array() else {
Some(list) => list,
return Err("No posts found".to_string());
None => return Err("No posts found".to_string()),
};
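A `match` that binds one arm and diverges on the other is what Clippy's `manual_let_else` lint targets; `let ... else` (stable since Rust 1.65) expresses it directly. The same shape on a plain `Option`, without serde_json:

fn first_post(children: Option<Vec<u64>>) -> Result<u64, String> {
	// Before:
	// let post_list = match children {
	//     Some(list) => list,
	//     None => return Err("No posts found".to_string()),
	// };
	let Some(post_list) = children else {
		return Err("No posts found".to_string());
	};
	post_list.first().copied().ok_or_else(|| "No posts found".to_string())
}

fn main() {
	assert!(first_post(None).is_err());
}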

let mut posts: Vec<Self> = Vec::new();
@ -384,7 +383,7 @@ impl Post {
alt_url: String::new(),
width: data["thumbnail_width"].as_i64().unwrap_or_default(),
height: data["thumbnail_height"].as_i64().unwrap_or_default(),
poster: "".to_string(),
poster: String::new(),
},
media,
domain: val(post, "domain"),
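`String::new()` replaces `"".to_string()` here (and in the `ends` tuple in src/user.rs above): both produce an empty string, but the constructor states the intent without routing an empty literal through `ToString`:

fn main() {
	// Equivalent values; the constructor is the idiomatic spelling.
	assert_eq!("".to_string(), String::new());
}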
@ -457,7 +456,7 @@ pub struct Award {
}

impl std::fmt::Display for Award {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{} {} {}", self.name, self.icon_url, self.description)
}
}
@ -473,8 +472,8 @@ impl std::ops::Deref for Awards {
}

impl std::fmt::Display for Awards {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.iter().fold(Ok(()), |result, award| result.and_then(|_| writeln!(f, "{}", award)))
self.iter().fold(Ok(()), |result, award| result.and_then(|()| writeln!(f, "{award}")))
}
}
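Both `Display` impls pick up two lints at once: `Formatter` gains the elided lifetime `Formatter<'_>`, and the fold closure matches the `()` explicitly instead of discarding it with `_`, while the format argument moves inline. A compact, self-contained version of the same impl shape (storing the strings directly rather than going through `Deref`):

use std::fmt;

struct Awards(Vec<String>);

impl fmt::Display for Awards {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		self.0.iter().fold(Ok(()), |result, award| result.and_then(|()| writeln!(f, "{award}")))
	}
}

fn main() {
	print!("{}", Awards(vec!["gold".into(), "silver".into()]));
}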

@ -603,7 +602,7 @@ impl Preferences {
let mut themes = vec!["system".to_string()];
for file in ThemeAssets::iter() {
let chunks: Vec<&str> = file.as_ref().split(".css").collect();
themes.push(chunks[0].to_owned())
themes.push(chunks[0].to_owned());
}
Self {
available_themes: themes,
@ -657,7 +656,7 @@ pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> (u64, b
}

/// Creates a [`Post`] from a provided JSON.
pub async fn parse_post(post: &serde_json::Value) -> Post {
pub async fn parse_post(post: &Value) -> Post {
// Grab UTC time as unix timestamp
let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
// Parse post score and upvote ratio
@ -675,9 +674,8 @@ pub async fn parse_post(post: &serde_json::Value) -> Post {

let body = if val(post, "removed_by_category") == "moderator" {
format!(
"<div class=\"md\"><p>[removed] — <a href=\"https://{}{}\">view removed post</a></p></div>",
"<div class=\"md\"><p>[removed] — <a href=\"https://{}{permalink}\">view removed post</a></p></div>",
get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or(String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
permalink
)
} else {
rewrite_urls(&val(post, "selftext_html"))
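`unwrap_or` evaluates its argument eagerly even when the value is present; Clippy's `or_fun_call` lint suggests `unwrap_or_else` so the fallback `String` is only built on the `None` path. A standalone sketch (the hostname is a placeholder, not the real default):

fn main() {
	let configured: Option<String> = None;
	// The closure runs only when `configured` is None.
	let frontend = configured.unwrap_or_else(|| String::from("pushshift.example"));
	println!("{frontend}");
}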
@ -753,7 +751,7 @@ pub async fn parse_post(post: &serde_json::Value) -> Post {
// Grab a query parameter from a url
pub fn param(path: &str, value: &str) -> Option<String> {
Some(
Url::parse(format!("https://libredd.it/{}", path).as_str())
Url::parse(format!("https://libredd.it/{path}").as_str())
.ok()?
.query_pairs()
.into_owned()
@ -770,7 +768,7 @@ pub fn setting(req: &Request<Body>, name: &str) -> String {
.cookie(name)
.unwrap_or_else(|| {
// If there is no cookie for this setting, try receiving a default from the config
if let Some(default) = crate::config::get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
if let Some(default) = get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
Cookie::new(name, default)
} else {
Cookie::from(name)
@ -783,21 +781,21 @@ pub fn setting(req: &Request<Body>, name: &str) -> String {
// Retrieve the value of a setting by name or the default value
pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> String {
let value = setting(req, name);
if !value.is_empty() {
if value.is_empty() {
value
} else {
default
} else {
value
}
}
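Reordering the branches removes the negated condition, per Clippy's `if_not_else`; behavior is identical. As a standalone function:

fn setting_or_default(value: String, default: String) -> String {
	// `if !value.is_empty() { value } else { default }` reads backwards;
	// testing the plain condition first drops the `!`.
	if value.is_empty() {
		default
	} else {
		value
	}
}

fn main() {
	assert_eq!(setting_or_default(String::new(), "dark".into()), "dark");
}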

// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
if sub == "random" || sub == "randnsfw" {
let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
let new_sub = json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"]
.as_str()
.unwrap_or_default()
.to_string();
Ok(redirect(format!("/r/{}{}", new_sub, additional)))
Ok(redirect(&format!("/r/{new_sub}{additional}")))
} else {
Err("No redirect needed".to_string())
}
@ -963,28 +961,26 @@ pub fn val(j: &Value, k: &str) -> String {
// NETWORKING
//

pub fn template(t: impl Template) -> Result<Response<Body>, String> {
pub fn template(t: &impl Template) -> Response<Body> {
Ok(
Response::builder()
Response::builder()
.status(200)
.status(200)
.header("content-type", "text/html")
.header("content-type", "text/html")
.body(t.render().unwrap_or_default().into())
.body(t.render().unwrap_or_default().into())
.unwrap_or_default()
.unwrap_or_default(),
)
}
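Like `quarantine`, `template` never actually failed: the builder calls succeed with these arguments and a render error is already absorbed by `unwrap_or_default()`. Returning `Response<Body>` outright, and borrowing the template, is why every call site in this diff turns from `template(SomeTemplate { .. })` into `Ok(template(&SomeTemplate { .. }))`. A reduced model of that caller-side change, with stand-in types:

// Stand-in type for the sketch; not hyper's Response.
struct Response(String);

fn template(body: &str) -> Response {
	Response(body.to_owned())
}

fn handler(upstream_ok: bool) -> Result<Response, String> {
	if !upstream_ok {
		return Err("upstream error".to_string());
	}
	// The fallible caller wraps the infallible helper.
	Ok(template("<h1>hello</h1>"))
}

fn main() {
	assert!(handler(true).is_ok());
}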

pub fn redirect(path: String) -> Response<Body> {
pub fn redirect(path: &str) -> Response<Body> {
Response::builder()
.status(302)
.header("content-type", "text/html")
.header("Location", &path)
.header("Location", path)
.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path).into())
.body(format!("Redirecting to <a href=\"{path}\">{path}</a>...").into())
.unwrap_or_default()
}
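`redirect` now borrows its path as `&str`; an owned `String` at a call site coerces with `&`, so callers keep their buffer instead of giving it up, which is why `redirect(path)` becomes `redirect(&path)` earlier in this diff. A reduced sketch of just the body-building part:

fn redirect(path: &str) -> String {
	format!("Redirecting to <a href=\"{path}\">{path}</a>...")
}

fn main() {
	let owned = String::from("/r/rust");
	// &String coerces to &str; literals work directly.
	println!("{}", redirect(&owned));
	println!("{}", redirect("/settings"));
}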

/// Renders a generic error landing page.
pub async fn error(req: Request<Body>, msg: impl ToString) -> Result<Response<Body>, String> {
pub async fn error(req: Request<Body>, msg: &str) -> Result<Response<Body>, String> {
error!("Error page rendered: {}", msg.to_string());
error!("Error page rendered: {msg}");
let url = req.uri().to_string();
let body = ErrorTemplate {
msg: msg.to_string(),
@ -1005,7 +1001,7 @@ pub async fn error(req: Request<Body>, msg: impl ToString) -> Result<Response<Bo
/// subreddits or posts or userpages for users Reddit has deemed NSFW will
/// be denied.
pub fn sfw_only() -> bool {
match crate::config::get_setting("REDLIB_SFW_ONLY") {
match get_setting("REDLIB_SFW_ONLY") {
Some(val) => val == "on",
None => false,
}
@ -1029,7 +1025,7 @@ pub async fn nsfw_landing(req: Request<Body>, req_url: String) -> Result<Respons

// Determine from the request URL if the resource is a subreddit, a user
// page, or a post.
let res: String = if !req.param("name").unwrap_or_default().is_empty() {
let resource: String = if !req.param("name").unwrap_or_default().is_empty() {
res_type = ResourceType::User;
req.param("name").unwrap_or_default()
} else if !req.param("id").unwrap_or_default().is_empty() {
@ -1041,7 +1037,7 @@ pub async fn nsfw_landing(req: Request<Body>, req_url: String) -> Result<Respons
};

let body = NSFWLandingTemplate {
res,
res: resource,
res_type,
prefs: Preferences::new(&req),
url: req_url,