From 4defb58f2a2acaf97afce21a3d5744e32fd5507a Mon Sep 17 00:00:00 2001
From: spikecodes <19519553+spikecodes@users.noreply.github.com>
Date: Thu, 20 May 2021 12:24:06 -0700
Subject: [PATCH] Optimizations and commenting
---
Cargo.lock | 6 +-
Cargo.toml | 2 +-
src/client.rs | 10 ++--
src/main.rs | 11 +---
src/post.rs | 144 ++++++++++++++++++++++-------------------------
src/search.rs | 54 +++++++-----------
src/server.rs | 51 +++++++++--------
src/settings.rs | 40 ++++++-------
src/subreddit.rs | 67 ++++++++++------------
src/user.rs | 35 +++++-------
src/utils.rs | 112 ++++++++++++++++--------------------
11 files changed, 241 insertions(+), 291 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 9541173..a95ffaa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -609,7 +609,7 @@ checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
[[package]]
name = "libreddit"
-version = "0.14.3"
+version = "0.14.4"
dependencies = [
"askama",
"async-recursion",
@@ -807,9 +807,9 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086"
[[package]]
name = "proc-macro2"
-version = "1.0.26"
+version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec"
+checksum = "f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038"
dependencies = [
"unicode-xid",
]
diff --git a/Cargo.toml b/Cargo.toml
index 46f13dd..27f9eee 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,7 +3,7 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
-version = "0.14.3"
+version = "0.14.4"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"
diff --git a/src/client.rs b/src/client.rs
index 76bae36..7dd5b39 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -9,7 +9,9 @@ use crate::server::RequestExt;
pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default());
+ // For each parameter in request
for (name, value) in req.params().iter() {
+ // Substitute the parameter's value into the URL
url = url.replace(&format!("{{{}}}", name), value);
}
@@ -29,14 +31,13 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
let mut builder = Request::get(url);
// Copy useful headers from original request
- let headers = req.headers();
for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
- if let Some(value) = headers.get(key) {
+ if let Some(value) = req.headers().get(key) {
builder = builder.header(key, value);
}
}
- let stream_request = builder.body(Body::default()).expect("stream");
+ let stream_request = builder.body(Body::empty()).map_err(|_| "Couldn't build empty body in stream".to_string())?;
client
.request(stream_request)
@@ -64,9 +65,10 @@ fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String>> {
// Prepare the HTTPS connector.
let https = hyper_rustls::HttpsConnector::with_native_roots();
- // Build the hyper client from the HTTPS connector.
+ // Construct the hyper client from the HTTPS connector.
let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
+ // Build request
let builder = Request::builder()
.method("GET")
.uri(&url)
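A note on the `stream` hunk above: building the request can no longer panic; a builder failure is converted into the function's `String` error and propagated with `?`. A minimal sketch of that pattern, using a hypothetical `build_stream_request` helper around hyper's request builder:

```rust
use hyper::{Body, Request};

fn build_stream_request(url: &str) -> Result<Request<Body>, String> {
	// `map_err` converts the builder's error into the String error type,
	// which `?` can then propagate where `expect("stream")` would panic
	Request::get(url)
		.body(Body::empty())
		.map_err(|_| "Couldn't build empty body in stream".to_string())
}
```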
diff --git a/src/main.rs b/src/main.rs
index 8420e56..1b41763 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,14 +1,7 @@
// Global specifiers
#![forbid(unsafe_code)]
#![warn(clippy::pedantic, clippy::all)]
-#![allow(
- clippy::needless_pass_by_value,
- clippy::match_wildcard_for_single_variants,
- clippy::cast_possible_truncation,
- clippy::similar_names,
- clippy::cast_possible_wrap,
- clippy::find_map
-)]
+#![allow(clippy::needless_pass_by_value, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::find_map)]
// Reference local files
mod post;
@@ -137,7 +130,7 @@ async fn main() {
let port = matches.value_of("port").unwrap_or("8080");
let hsts = matches.value_of("hsts");
- let listener = format!("{}:{}", address, port);
+ let listener = [address, ":", port].concat();
println!("Starting Libreddit...");
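The `listener` change above swaps `format!` for slice concatenation: when the pieces are already string slices, `concat` joins them without any format-string machinery. A standalone sketch:

```rust
fn main() {
	let (address, port) = ("0.0.0.0", "8080");
	// `concat` computes the total length, allocates once, and copies the slices in order
	let listener = [address, ":", port].concat();
	assert_eq!(listener, "0.0.0.0:8080");
}
```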
diff --git a/src/post.rs b/src/post.rs
index 0cbf0b8..6ddb7be 100644
--- a/src/post.rs
+++ b/src/post.rs
@@ -7,8 +7,6 @@ use crate::utils::{error, format_num, format_url, param, rewrite_urls, setting,
use hyper::{Body, Request, Response};
-use async_recursion::async_recursion;
-
use askama::Template;
// STRUCTS
@@ -52,10 +50,10 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
// Send a request to the url, receive JSON in response
match json(path, quarantined).await {
// Otherwise, grab the JSON output from the request
- Ok(res) => {
+ Ok(response) => {
// Parse the JSON into Post and Comment structs
- let post = parse_post(&res[0]).await;
- let comments = parse_comments(&res[1], &post.permalink, &post.author.name, highlighted_comment).await;
+ let post = parse_post(&response[0]).await;
+ let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment);
// Use the Post and Comment structs to generate a website to show users
template(PostTemplate {
@@ -151,79 +149,71 @@ async fn parse_post(json: &serde_json::Value) -> Post {
}
// COMMENTS
-#[async_recursion]
-async fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str) -> Vec<Comment> {
- // Separate the comment JSON into a Vector of comments
- let comment_data = match json["data"]["children"].as_array() {
- Some(f) => f.to_owned(),
- None => Vec::new(),
- };
-
- let mut comments: Vec<Comment> = Vec::new();
+fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str) -> Vec<Comment> {
+ // Parse the comment JSON into a Vector of Comments
+ let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
// For each comment, retrieve the values to build a Comment object
- for comment in comment_data {
- let kind = comment["kind"].as_str().unwrap_or_default().to_string();
- let data = &comment["data"];
-
- let unix_time = data["created_utc"].as_f64().unwrap_or_default();
- let (rel_time, created) = time(unix_time);
-
- let edited = match data["edited"].as_f64() {
- Some(stamp) => time(stamp),
- None => (String::new(), String::new()),
- };
-
- let score = data["score"].as_i64().unwrap_or(0);
- let body = rewrite_urls(&val(&comment, "body_html"));
-
- // If this comment contains replies, handle those too
- let replies: Vec<Comment> = if data["replies"].is_object() {
- parse_comments(&data["replies"], post_link, post_author, highlighted_comment).await
- } else {
- Vec::new()
- };
-
- let parent_kind_and_id = val(&comment, "parent_id");
- let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
-
- let id = val(&comment, "id");
- let highlighted = id == highlighted_comment;
-
- comments.push(Comment {
- id,
- kind,
- parent_id: parent_info[1].to_string(),
- parent_kind: parent_info[0].to_string(),
- post_link: post_link.to_string(),
- post_author: post_author.to_string(),
- body,
- author: Author {
- name: val(&comment, "author"),
- flair: Flair {
- flair_parts: FlairPart::parse(
- data["author_flair_type"].as_str().unwrap_or_default(),
- data["author_flair_richtext"].as_array(),
- data["author_flair_text"].as_str(),
- ),
- text: esc!(&comment, "link_flair_text"),
- background_color: val(&comment, "author_flair_background_color"),
- foreground_color: val(&comment, "author_flair_text_color"),
- },
- distinguished: val(&comment, "distinguished"),
- },
- score: if data["score_hidden"].as_bool().unwrap_or_default() {
- ("\u{2022}".to_string(), "Hidden".to_string())
- } else {
- format_num(score)
- },
- rel_time,
- created,
- edited,
- replies,
- highlighted,
- });
- }
-
comments
+ .into_iter()
+ .map(|comment| {
+ let kind = comment["kind"].as_str().unwrap_or_default().to_string();
+ let data = &comment["data"];
+
+ let unix_time = data["created_utc"].as_f64().unwrap_or_default();
+ let (rel_time, created) = time(unix_time);
+
+ let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time);
+
+ let score = data["score"].as_i64().unwrap_or(0);
+ let body = rewrite_urls(&val(&comment, "body_html"));
+
+ // If this comment contains replies, handle those too
+ let replies: Vec<Comment> = if data["replies"].is_object() {
+ parse_comments(&data["replies"], post_link, post_author, highlighted_comment)
+ } else {
+ Vec::new()
+ };
+
+ let parent_kind_and_id = val(&comment, "parent_id");
+ let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
+
+ let id = val(&comment, "id");
+ let highlighted = id == highlighted_comment;
+
+ Comment {
+ id,
+ kind,
+ parent_id: parent_info[1].to_string(),
+ parent_kind: parent_info[0].to_string(),
+ post_link: post_link.to_string(),
+ post_author: post_author.to_string(),
+ body,
+ author: Author {
+ name: val(&comment, "author"),
+ flair: Flair {
+ flair_parts: FlairPart::parse(
+ data["author_flair_type"].as_str().unwrap_or_default(),
+ data["author_flair_richtext"].as_array(),
+ data["author_flair_text"].as_str(),
+ ),
+ text: esc!(&comment, "link_flair_text"),
+ background_color: val(&comment, "author_flair_background_color"),
+ foreground_color: val(&comment, "author_flair_text_color"),
+ },
+ distinguished: val(&comment, "distinguished"),
+ },
+ score: if data["score_hidden"].as_bool().unwrap_or_default() {
+ ("\u{2022}".to_string(), "Hidden".to_string())
+ } else {
+ format_num(score)
+ },
+ rel_time,
+ created,
+ edited,
+ replies,
+ highlighted,
+ }
+ })
+ .collect()
}
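The rewrite above drops the `async-recursion` dependency: `parse_comments` no longer awaits anything, so it can recurse directly inside `map` with no boxed futures. A reduced sketch of that shape, assuming `serde_json` and a hypothetical two-field `Comment`:

```rust
use serde_json::Value;

struct Comment {
	body: String,
	replies: Vec<Comment>,
}

fn parse(json: &Value) -> Vec<Comment> {
	json["children"].as_array().map_or(Vec::new(), |list| {
		list.iter()
			.map(|c| Comment {
				body: c["body"].as_str().unwrap_or_default().to_string(),
				// Plain recursion: no await points, so no `#[async_recursion]`
				replies: parse(&c["replies"]),
			})
			.collect()
	})
}
```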
diff --git a/src/search.rs b/src/search.rs
index edf4e3c..41bb763 100644
--- a/src/search.rs
+++ b/src/search.rs
@@ -52,10 +52,8 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
let sort = param(&path, "sort").unwrap_or_else(|| "relevance".to_string());
- let subreddits = match param(&path, "restrict_sr") {
- None => search_subreddits(&query).await,
- Some(_) => Vec::new(),
- };
+ // If search is not restricted to this subreddit, show other subreddits in search results
+ let subreddits = param(&path, "restrict_sr").map_or(search_subreddits(&query).await, |_| Vec::new());
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
@@ -90,35 +88,25 @@ async fn search_subreddits(q: &str) -> Vec<Subreddit> {
let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
// Send a request to the url
- match json(subreddit_search_path, false).await {
- // If success, receive JSON in response
- Ok(response) => {
- match response["data"]["children"].as_array() {
- // For each subreddit from subreddit list
- Some(list) => list
- .iter()
- .map(|subreddit| {
- // Fetch subreddit icon either from the community_icon or icon_img value
- let community_icon: &str = subreddit["data"]["community_icon"].as_str().map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
- let icon = if community_icon.is_empty() {
- val(&subreddit, "icon_img")
- } else {
- community_icon.to_string()
- };
+ json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"]
+ .as_array()
+ .map(ToOwned::to_owned)
+ .unwrap_or_default()
+ .iter()
+ .map(|subreddit| {
+ // For each subreddit from subreddit list
+ // Fetch subreddit icon either from the community_icon or icon_img value
+ let icon = subreddit["data"]["community_icon"]
+ .as_str()
+ .map_or_else(|| val(&subreddit, "icon_img"), ToString::to_string);
- Subreddit {
- name: val(subreddit, "display_name_prefixed"),
- url: val(subreddit, "url"),
- icon: format_url(&icon),
- description: val(subreddit, "public_description"),
- subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64),
- }
- })
- .collect::<Vec<Subreddit>>(),
- _ => Vec::new(),
+ Subreddit {
+ name: val(subreddit, "display_name_prefixed"),
+ url: val(subreddit, "url"),
+ icon: format_url(&icon),
+ description: val(subreddit, "public_description"),
+ subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64),
}
- }
- // If the Reddit API returns an error, exit this function
- _ => Vec::new(),
- }
+ })
+ .collect::<Vec<Subreddit>>()
}
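The collapsed `search_subreddits` leans on two defaults: `unwrap_or_default` turns a failed request into `Value::Null`, and indexing `Null` yields `Null`, so every error path falls through to the empty `Vec` in `map_or`. The same chain in miniature, with a hypothetical `names` function:

```rust
use serde_json::Value;

fn names(res: Result<Value, String>) -> Vec<String> {
	// Err -> Null -> Null["data"]["children"] -> not an array -> empty Vec
	res.unwrap_or_default()["data"]["children"]
		.as_array()
		.map_or(Vec::new(), |list| {
			list.iter()
				.map(|s| s["name"].as_str().unwrap_or_default().to_string())
				.collect()
		})
}

fn main() {
	assert!(names(Err("offline".to_string())).is_empty());
}
```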
diff --git a/src/server.rs b/src/server.rs
index 90b04de..05a7b10 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -69,29 +69,31 @@ impl RequestExt for Request<Body> {
}
fn cookies(&self) -> Vec<Cookie> {
- let mut cookies = Vec::new();
- if let Some(header) = self.headers().get("Cookie") {
- for cookie in header.to_str().unwrap_or_default().split("; ") {
- cookies.push(Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")));
- }
- }
- cookies
+ self.headers().get("Cookie").map_or(Vec::new(), |header| {
+ header
+ .to_str()
+ .unwrap_or_default()
+ .split("; ")
+ .map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
+ .collect()
+ })
}
fn cookie(&self, name: &str) -> Option<Cookie> {
- self.cookies().iter().find(|c| c.name() == name).map(std::borrow::ToOwned::to_owned)
+ self.cookies().into_iter().find(|c| c.name() == name)
}
}
impl ResponseExt for Response<Body> {
fn cookies(&self) -> Vec<Cookie> {
- let mut cookies = Vec::new();
- for header in self.headers().get_all("Cookie") {
- if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
- cookies.push(cookie);
- }
- }
- cookies
+ self.headers().get("Cookie").map_or(Vec::new(), |header| {
+ header
+ .to_str()
+ .unwrap_or_default()
+ .split("; ")
+ .map(|cookie| Cookie::parse(cookie).unwrap_or_else(|_| Cookie::named("")))
+ .collect()
+ })
}
fn insert_cookie(&mut self, cookie: Cookie) {
@@ -144,6 +146,7 @@ impl Server {
pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
let make_svc = make_service_fn(move |_conn| {
+ // Clone these values so the service closure below can take ownership of them
let router = self.router.clone();
let default_headers = self.default_headers.clone();
@@ -159,7 +162,7 @@ impl Server {
let mut path = req.uri().path().replace("//", "/");
// Remove trailing slashes
- if path.ends_with('/') && path != "/" {
+ if path != "/" && path.ends_with('/') {
path.pop();
}
@@ -198,17 +201,15 @@ impl Server {
}
});
+ // Build SocketAddr from provided address
let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));
- let server = HyperServer::bind(address).serve(make_svc);
+ // Bind server to address specified above. Gracefully shut down if CTRL+C is pressed
+ let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async {
+ // Wait for the CTRL+C signal
+ tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler")
+ });
- let graceful = server.with_graceful_shutdown(shutdown_signal());
-
- graceful.boxed()
+ server.boxed()
}
}
-
-async fn shutdown_signal() {
- // Wait for the CTRL+C signal
- tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
-}
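The `listen` change inlines the shutdown future instead of keeping a named `shutdown_signal` helper. A self-contained sketch of the same wiring, assuming hyper 0.14 and tokio with the `signal` feature enabled, with a trivial service standing in for the real router:

```rust
use std::convert::Infallible;

use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Response, Server};

#[tokio::main]
async fn main() -> Result<(), hyper::Error> {
	let make_svc = make_service_fn(|_conn| async {
		Ok::<_, Infallible>(service_fn(|_req| async {
			Ok::<_, Infallible>(Response::new(Body::from("Hello")))
		}))
	});

	// The shutdown future is passed inline rather than via a helper fn
	Server::bind(&([127, 0, 0, 1], 8080).into())
		.serve(make_svc)
		.with_graceful_shutdown(async {
			tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
		})
		.await
}
```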
diff --git a/src/settings.rs b/src/settings.rs
index 3e80e9c..9f2439e 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -43,12 +43,12 @@ pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
let (parts, mut body) = req.into_parts();
// Grab existing cookies
- let mut cookies = Vec::new();
- for header in parts.headers.get_all("Cookie") {
- if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
- cookies.push(cookie);
- }
- }
+ let _cookies: Vec<Cookie> = parts
+ .headers
+ .get_all("Cookie")
+ .iter()
+ .filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
+ .collect();
// Aggregate the body...
// let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?;
@@ -62,22 +62,22 @@ pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
- let mut res = redirect("/settings".to_string());
+ let mut response = redirect("/settings".to_string());
for &name in &PREFS {
match form.get(name) {
- Some(value) => res.insert_cookie(
+ Some(value) => response.insert_cookie(
Cookie::build(name.to_owned(), value.to_owned())
.path("/")
.http_only(true)
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
.finish(),
),
- None => res.remove_cookie(name.to_string()),
+ None => response.remove_cookie(name.to_string()),
};
}
- Ok(res)
+ Ok(response)
}
fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
@@ -85,12 +85,12 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
// Grab existing cookies
- let mut cookies = Vec::new();
- for header in parts.headers.get_all("Cookie") {
- if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
- cookies.push(cookie);
- }
- }
+ let _cookies: Vec<Cookie> = parts
+ .headers
+ .get_all("Cookie")
+ .iter()
+ .filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
+ .collect();
let query = parts.uri.query().unwrap_or_default().as_bytes();
@@ -101,11 +101,11 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
None => "/".to_string(),
};
- let mut res = redirect(path);
+ let mut response = redirect(path);
for name in [PREFS.to_vec(), vec!["subscriptions"]].concat() {
match form.get(name) {
- Some(value) => res.insert_cookie(
+ Some(value) => response.insert_cookie(
Cookie::build(name.to_owned(), value.to_owned())
.path("/")
.http_only(true)
@@ -114,13 +114,13 @@ fn set_cookies_method(req: Request<Body>, remove_cookies: bool) -> Response<Body> {
None => {
if remove_cookies {
- res.remove_cookie(name.to_string())
+ response.remove_cookie(name.to_string())
}
}
};
}
- res
+ response
}
// Set cookies using response "Set-Cookie" header
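The cookie-grabbing rewrite replaces a mutable accumulator with `filter_map`: headers that are not valid UTF-8 or not parseable cookies are simply skipped. The pattern in isolation, assuming the `cookie` crate (with `into_owned` added here so the return type does not borrow from the header map):

```rust
use cookie::Cookie;
use hyper::HeaderMap;

fn parse_cookies(headers: &HeaderMap) -> Vec<Cookie<'static>> {
	headers
		.get_all("Cookie")
		.iter()
		// `.ok()` drops parse failures, so no placeholder cookies are kept
		.filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok())
		.map(Cookie::into_owned)
		.collect()
}
```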
diff --git a/src/subreddit.rs b/src/subreddit.rs
index 3909451..7027a76 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -133,15 +133,15 @@ pub fn quarantine(req: Request<Body>, sub: String) -> Result<Response<Body>, String> {
pub async fn add_quarantine_exception(req: Request<Body>) -> Result<Response<Body>, String> {
let subreddit = req.param("sub").ok_or("Invalid URL")?;
let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?;
- let mut res = redirect(redir);
- res.insert_cookie(
+ let mut response = redirect(redir);
+ response.insert_cookie(
Cookie::build(&format!("allow_quaran_{}", subreddit.to_lowercase()), "true")
.path("/")
.http_only(true)
.expires(cookie::Expiration::Session)
.finish(),
);
- Ok(res)
+ Ok(response)
}
pub fn can_access_quarantine(req: &Request<Body>, sub: &str) -> bool {
@@ -211,13 +211,13 @@ pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String> {
format!("/r/{}", sub)
};
- let mut res = redirect(path);
+ let mut response = redirect(path);
// Delete cookie if empty, else set
if sub_list.is_empty() {
- res.remove_cookie("subscriptions".to_string());
+ response.remove_cookie("subscriptions".to_string());
} else {
- res.insert_cookie(
+ response.insert_cookie(
Cookie::build("subscriptions", sub_list.join("+"))
.path("/")
.http_only(true)
@@ -226,7 +226,7 @@ pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String> {
);
}
- Ok(res)
+ Ok(response)
}
pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
@@ -260,6 +260,7 @@ pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> {
let sub = req.param("sub").unwrap_or_else(|| "reddit.com".to_string());
let quarantined = can_access_quarantine(&req, &sub);
+
// Handle random subreddits
if let Ok(random) = catch_random(&sub, "/about/sidebar").await {
return Ok(random);
@@ -307,10 +308,9 @@ async fn moderators_list(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
let path: String = format!("/r/{}/about/moderators.json?raw_json=1", sub);
// Retrieve response
- let response = json(path, quarantined).await?["data"]["children"].clone();
- Ok(
+ json(path, quarantined).await.map(|response| {
// Traverse json tree and format into list of strings
- response
+ response["data"]["children"]
.as_array()
.unwrap_or(&Vec::new())
.iter()
@@ -322,8 +322,8 @@ async fn moderators_list(sub: &str, quarantined: bool) -> Result<Vec<String>, String> {
Some(name.to_string())
}
})
- .collect::<Vec<String>>(),
- )
+ .collect::<Vec<String>>()
+ })
}
// SUBREDDIT
@@ -332,32 +332,25 @@ async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> {
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
// Send a request to the url
- match json(path, quarantined).await {
- // If success, receive JSON in response
- Ok(res) => {
- // Metadata regarding the subreddit
- let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
- let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
+ let res = json(path, quarantined).await?;
- // Fetch subreddit icon either from the community_icon or icon_img value
- let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or_default();
- let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
+ // Metadata regarding the subreddit
+ let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
+ let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
- let sub = Subreddit {
- name: esc!(&res, "display_name"),
- title: esc!(&res, "title"),
- description: esc!(&res, "public_description"),
- info: rewrite_urls(&val(&res, "description_html").replace("\\", "")),
- moderators: moderators_list(sub, quarantined).await?,
- icon: format_url(&icon),
- members: format_num(members),
- active: format_num(active),
- wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
- };
+ // Fetch subreddit icon either from the community_icon or icon_img value
+ let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or_default();
+ let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
- Ok(sub)
- }
- // If the Reddit API returns an error, exit this function
- Err(msg) => return Err(msg),
- }
+ Ok(Subreddit {
+ name: esc!(&res, "display_name"),
+ title: esc!(&res, "title"),
+ description: esc!(&res, "public_description"),
+ info: rewrite_urls(&val(&res, "description_html").replace("\\", "")),
+ moderators: moderators_list(sub, quarantined).await?,
+ icon: format_url(&icon),
+ members: format_num(members),
+ active: format_num(active),
+ wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
+ })
}
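The `subreddit` rewrite above is a match-to-`?` conversion: the old code re-wrapped both arms by hand (`Ok(res) => ...`, `Err(msg) => return Err(msg)`), while `?` forwards the error directly and lets the body read straight through. Both shapes side by side, with a hypothetical `fetch` helper:

```rust
use serde_json::{json, Value};

async fn fetch() -> Result<Value, String> {
	Ok(json!({ "data": { "subscribers": 42 } }))
}

// Old shape: both arms handled explicitly
async fn members_via_match() -> Result<i64, String> {
	match fetch().await {
		Ok(res) => Ok(res["data"]["subscribers"].as_i64().unwrap_or_default()),
		Err(msg) => Err(msg),
	}
}

// New shape: `?` returns the Err for us
async fn members_via_try() -> Result<i64, String> {
	let res = fetch().await?;
	Ok(res["data"]["subscribers"].as_i64().unwrap_or_default())
}
```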
diff --git a/src/user.rs b/src/user.rs
index 199653a..b02e37e 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -61,27 +61,22 @@ async fn user(name: &str) -> Result<User, String> {
let path: String = format!("/user/{}/about.json?raw_json=1", name);
// Send a request to the url
- match json(path, false).await {
- // If success, receive JSON in response
- Ok(res) => {
- // Grab creation date as unix timestamp
- let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
+ json(path, false).await.map(|res| {
+ // Grab creation date as unix timestamp
+ let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
- // Closure used to parse JSON from Reddit APIs
- let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
+ // Closure used to parse JSON from Reddit APIs
+ let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
- // Parse the JSON output into a User struct
- Ok(User {
- name: name.to_string(),
- title: esc!(about("title")),
- icon: format_url(&about("icon_img")),
- karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
- created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
- banner: esc!(about("banner_img")),
- description: about("public_description"),
- })
+ // Parse the JSON output into a User struct
+ User {
+ name: name.to_string(),
+ title: esc!(about("title")),
+ icon: format_url(&about("icon_img")),
+ karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
+ created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
+ banner: esc!(about("banner_img")),
+ description: about("public_description"),
}
- // If the Reddit API returns an error, exit this function
- Err(msg) => return Err(msg),
- }
+ })
}
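`user` gets the sibling treatment with `Result::map`: the success value is transformed into the struct and an `Err` passes through untouched. A reduced sketch with a hypothetical `fetch` and a two-field struct:

```rust
use serde_json::{json, Value};

struct Profile {
	name: String,
	karma: i64,
}

async fn fetch() -> Result<Value, String> {
	Ok(json!({ "data": { "total_karma": 100 } }))
}

async fn profile(name: &str) -> Result<Profile, String> {
	// On Ok, build the struct; on Err, the message is passed along as-is
	fetch().await.map(|res| Profile {
		name: name.to_string(),
		karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
	})
}
```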
diff --git a/src/utils.rs b/src/utils.rs
index 8147815..bb16fdb 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -463,83 +463,71 @@ pub fn format_url(url: &str) -> String {
if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
String::new()
} else {
- match Url::parse(url) {
- Ok(parsed) => {
- let domain = parsed.domain().unwrap_or_default();
+ Url::parse(url).map_or(String::new(), |parsed| {
+ let domain = parsed.domain().unwrap_or_default();
- let capture = |regex: &str, format: &str, segments: i16| {
- Regex::new(regex)
- .map(|re| match re.captures(url) {
- Some(caps) => match segments {
- 1 => [format, &caps[1]].join(""),
- 2 => [format, &caps[1], "/", &caps[2]].join(""),
- _ => String::new(),
- },
- None => String::new(),
- })
- .unwrap_or_default()
+ let capture = |regex: &str, format: &str, segments: i16| {
+ Regex::new(regex).map_or(String::new(), |re| {
+ re.captures(url).map_or(String::new(), |caps| match segments {
+ 1 => [format, &caps[1]].join(""),
+ 2 => [format, &caps[1], "/", &caps[2]].join(""),
+ _ => String::new(),
+ })
+ })
+ };
+
+ macro_rules! chain {
+ () => {
+ {
+ String::new()
+ }
};
- macro_rules! chain {
- () => {
- {
- String::new()
+ ( $first_fn:expr, $($other_fns:expr), *) => {
+ {
+ let result = $first_fn;
+ if result.is_empty() {
+ chain!($($other_fns,)*)
}
- };
-
- ( $first_fn:expr, $($other_fns:expr), *) => {
+ else
{
- let result = $first_fn;
- if result.is_empty() {
- chain!($($other_fns,)*)
- }
- else
- {
- result
- }
+ result
}
- };
- }
-
- match domain {
- "v.redd.it" => chain!(
- capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
- capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2)
- ),
- "i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
- "a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
- "b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
- "emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
- "preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1),
- "external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1),
- "styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
- "www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
- _ => String::new(),
- }
+ }
+ };
}
- Err(_) => String::new(),
- }
+
+ match domain {
+ "v.redd.it" => chain!(
+ capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
+ capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2)
+ ),
+ "i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
+ "a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
+ "b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
+ "emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
+ "preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1),
+ "external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1),
+ "styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
+ "www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
+ _ => String::new(),
+ }
+ })
}
}
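The reindented `chain!` macro above evaluates its arguments in order and returns the first non-empty `String`, recursing until the empty base case. Copied into a standalone form and exercised with the two-argument shape `format_url` actually uses:

```rust
macro_rules! chain {
	() => {
		String::new()
	};
	( $first_fn:expr, $($other_fns:expr),* ) => {{
		let result = $first_fn;
		if result.is_empty() {
			chain!($($other_fns,)*)
		} else {
			result
		}
	}};
}

fn main() {
	// The first expression is empty, so evaluation falls through to the second
	let url = chain!(String::new(), "/vid/abc/720.mp4".to_string());
	assert_eq!(url, "/vid/abc/720.mp4");
}
```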
// Rewrite Reddit links to Libreddit in body of text
pub fn rewrite_urls(input_text: &str) -> String {
- let text1 = match Regex::new(r#"href="(https|http|)://(www.|old.|np.|amp.|)(reddit).(com)/"#) {
- Ok(re) => re.replace_all(input_text, r#"href="/"#).to_string(),
- Err(_) => String::new(),
- };
+ let text1 = Regex::new(r#"href="(https|http|)://(www.|old.|np.|amp.|)(reddit).(com)/"#).map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string());
// Rewrite external media previews to Libreddit
- match Regex::new(r"https://external-preview\.redd\.it(.*)[^?]") {
- Ok(re) => {
- if re.is_match(&text1) {
- re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
- } else {
- text1
- }
+ Regex::new(r"https://external-preview\.redd\.it(.*)[^?]").map_or(String::new(), |re| {
+ if re.is_match(&text1) {
+ re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
+ } else {
+ text1
}
- Err(_) => String::new(),
- }
+ })
}
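And the `Regex::new(...).map_or` guard in `rewrite_urls`, reduced to its core: a pattern that fails to compile degrades to an empty string instead of a panic. A sketch assuming the `regex` crate, with a trimmed variant of the href pattern above:

```rust
use regex::Regex;

fn strip_reddit_prefix(input: &str) -> String {
	// On a bad pattern, map_or returns the default; on success, rewrite links
	Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|)reddit\.com/"#)
		.map_or(String::new(), |re| re.replace_all(input, r#"href="/"#).to_string())
}

fn main() {
	let out = strip_reddit_prefix(r#"<a href="https://www.reddit.com/r/rust">"#);
	assert_eq!(out, r#"<a href="/r/rust">"#);
}
```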
// Append `m` and `k` for millions and thousands respectively