diff --git a/Cargo.lock b/Cargo.lock index df32cb1..d041577 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -31,7 +31,7 @@ dependencies = [ "futures-util", "http", "log", - "rustls 0.18.1", + "rustls", "tokio-rustls", "trust-dns-proto", "trust-dns-resolver", @@ -193,10 +193,10 @@ dependencies = [ "actix-service", "actix-utils", "futures-util", - "rustls 0.18.1", + "rustls", "tokio-rustls", "webpki", - "webpki-roots 0.20.0", + "webpki-roots", ] [[package]] @@ -249,7 +249,7 @@ dependencies = [ "mime", "pin-project 1.0.4", "regex", - "rustls 0.18.1", + "rustls", "serde", "serde_json", "serde_urlencoded", @@ -393,7 +393,7 @@ dependencies = [ "mime", "percent-encoding", "rand", - "rustls 0.18.1", + "rustls", "serde", "serde_json", "serde_urlencoded", @@ -529,12 +529,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chunked_transfer" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7477065d45a8fe57167bf3cf8bcd3729b54cfcb81cca49bda2d038ea89ae82ca" - [[package]] name = "const_fn" version = "0.4.5" @@ -678,9 +672,9 @@ checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" [[package]] name = "futures" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c70be434c505aee38639abccb918163b63158a4b4bb791b45b7023044bdc3c9c" +checksum = "309f13e3f4be6d5917178c84db67c0b9a09177ac16d4f9a7313a767a68adaa77" dependencies = [ "futures-channel", "futures-core", @@ -692,9 +686,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f01c61843314e95f96cc9245702248733a3a3d744e43e2e755e3c7af8348a0a9" +checksum = "7a3b03bd32f6ec7885edeb99acd1e47e20e34fd4dfd3c6deed6fcac8a9d28f6a" dependencies = [ "futures-core", 
"futures-sink", @@ -702,21 +696,21 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db8d3b0917ff63a2a96173133c02818fac4a746b0a57569d3baca9ec0e945e08" +checksum = "ed8aeae2b6ab243ebabe6f54cd4cf53054d98883d5d326128af7d57a9ca5cd3d" [[package]] name = "futures-io" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e37c1a51b037b80922864b8eed90692c5cd8abd4c71ce49b77146caa47f3253b" +checksum = "d41234e71d5e8ca73d01563974ef6f50e516d71e18f1a2f1184742e31f5d469f" [[package]] name = "futures-macro" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f8719ca0e1f3c5e34f3efe4570ef2c0610ca6da85ae7990d472e9cbfba13664" +checksum = "3520e0eb4e704e88d771b92d51273ee212997f0d8282f17f5d8ff1cb39104e42" dependencies = [ "proc-macro-hack", "proc-macro2", @@ -726,24 +720,24 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6adabac1290109cfa089f79192fb6244ad2c3f1cc2281f3e1dd987592b71feb" +checksum = "c72d188479368953c6c8c7140e40d7a4401674ab3b98a41e60e515d6cbdbe5de" [[package]] name = "futures-task" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92a0843a2ff66823a8f7c77bffe9a09be2b64e533562c412d63075643ec0038" +checksum = "08944cea9021170d383287169859c0ca8147d9ec285978393109954448f33cc7" dependencies = [ "once_cell", ] [[package]] name = "futures-util" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "036a2107cdeb57f6d7322f1b6c363dad67cd63ca3b7d1b925bdf75bd5d96cda9" +checksum = "d3dd206efbe2ca683b2ce138ccdf61e1b0a63f5816dcedc9d8654c500ba0cea6" dependencies = [ "futures-channel", 
"futures-core", @@ -992,7 +986,6 @@ dependencies = [ "serde", "serde_json", "time", - "ureq", "url", ] @@ -1268,9 +1261,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro-nested" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a" +checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" [[package]] name = "proc-macro2" @@ -1420,19 +1413,6 @@ dependencies = [ "webpki", ] -[[package]] -name = "rustls" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "064fd21ff87c6e87ed4506e68beb42459caa4a0e2eb144932e6776768556980b" -dependencies = [ - "base64 0.13.0", - "log", - "ring", - "sct", - "webpki", -] - [[package]] name = "ryu" version = "1.0.5" @@ -1769,7 +1749,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e12831b255bcfa39dc0436b01e19fea231a37db570686c06ee72c423479f889a" dependencies = [ "futures-core", - "rustls 0.18.1", + "rustls", "tokio", "webpki", ] @@ -1910,22 +1890,6 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" -[[package]] -name = "ureq" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96014ded8c85822677daee4f909d18acccca744810fd4f8ffc492c284f2324bc" -dependencies = [ - "base64 0.13.0", - "chunked_transfer", - "log", - "once_cell", - "rustls 0.19.0", - "url", - "webpki", - "webpki-roots 0.21.0", -] - [[package]] name = "url" version = "2.2.0" @@ -2033,15 +1997,6 @@ dependencies = [ "webpki", ] -[[package]] -name = "webpki-roots" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82015b7e0b8bad8185994674a13a93306bea76cf5a16c5a181382fd3a5ec2376" -dependencies = 
[ - "webpki", -] - [[package]] name = "widestring" version = "0.4.3" diff --git a/Cargo.toml b/Cargo.toml index cc75352..e35d9e5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,6 @@ edition = "2018" [dependencies] base64 = "0.13.0" actix-web = { version = "3.3.2", features = ["rustls"] } -ureq = "2.0.1" askama = "0.10.5" serde = { version = "1.0.118", default_features = false, features = ["derive"] } serde_json = "1.0" diff --git a/src/main.rs b/src/main.rs index 0c3870f..e1ed067 100644 --- a/src/main.rs +++ b/src/main.rs @@ -18,7 +18,7 @@ async fn style() -> HttpResponse { async fn robots() -> HttpResponse { HttpResponse::Ok() .header("Cache-Control", "public, max-age=1209600, s-maxage=86400") - .body(include_str!("../static/robots.txt")) + .body("User-agent: *\nAllow: /") } async fn favicon() -> HttpResponse { @@ -42,7 +42,7 @@ async fn main() -> std::io::Result<()> { match arg.split('=').collect::<Vec<&str>>()[0] { "--address" | "-a" => address = arg.split('=').collect::<Vec<&str>>()[1].to_string(), // "--redirect-https" | "-r" => https = true, - _ => {} + _ => (), } } @@ -51,55 +51,61 @@ HttpServer::new(|| { App::new() - // REDIRECT TO HTTPS - // .wrap(middleware::DefaultHeaders::new().header("Strict-Transport-Security", "max-age=31536000")) - // .wrap_fn(|req, srv| { - // let fut = srv.call(req); - // async { - // let mut res = fut.await?; - // if https { - // res.headers_mut().insert( - // actix_web::http::header::STRICT_TRANSPORT_SECURITY, actix_web::http::HeaderValue::from_static("max-age=31536000;"), - // ); - // } - // Ok(res) - // } - // }) - // TRAILING SLASH MIDDLEWARE + // Redirect to HTTPS + // .wrap_fn(|req, srv| { let fut = srv.call(req); async { let mut res = fut.await?; if https {} Ok(res) } }) + // Append trailing slash and remove double slashes .wrap(middleware::NormalizePath::default()) - // DEFAULT SERVICE + // Default service in case no routes match .default_service(web::get().to(|| utils::error("Nothing
here".to_string()))) - // GENERAL SERVICES + // Read static files .route("/style.css/", web::get().to(style)) .route("/favicon.ico/", web::get().to(favicon)) .route("/thumbnail.svg/", web::get().to(thumbnail)) .route("/robots.txt/", web::get().to(robots)) - // SETTINGS SERVICE - .route("/settings/", web::get().to(settings::get)) - .route("/settings/", web::post().to(settings::set)) - // PROXY SERVICE + // Proxy media through Libreddit .route("/proxy/{url:.*}/", web::get().to(proxy::handler)) - // SEARCH SERVICES - .route("/search/", web::get().to(search::find)) - .route("r/{sub}/search/", web::get().to(search::find)) - // USER SERVICES - .route("/u/{username}/", web::get().to(user::profile)) - .route("/user/{username}/", web::get().to(user::profile)) - // WIKI SERVICES - .route("/wiki/", web::get().to(subreddit::wiki)) - .route("/wiki/{page}/", web::get().to(subreddit::wiki)) - .route("/r/{sub}/wiki/", web::get().to(subreddit::wiki)) - .route("/r/{sub}/wiki/{page}/", web::get().to(subreddit::wiki)) - // SUBREDDIT SERVICES - .route("/r/{sub}/", web::get().to(subreddit::page)) - .route("/r/{sub}/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page)) - // POPULAR SERVICES - .route("/", web::get().to(subreddit::page)) - .route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page)) - // POST SERVICES - .route("/{id:.{5,6}}/", web::get().to(post::item)) - .route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::item)) - .route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item)) + // Browse user profile + .route("/{scope:u|user}/{username}/", web::get().to(user::profile)) + // Configure settings + .service(web::resource("/settings/").route(web::get().to(settings::get)).route(web::post().to(settings::set))) + // Subreddit services + .service( + web::scope("/r/{sub}") + // See posts and info about subreddit + .route("/", web::get().to(subreddit::page)) + 
.route("/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page)) + // View post on subreddit + .service( + web::scope("/comments/{id}/{title}") + .route("/", web::get().to(post::item)) + .route("/{comment_id}/", web::get().to(post::item)), + ) + // Search inside subreddit + .route("/search/", web::get().to(search::find)) + // View wiki of subreddit + .service( + web::scope("/wiki") + .route("/", web::get().to(subreddit::wiki)) + .route("/{page}/", web::get().to(subreddit::wiki)), + ), + ) + // Universal services + .service( + web::scope("") + // Front page + .route("/", web::get().to(subreddit::page)) + .route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page)) + // View Reddit wiki + .service( + web::scope("/wiki") + .route("/", web::get().to(subreddit::wiki)) + .route("/{page}/", web::get().to(subreddit::wiki)), + ) + // Search all of Reddit + .route("/search/", web::get().to(search::find)) + // Short link for post + .route("/{id:.{5,6}}/", web::get().to(post::item)), + ) }) .bind(&address) .unwrap_or_else(|e| panic!("Cannot bind to the address {}: {}", address, e)) diff --git a/src/post.rs b/src/post.rs index adc4708..5850fa8 100644 --- a/src/post.rs +++ b/src/post.rs @@ -1,5 +1,5 @@ // CRATES -use crate::utils::{cookie, error, format_num, format_url, media, parse_rich_flair, param, prefs, request, rewrite_url, val, Comment, Flags, Flair, Post, Preferences}; +use crate::utils::{cookie, error, format_num, format_url, media, param, parse_rich_flair, prefs, request, rewrite_url, val, Comment, Flags, Flair, Post, Preferences}; use actix_web::{HttpRequest, HttpResponse}; use async_recursion::async_recursion; @@ -57,7 +57,7 @@ pub async fn item(req: HttpRequest) -> HttpResponse { HttpResponse::Ok().content_type("text/html").body(s) } // If the Reddit API returns an error, exit and send error page to user - Err(msg) => error(msg.to_string()).await, + Err(msg) => error(msg).await, } } @@ -82,8 +82,12 @@ async fn 
parse_post(json: &serde_json::Value) -> Post { community: val(post, "subreddit"), body: rewrite_url(&val(post, "selftext_html")), author: val(post, "author"), - author_flair: Flair{ - flair_parts: parse_rich_flair(val(post, "author_flair_type"), post["data"]["author_flair_richtext"].as_array(), post["data"]["author_flair_text"].as_str()), + author_flair: Flair { + flair_parts: parse_rich_flair( + val(post, "author_flair_type"), + post["data"]["author_flair_richtext"].as_array(), + post["data"]["author_flair_text"].as_str(), + ), background_color: val(post, "author_flair_background_color"), foreground_color: val(post, "author_flair_text_color"), }, @@ -92,8 +96,12 @@ async fn parse_post(json: &serde_json::Value) -> Post { upvote_ratio: ratio as i64, post_type, thumbnail: format_url(val(post, "thumbnail").as_str()), - flair: Flair{ - flair_parts: parse_rich_flair(val(post, "link_flair_type"), post["data"]["link_flair_richtext"].as_array(), post["data"]["link_flair_text"].as_str()), + flair: Flair { + flair_parts: parse_rich_flair( + val(post, "link_flair_type"), + post["data"]["link_flair_richtext"].as_array(), + post["data"]["link_flair_text"].as_str(), + ), background_color: val(post, "link_flair_background_color"), foreground_color: if val(post, "link_flair_text_color") == "dark" { "black".to_string() } else { @@ -145,8 +153,12 @@ async fn parse_comments(json: &serde_json::Value) -> Vec<Comment> { score: format_num(score), time: OffsetDateTime::from_unix_timestamp(unix_time).format("%b %d %Y %H:%M UTC"), replies, - flair: Flair{ - flair_parts: parse_rich_flair(val(&comment, "author_flair_type"), comment["data"]["author_flair_richtext"].as_array(), comment["data"]["author_flair_text"].as_str()), + flair: Flair { + flair_parts: parse_rich_flair( + val(&comment, "author_flair_type"), + comment["data"]["author_flair_richtext"].as_array(), + comment["data"]["author_flair_text"].as_str(), + ), background_color: val(&comment, "author_flair_background_color"), foreground_color: 
val(&comment, "author_flair_text_color"), }, diff --git a/src/proxy.rs b/src/proxy.rs index c874b1e..bca2b0b 100644 --- a/src/proxy.rs +++ b/src/proxy.rs @@ -41,9 +41,9 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> Err(error::ErrorForbidden("Resource must be from Reddit")) } } - Err(_) => Err(error::ErrorBadRequest("Can't parse base64 into URL")), + _ => Err(error::ErrorBadRequest("Can't parse base64 into URL")), } } - Err(_) => Err(error::ErrorBadRequest("Can't decode base64")), + _ => Err(error::ErrorBadRequest("Can't decode base64")), } } diff --git a/src/search.rs b/src/search.rs index 583b832..5f43b83 100644 --- a/src/search.rs +++ b/src/search.rs @@ -90,6 +90,6 @@ pub async fn find(req: HttpRequest) -> HttpResponse { .render() .unwrap(), ), - Err(msg) => error(msg.to_string()).await, + Err(msg) => error(msg).await, } } diff --git a/src/subreddit.rs b/src/subreddit.rs index cda7404..7e68f46 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -58,33 +58,33 @@ pub async fn page(req: HttpRequest) -> HttpResponse { .unwrap(); HttpResponse::Ok().content_type("text/html").body(s) } - Err(msg) => error(msg.to_string()).await, + Err(msg) => error(msg).await, } } pub async fn wiki(req: HttpRequest) -> HttpResponse { - let sub = req.match_info().get("sub").unwrap_or("reddit.com"); - let page = req.match_info().get("page").unwrap_or("index"); + let sub = req.match_info().get("sub").unwrap_or("reddit.com").to_string(); + let page = req.match_info().get("page").unwrap_or("index").to_string(); let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page); match request(&path).await { Ok(res) => { let s = WikiTemplate { - sub: sub.to_string(), + sub, wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()), - page: page.to_string(), + page, prefs: prefs(req), } .render() .unwrap(); HttpResponse::Ok().content_type("text/html").body(s) } - Err(msg) => error(msg.to_string()).await, + Err(msg) => error(msg).await, } } // SUBREDDIT 
-async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> { +async fn subreddit(sub: &str) -> Result<Subreddit, String> { // Build the Reddit JSON API url let path: String = format!("/r/{}/about.json?raw_json=1", sub); diff --git a/src/user.rs b/src/user.rs index ea54f55..9a7502f 100644 --- a/src/user.rs +++ b/src/user.rs @@ -1,5 +1,5 @@ // CRATES -use crate::utils::{error, fetch_posts, format_url, nested_val, param, prefs, request, Post, Preferences, User}; +use crate::utils::{error, fetch_posts, format_url, param, prefs, request, Post, Preferences, User}; use actix_web::{HttpRequest, HttpResponse, Result}; use askama::Template; use time::OffsetDateTime; @@ -42,12 +42,12 @@ pub async fn profile(req: HttpRequest) -> HttpResponse { HttpResponse::Ok().content_type("text/html").body(s) } // If there is an error show error page - Err(msg) => error(msg.to_string()).await, + Err(msg) => error(msg).await, } } // USER -async fn user(name: &str) -> Result<User, &'static str> { +async fn user(name: &str) -> Result<User, String> { // Build the Reddit JSON API path let path: String = format!("/user/{}/about.json", name); @@ -58,15 +58,18 @@ async fn user(name: &str) -> Result<User, &'static str> { // Grab creation date as unix timestamp let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64; + // nested_val function used to parse JSON from Reddit APIs + let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string(); + // Parse the JSON output into a User struct Ok(User { name: name.to_string(), - title: nested_val(&res, "subreddit", "title"), - icon: format_url(nested_val(&res, "subreddit", "icon_img").as_str()), + title: about("title"), + icon: format_url(about("icon_img").as_str()), karma: res["data"]["total_karma"].as_i64().unwrap_or(0), created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"), - banner: nested_val(&res, "subreddit", "banner_img"), - description: nested_val(&res, "subreddit", "public_description"), + banner: about("banner_img"), + description: about("public_description"), }) 
} // If the Reddit API returns an error, exit this function diff --git a/src/utils.rs b/src/utils.rs index 0ec409f..d6acf27 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -7,21 +7,21 @@ use base64::encode; use regex::Regex; use serde_json::{from_str, Value}; use std::collections::HashMap; -use time::{OffsetDateTime, Duration}; +use time::{Duration, OffsetDateTime}; use url::Url; // // STRUCTS // // Post flair with content, background color and foreground color -pub struct Flair{ - pub flair_parts: Vec<FlairPart>, +pub struct Flair { + pub flair_parts: Vec<FlairPart>, pub background_color: String, pub foreground_color: String, } -pub struct FlairPart{ - pub flair_part_type: String, +pub struct FlairPart { + pub flair_part_type: String, pub value: String, } @@ -101,7 +101,7 @@ pub struct Params { #[derive(Template)] #[template(path = "error.html", escape = "none")] pub struct ErrorTemplate { - pub message: String, + pub msg: String, pub prefs: Preferences, } @@ -169,7 +169,7 @@ pub fn format_num(num: i64) -> String { } } -pub async fn media(data: &serde_json::Value) -> (String, String) { +pub async fn media(data: &Value) -> (String, String) { let post_type: &str; let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() { post_type = "video"; @@ -183,11 +183,11 @@ Some(gif) => { post_type = "gif"; format_url(gif["source"]["url"].as_str().unwrap_or_default()) - }, + } None => { post_type = "image"; format_url(preview["source"]["url"].as_str().unwrap_or_default()) - }, + } } } else if data["is_self"].as_bool().unwrap_or_default() { post_type = "self"; @@ -201,37 +201,42 @@ } pub fn parse_rich_flair(flair_type: String, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<FlairPart> { - let mut result: Vec<FlairPart> = Vec::new(); - if flair_type == "richtext" && !rich_flair.is_none() { - for part in rich_flair.unwrap() { - let flair_part_type = 
part["e"].as_str().unwrap_or_default().to_string(); - let value = if flair_part_type == "text" { - part["t"].as_str().unwrap_or_default().to_string() - - } else if flair_part_type == "emoji" { - format_url(part["u"].as_str().unwrap_or_default()) - } else { - "".to_string() - }; - result.push(FlairPart { - flair_part_type, - value, - }); - } - } else if flair_type == "text" && !text_flair.is_none() { - result.push(FlairPart { - flair_part_type: "text".to_string(), - value: text_flair.unwrap().to_string(), - }); + match flair_type.as_str() { + "richtext" => match rich_flair { + Some(rich) => rich + .iter() + .map(|part| { + let value = |name: &str| part[name].as_str().unwrap_or_default(); + FlairPart { + flair_part_type: value("e").to_string(), + value: match value("e") { + "text" => value("t").to_string(), + "emoji" => format_url(value("u")), + _ => String::new(), + }, + } + }) + .collect::<Vec<FlairPart>>(), + None => Vec::new(), + }, + "text" => match text_flair { + Some(text) => vec![FlairPart { + flair_part_type: "text".to_string(), + value: text.to_string(), + }], + None => Vec::new(), + }, + _ => Vec::new(), } - result } pub fn time(unix_time: i64) -> String { let time = OffsetDateTime::from_unix_timestamp(unix_time); let time_delta = OffsetDateTime::now_utc() - time; - if time_delta > Duration::days(1) { + if time_delta > Duration::days(30) { time.format("%b %d '%y") // %b %e '%y + } else if time_delta.whole_days() > 0 { + format!("{}d ago", time_delta.whole_days()) } else if time_delta.whole_hours() > 0 { format!("{}h ago", time_delta.whole_hours()) } else { @@ -244,17 +249,12 @@ pub fn time(unix_time: i64) -> String { } // // val() function used to parse JSON from Reddit APIs -pub fn val(j: &serde_json::Value, k: &str) -> String { +pub fn val(j: &Value, k: &str) -> String { String::from(j["data"][k].as_str().unwrap_or_default()) } -// nested_val() function used to parse JSON from Reddit APIs -pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String { - 
String::from(j["data"][n][k].as_str().unwrap_or_default()) -} - // Fetch posts of a user or subreddit and return a vector of posts and the "after" value -pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> { +pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), String> { let res; let post_list; @@ -271,7 +271,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> { Some(list) => post_list = list, - None => return Err("No posts found"), + None => return Err("No posts found".to_string()), } let mut posts: Vec<Post> = Vec::new(); @@ -292,8 +292,12 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec Result<(Vec Result<(Vec HttpResponse { let body = ErrorTemplate { - message: msg, + msg, prefs: Preferences::default(), } .render() @@ -339,34 +347,50 @@ pub async fn error(msg: String) -> HttpResponse { } // Make a request to a Reddit API and parse the JSON response -pub async fn request(path: &str) -> Result<Value, &'static str> { +pub async fn request(path: &str) -> Result<Value, String> { let url = format!("https://www.reddit.com{}", path); - // Send request using ureq - match ureq::get(&url).call() { - // If response is success - Ok(response) => { - // Parse the response from Reddit as JSON - match from_str(&response.into_string().unwrap()) { - Ok(json) => Ok(json), - Err(_) => { - #[cfg(debug_assertions)] - dbg!(format!("{} - Failed to parse page JSON data", url)); - Err("Failed to parse page JSON data") + // Send request using awc + async fn send(url: &str) -> Result<String, (bool, String)> { + let client = actix_web::client::Client::default(); + let response = client.get(url).send().await; + + match response { + Ok(mut payload) => { + // Get first number of response HTTP status code + match payload.status().to_string().chars().next() { + // If success + Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap().to_vec()).unwrap()), + // If redirection + Some('3') => Err((true, 
payload.headers().get("location").unwrap().to_str().unwrap().to_string())), + // Otherwise + _ => Err((false, "Page not found".to_string())), } } - } - // If response is error - Err(ureq::Error::Status(_, _)) => { - #[cfg(debug_assertions)] - dbg!(format!("{} - Page not found", url)); - Err("Page not found") - } - // If failed to send request - Err(e) => { - #[cfg(debug_assertions)] - dbg!(e); - Err("Couldn't send request to Reddit") + Err(_) => Err((false, "Couldn't send request to Reddit".to_string())), } } + + fn err(u: String, m: String) -> Result<Value, String> { + #[cfg(debug_assertions)] + dbg!(format!("{} - {}", u, m)); + Err(m) + }; + + fn json(url: String, body: String) -> Result<Value, String> { + match from_str(body.as_str()) { + Ok(json) => Ok(json), + Err(_) => err(url, "Failed to parse page JSON data".to_string()), + } + } + + match send(&url).await { + Ok(body) => json(url, body), + Err((true, location)) => match send(location.as_str()).await { + Ok(body) => json(url, body), + Err((true, location)) => err(url, location), + Err((_, msg)) => err(url, msg), + }, + Err((_, msg)) => err(url, msg), + } } diff --git a/templates/error.html b/templates/error.html index cf86271..42067a7 100644 --- a/templates/error.html +++ b/templates/error.html @@ -1,6 +1,6 @@ {% extends "base.html" %} -{% block title %}Error: {{ message }}{% endblock %} +{% block title %}Error: {{ msg }}{% endblock %} {% block sortstyle %}{% endblock %} {% block content %} -

{{ message }}

+

{{ msg }}

{% endblock %} \ No newline at end of file