From 39ba50dadaebb633cd6f2295ca9aac4b345ea36f Mon Sep 17 00:00:00 2001
From: spikecodes <19519553+spikecodes@users.noreply.github.com>
Date: Thu, 31 Dec 2020 21:03:44 -0800
Subject: [PATCH] Error Page

---
 src/main.rs      |  9 ++++++---
 src/post.rs      | 11 +++--------
 src/search.rs    | 13 ++++---------
 src/subreddit.rs | 11 +++--------
 src/user.rs      | 17 ++++++-----------
 src/utils.rs     | 10 +++++++++-
 6 files changed, 31 insertions(+), 40 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index d52e226..1a6c7b2 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -42,9 +42,10 @@ async fn main() -> std::io::Result<()> {
 	HttpServer::new(|| {
 		App::new()
-			// .default_service(web::get().to(subreddit::page))
 			// TRAILING SLASH MIDDLEWARE
 			.wrap(NormalizePath::default())
+			// DEFAULT SERVICE
+			.default_service(web::get().to(utils::error))
 			// GENERAL SERVICES
 			.route("/style.css/", web::get().to(style))
 			.route("/favicon.ico/", web::get().to(|| HttpResponse::Ok()))
@@ -52,14 +53,16 @@ async fn main() -> std::io::Result<()> {
 			// PROXY SERVICE
 			.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
 			// SEARCH SERVICES
-			.route("/search/", web::get().to(search::page))
-			.route("r/{sub}/search/", web::get().to(search::page))
+			.route("/search/", web::get().to(search::find))
+			.route("r/{sub}/search/", web::get().to(search::find))
 			// USER SERVICES
 			.route("/u/{username}/", web::get().to(user::profile))
 			.route("/user/{username}/", web::get().to(user::profile))
 			// SUBREDDIT SERVICES
 			.route("/r/{sub}/", web::get().to(subreddit::page))
 			.route("/r/{sub}/{sort}/", web::get().to(subreddit::page))
+			// WIKI SERVICES
+			// .route("/r/{sub}/wiki/index", web::get().to(subreddit::wiki))
 			// POPULAR SERVICES
 			.route("/", web::get().to(subreddit::page))
 			.route("/{sort:best|hot|new|top|rising}/", web::get().to(subreddit::page))
diff --git a/src/post.rs b/src/post.rs
index c63e28f..152c1e7 100644
--- a/src/post.rs
+++ b/src/post.rs
@@ -1,6 +1,6 @@
 // CRATES
-use crate::utils::{format_num, format_url, param, request, val, Comment, ErrorTemplate, Flags, Flair, Post};
-use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
+use crate::utils::{error, format_num, format_url, param, request, val, Comment, Flags, Flair, Post};
+use actix_web::{HttpRequest, HttpResponse, Result};
 
 use async_recursion::async_recursion;
 
@@ -30,12 +30,7 @@ pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
 
 	// If the Reddit API returns an error, exit and send error page to user
 	if req.is_err() {
-		let s = ErrorTemplate {
-			message: req.err().unwrap().to_string(),
-		}
-		.render()
-		.unwrap();
-		return Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s));
+		error(req.err().unwrap().to_string()).await
 	} else {
 		// Otherwise, grab the JSON output from the request
 		let res = req.unwrap();
diff --git a/src/search.rs b/src/search.rs
index 486dbd1..2e0a063 100644
--- a/src/search.rs
+++ b/src/search.rs
@@ -1,6 +1,6 @@
 // CRATES
-use crate::utils::{fetch_posts, param, ErrorTemplate, Post};
-use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
+use crate::utils::{error, fetch_posts, param, Post};
+use actix_web::{HttpRequest, HttpResponse, Result};
 use askama::Template;
 
 // STRUCTS
@@ -16,7 +16,7 @@ struct SearchTemplate {
 }
 
 // SERVICES
-pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
+pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
 	let path = format!("{}.json?{}", req.path(), req.query_string());
 	let q = param(&path, "q").await;
 	let sort = if param(&path, "sort").await.is_empty() {
@@ -29,12 +29,7 @@ pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
 	let posts = fetch_posts(path.clone(), String::new()).await;
 
 	if posts.is_err() {
-		let s = ErrorTemplate {
-			message: posts.err().unwrap().to_string(),
-		}
-		.render()
-		.unwrap();
-		Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
+		error(posts.err().unwrap().to_string()).await
 	} else {
 		let items = posts.unwrap();
diff --git a/src/subreddit.rs b/src/subreddit.rs
index e0e834d..718976b 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -1,6 +1,6 @@
 // CRATES
-use crate::utils::{fetch_posts, format_num, format_url, param, request, val, ErrorTemplate, Post, Subreddit};
-use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
+use crate::utils::{error, fetch_posts, format_num, format_url, param, request, val, Post, Subreddit};
+use actix_web::{HttpRequest, HttpResponse, Result};
 use askama::Template;
 use std::convert::TryInto;
 
@@ -29,12 +29,7 @@ pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
 	let posts = fetch_posts(path.clone(), String::new()).await;
 
 	if posts.is_err() {
-		let s = ErrorTemplate {
-			message: posts.err().unwrap().to_string(),
-		}
-		.render()
-		.unwrap();
-		Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
+		error(posts.err().unwrap().to_string()).await
 	} else {
 		let sub = sub_result.unwrap_or(Subreddit::default());
 		let items = posts.unwrap();
diff --git a/src/user.rs b/src/user.rs
index 2042210..cd089fc 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -1,6 +1,6 @@
 // CRATES
-use crate::utils::{fetch_posts, format_url, nested_val, param, request, ErrorTemplate, Post, User};
-use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
+use crate::utils::{error, fetch_posts, format_url, nested_val, param, request, Post, User};
+use actix_web::{HttpRequest, HttpResponse, Result};
 use askama::Template;
 use chrono::{TimeZone, Utc};
 
@@ -28,12 +28,7 @@ pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
 
 	// If there is an error show error page
 	if user.is_err() || posts.is_err() {
-		let s = ErrorTemplate {
-			message: user.err().unwrap().to_string(),
-		}
-		.render()
-		.unwrap();
-		Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
+		error(user.err().unwrap().to_string()).await
 	} else {
 		let posts_unwrapped = posts.unwrap();
 
@@ -58,15 +53,15 @@ pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
 async fn user(name: &String) -> Result<User, &'static str> {
 	// Build the Reddit JSON API path
 	let path: String = format!("user/{}/about.json", name);
-	
+
 	// Send a request to the url, receive JSON in response
 	let req = request(path).await;
-	
+
 	// If the Reddit API returns an error, exit this function
 	if req.is_err() {
 		return Err(req.err().unwrap());
 	}
-	
+
 	// Otherwise, grab the JSON output from the request
 	let res = req.unwrap();
diff --git a/src/utils.rs b/src/utils.rs
index 2c8edee..852b0e0 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,6 +1,8 @@
 //
 // CRATES
 //
+use actix_web::{http::StatusCode, HttpResponse, Result};
+use askama::Template;
 use chrono::{TimeZone, Utc};
 use serde_json::{from_str, Value};
 use url::Url;
@@ -81,7 +83,7 @@ pub struct Params {
 }
 
 // Error template
-#[derive(askama::Template)]
+#[derive(Template)]
 #[template(path = "error.html", escape = "none")]
 pub struct ErrorTemplate {
 	pub message: String,
@@ -202,6 +204,12 @@ pub async fn fetch_posts(path: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
 }
 
+pub async fn error(message: String) -> Result<HttpResponse> {
+	let msg = if message.is_empty() { "Page not found".to_string() } else { message };
+	let body = ErrorTemplate { message: msg }.render().unwrap();
+	Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body))
+}
+
 // Make a request to a Reddit API and parse the JSON response
 pub async fn request(path: String) -> Result<Value, &'static str> {
 	let url = format!("https://www.reddit.com/{}", path);
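
For reference, a minimal, self-contained sketch of the pattern this patch introduces, written against the actix-web 3 builder API used by the routes above. The error stand-in mirrors the helper added to src/utils.rs but returns a plain HTML string instead of rendering templates/error.html through Askama, so it compiles without the template file; the handler example_page and its literal error message are hypothetical and only illustrate the new call-site shape used in post.rs, search.rs, subreddit.rs and user.rs.

use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};

// Stand-in for the error() helper added to src/utils.rs: build the error page
// and return it with a 404 status. The real helper renders the Askama
// ErrorTemplate; a plain HTML string keeps this sketch free of template files.
pub async fn error(message: String) -> Result<HttpResponse> {
	let msg = if message.is_empty() { "Page not found".to_string() } else { message };
	let body = format!("<h1>Error</h1><p>{}</p>", msg);
	Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body))
}

// Hypothetical handler showing the call-site shape the patch switches to:
// on failure, delegate to error() instead of building an ErrorTemplate inline.
pub async fn example_page(_req: HttpRequest) -> Result<HttpResponse> {
	let fetched: Result<String, &'static str> = Err("Reddit API returned an error");
	match fetched {
		Ok(html) => Ok(HttpResponse::Ok().content_type("text/html").body(html)),
		Err(msg) => error(msg.to_string()).await,
	}
}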