Rewrite + Searching

This commit is contained in:
spikecodes
2020-12-31 15:54:13 -08:00
parent c7282520cd
commit a6dc7ee043
17 changed files with 342 additions and 262 deletions

View File

@ -2,9 +2,9 @@
use actix_web::{get, middleware::NormalizePath, web, App, HttpResponse, HttpServer};
// Reference local files
mod popular;
mod post;
mod proxy;
mod search;
mod subreddit;
mod user;
mod utils;
@ -42,6 +42,7 @@ async fn main() -> std::io::Result<()> {
HttpServer::new(|| {
App::new()
// .default_service(web::get().to(subreddit::page))
// TRAILING SLASH MIDDLEWARE
.wrap(NormalizePath::default())
// GENERAL SERVICES
@ -50,17 +51,22 @@ async fn main() -> std::io::Result<()> {
.route("/robots.txt/", web::get().to(robots))
// PROXY SERVICE
.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
// SEARCH SERVICES
.route("/search/", web::get().to(search::page))
.route("r/{sub}/search/", web::get().to(search::page))
// USER SERVICES
.route("/u/{username}/", web::get().to(user::page))
.route("/user/{username}/", web::get().to(user::page))
.route("/u/{username}/", web::get().to(user::profile))
.route("/user/{username}/", web::get().to(user::profile))
// SUBREDDIT SERVICES
.route("/r/{sub}/", web::get().to(subreddit::page))
.route("/r/{sub}/{sort}/", web::get().to(subreddit::page))
// POPULAR SERVICES
.route("/", web::get().to(popular::page))
.route("/", web::get().to(subreddit::page))
.route("/{sort:best|hot|new|top|rising}/", web::get().to(subreddit::page))
// POST SERVICES
.route("/{id:.{5,6}}/", web::get().to(post::short))
.route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::page))
.route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::comment))
.route("/{id:.{5,6}}/", web::get().to(post::item))
.route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::item))
.route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item))
})
.bind(address.clone())
.expect(format!("Cannot bind to the address: {}", address).as_str())

View File

@ -1,57 +0,0 @@
// CRATES
use crate::utils::{fetch_posts, ErrorTemplate, Params, Post};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use askama::Template;
// STRUCTS
// Template context for the r/popular front page, rendered via templates/popular.html.
// `escape = "none"` because post bodies arrive as pre-rendered HTML from Reddit.
#[derive(Template)]
#[template(path = "popular.html", escape = "none")]
struct PopularTemplate {
// Posts fetched from r/popular for the current page
posts: Vec<Post>,
// (sorting mode, timeframe) currently applied, e.g. ("hot", "") or ("top", "week")
sort: (String, String),
// (before, after) pagination cursors for the previous/next page links
ends: (String, String),
}
// RENDER
/// Fetch r/popular posts from the Reddit JSON API and render the popular page.
///
/// * `sort` — optional sorting mode; defaults to "hot".
/// * `t` — optional timeframe (e.g. "week") appended as `&t=`.
/// * `ends` — `(before, after)` pagination cursors; `before` wins when both are set.
///
/// Returns the rendered HTML page, or the error template with a 404 status
/// when the upstream fetch fails.
async fn render(sort: Option<String>, t: Option<String>, ends: (Option<String>, Option<String>)) -> Result<HttpResponse> {
	// Default to "hot"; use the lazy form so no String is allocated when a sort is supplied
	let sorting = sort.unwrap_or_else(|| "hot".to_string());
	// If there is an after, there must be a before
	let before = ends.1.clone().unwrap_or_default();
	// Only emit the timeframe query param when one was supplied
	let timeframe = match &t {
		Some(val) => format!("&t={}", val),
		None => String::new(),
	};

	// Build the Reddit JSON API url: paginate backward (before), forward (after), or first page
	let url = match (ends.0, ends.1) {
		(Some(val), _) => format!("r/popular/{}.json?before={}&count=25{}", sorting, val, timeframe),
		(None, Some(val)) => format!("r/popular/{}.json?after={}&count=25{}", sorting, val, timeframe),
		(None, None) => format!("r/popular/{}.json?{}", sorting, timeframe),
	};

	match fetch_posts(url, String::new()).await {
		// Upstream fetch failed: show the error page with a 404 status
		Err(msg) => {
			let s = ErrorTemplate {
				message: msg.to_string(),
			}
			.render()
			.unwrap();
			Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
		}
		// Fetch succeeded: render the post list with pagination cursors
		Ok(items) => {
			let s = PopularTemplate {
				posts: items.0,
				sort: (sorting, t.unwrap_or_default()),
				ends: (before, items.1),
			}
			.render()
			.unwrap();
			Ok(HttpResponse::Ok().content_type("text/html").body(s))
		}
	}
}
// SERVICES
/// Service entry point: extract query params and delegate to `render`.
pub async fn page(params: web::Query<Params>) -> Result<HttpResponse> {
	// Pagination cursors travel together as a (before, after) pair
	let cursors = (params.before.clone(), params.after.clone());
	let sorting = params.sort.clone();
	let timeframe = params.t.clone();
	render(sorting, timeframe, cursors).await
}

View File

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{format_num, format_url, request, val, Comment, ErrorTemplate, Flair, Flags, Params, Post};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use crate::utils::{format_num, format_url, param, request, val, Comment, ErrorTemplate, Flags, Flair, Post};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use async_recursion::async_recursion;
@ -16,22 +16,17 @@ struct PostTemplate {
sort: String,
}
async fn render(id: String, sort: Option<String>, comment_id: Option<String>) -> Result<HttpResponse> {
pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let sort = param(&path, "sort").await;
let id = req.match_info().get("id").unwrap_or("").to_string();
// Log the post ID being fetched in debug mode
#[cfg(debug_assertions)]
dbg!(&id);
// Handling sort parameter
let sorting: String = sort.unwrap_or("confidence".to_string());
// Build the Reddit JSON API url
let url: String = match comment_id {
None => format!("{}.json?sort={}&raw_json=1", id, sorting),
Some(val) => format!("{}.json?sort={}&comment={}&raw_json=1", id, sorting, val),
};
// Send a request to the url, receive JSON in response
let req = request(url).await;
let req = request(path.clone()).await;
// If the Reddit API returns an error, exit and send error page to user
if req.is_err() {
@ -41,37 +36,18 @@ async fn render(id: String, sort: Option<String>, comment_id: Option<String>) ->
.render()
.unwrap();
return Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s));
} else {
// Otherwise, grab the JSON output from the request
let res = req.unwrap();
// Parse the JSON into Post and Comment structs
let post = parse_post(res[0].clone()).await.unwrap();
let comments = parse_comments(res[1].clone()).await.unwrap();
// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate { comments, post, sort }.render().unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
// Otherwise, grab the JSON output from the request
let res = req.unwrap();
// Parse the JSON into Post and Comment structs
let post = parse_post(res[0].clone()).await;
let comments = parse_comments(res[1].clone()).await;
// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate {
comments: comments.unwrap(),
post: post.unwrap(),
sort: sorting,
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
// SERVICES
pub async fn short(web::Path(id): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
render(id, params.sort.clone(), None).await
}
pub async fn comment(web::Path((_sub, id, _title, comment_id)): web::Path<(String, String, String, String)>, params: web::Query<Params>) -> Result<HttpResponse> {
render(id, params.sort.clone(), Some(comment_id)).await
}
pub async fn page(web::Path((_sub, id)): web::Path<(String, String)>, params: web::Query<Params>) -> Result<HttpResponse> {
render(id, params.sort.clone(), None).await
}
// UTILITIES
@ -111,7 +87,7 @@ async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
let post = Post {
title: val(post_data, "title").await,
community: val(post_data, "subreddit").await,
body: val(post_data,"selftext_html").await,
body: val(post_data, "selftext_html").await,
author: val(post_data, "author").await,
author_flair: Flair(
val(post_data, "author_flair_text").await,
@ -132,7 +108,7 @@ async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
),
flags: Flags {
nsfw: post_data["data"]["over_18"].as_bool().unwrap_or(false),
stickied: post_data["data"]["stickied"].as_bool().unwrap_or(false)
stickied: post_data["data"]["stickied"].as_bool().unwrap_or(false),
},
media: media.1,
time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
@ -157,7 +133,7 @@ async fn parse_comments(json: serde_json::Value) -> Result<Vec<Comment>, &'stati
}
let score = comment["data"]["score"].as_i64().unwrap_or(0);
let body = val(comment, "body_html").await;
let body = val(comment, "body_html").await;
let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
parse_comments(comment["data"]["replies"].clone()).await.unwrap_or(Vec::new())

52
src/search.rs Normal file
View File

@ -0,0 +1,52 @@
// CRATES
use crate::utils::{fetch_posts, param, ErrorTemplate, Post};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use askama::Template;
// STRUCTS
// Template context for search results, rendered via templates/search.html.
// `escape = "none"` because post bodies arrive as pre-rendered HTML from Reddit.
#[derive(Template)]
#[allow(dead_code)]
#[template(path = "search.html", escape = "none")]
struct SearchTemplate {
// Posts matching the search query
posts: Vec<Post>,
// The raw search query string (the `q` parameter)
query: String,
// Subreddit being searched, or empty for a site-wide search
sub: String,
// (sorting mode, timeframe) currently applied
sort: (String, String),
// (before, after) pagination cursors for the previous/next page links
ends: (String, String),
}
// SERVICES
/// Handle `/search` and `/r/{sub}/search`: proxy the query to the Reddit
/// JSON API and render the results page.
///
/// Returns the rendered HTML results, or the error template with a 404
/// status when the upstream fetch fails.
pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
	// Rebuild the incoming path + query string as a Reddit JSON API path
	let path = format!("{}.json?{}", req.path(), req.query_string());
	let q = param(&path, "q").await;
	// Evaluate the sort param once (was fetched twice); default to "relevance"
	let sort_param = param(&path, "sort").await;
	let sort = if sort_param.is_empty() { "relevance".to_string() } else { sort_param };
	// Empty when searching site-wide rather than within a subreddit
	let sub = req.match_info().get("sub").unwrap_or("").to_string();

	match fetch_posts(path.clone(), String::new()).await {
		// Upstream fetch failed: show the error page with a 404 status
		Err(msg) => {
			let s = ErrorTemplate {
				message: msg.to_string(),
			}
			.render()
			.unwrap();
			Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
		}
		// Fetch succeeded: render the results with pagination cursors
		Ok(items) => {
			let s = SearchTemplate {
				posts: items.0,
				query: q,
				sub,
				sort: (sort, param(&path, "t").await),
				ends: (param(&path, "after").await, items.1),
			}
			.render()
			.unwrap();
			Ok(HttpResponse::Ok().content_type("text/html").body(s))
		}
	}
}

View File

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{fetch_posts, format_num, format_url, request, val, ErrorTemplate, Params, Post, Subreddit};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use crate::utils::{fetch_posts, format_num, format_url, param, request, val, ErrorTemplate, Post, Subreddit};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use askama::Template;
use std::convert::TryInto;
@ -15,49 +15,35 @@ struct SubredditTemplate {
}
// SERVICES
#[allow(dead_code)]
pub async fn page(web::Path(sub): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
render(sub, params.sort.clone(), params.t.clone(), (params.before.clone(), params.after.clone())).await
}
// web::Path(sub): web::Path<String>, params: web::Query<Params>
pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}", req.path(), req.query_string());
let sub = req.match_info().get("sub").unwrap_or("popular").to_string();
let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
pub async fn render(sub_name: String, sort: Option<String>, t: Option<String>, ends: (Option<String>, Option<String>)) -> Result<HttpResponse> {
let sorting = sort.unwrap_or("hot".to_string());
let before = ends.1.clone().unwrap_or(String::new()); // If there is an after, there must be a before
let timeframe = match &t { Some(val) => format!("&t={}", val), None => String::new() };
// Build the Reddit JSON API url
let url = match ends.0 {
Some(val) => format!("r/{}/{}.json?before={}&count=25{}", sub_name, sorting, val, timeframe),
None => match ends.1 {
Some(val) => format!("r/{}/{}.json?after={}&count=25{}", sub_name, sorting, val, timeframe),
None => format!("r/{}/{}.json?{}", sub_name, sorting, timeframe),
},
};
let sub_result = if !&sub_name.contains("+") {
subreddit(&sub_name).await
let sub_result = if !&sub.contains("+") && sub != "popular" {
subreddit(&sub).await
} else {
Ok(Subreddit::default())
};
let items_result = fetch_posts(url, String::new()).await;
let posts = fetch_posts(path.clone(), String::new()).await;
if sub_result.is_err() || items_result.is_err() {
if posts.is_err() {
let s = ErrorTemplate {
message: sub_result.err().unwrap().to_string(),
message: posts.err().unwrap().to_string(),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let sub = sub_result.unwrap();
let items = items_result.unwrap();
let sub = sub_result.unwrap_or(Subreddit::default());
let items = posts.unwrap();
let s = SubredditTemplate {
sub: sub,
posts: items.0,
sort: (sorting, t.unwrap_or(String::new())),
ends: (before, items.1),
sort: (sort, param(&path, "t").await),
ends: (param(&path, "after").await, items.1),
}
.render()
.unwrap();

View File

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{fetch_posts, format_url, nested_val, request, ErrorTemplate, Params, Post, User};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use crate::utils::{fetch_posts, format_url, nested_val, param, request, ErrorTemplate, Post, User};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use askama::Template;
use chrono::{TimeZone, Utc};
@ -14,25 +14,19 @@ struct UserTemplate {
ends: (String, String),
}
async fn render(username: String, sort: Option<String>, t: Option<String>, ends: (Option<String>, Option<String>)) -> Result<HttpResponse> {
let sorting = sort.unwrap_or("new".to_string());
pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
// Build the Reddit JSON API path
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let before = ends.1.clone().unwrap_or(String::new()); // If there is an after, there must be a before
let timeframe = match &t { Some(val) => format!("&t={}", val), None => String::new() };
// Build the Reddit JSON API url
let url = match ends.0 {
Some(val) => format!("user/{}/.json?sort={}&before={}&count=25&raw_json=1{}", username, sorting, val, timeframe),
None => match ends.1 {
Some(val) => format!("user/{}/.json?sort={}&after={}&count=25&raw_json=1{}", username, sorting, val, timeframe),
None => format!("user/{}/.json?sort={}&raw_json=1{}", username, sorting, timeframe),
},
};
// Retrieve other variables from Libreddit request
let sort = param(&path, "sort").await;
let username = req.match_info().get("username").unwrap_or("").to_string();
// Request user profile data and user posts/comments from Reddit
let user = user(&username).await;
let posts = fetch_posts(url, "Comment".to_string()).await;
let posts = fetch_posts(path.clone(), "Comment".to_string()).await;
// If there is an error show error page
if user.is_err() || posts.is_err() {
let s = ErrorTemplate {
message: user.err().unwrap().to_string(),
@ -42,12 +36,12 @@ async fn render(username: String, sort: Option<String>, t: Option<String>, ends:
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let posts_unwrapped = posts.unwrap();
let s = UserTemplate {
user: user.unwrap(),
posts: posts_unwrapped.0,
sort: (sorting, t.unwrap_or(String::new())),
ends: (before, posts_unwrapped.1)
sort: (sort, param(&path, "t").await),
ends: (param(&path, "after").await, posts_unwrapped.1),
}
.render()
.unwrap();
@ -56,9 +50,9 @@ async fn render(username: String, sort: Option<String>, t: Option<String>, ends:
}
// SERVICES
pub async fn page(web::Path(username): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
render(username, params.sort.clone(), params.t.clone(), (params.before.clone(), params.after.clone())).await
}
// pub async fn page(web::Path(username): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
// render(username, params.sort.clone(), params.t.clone(), (params.before.clone(), params.after.clone())).await
// }
// USER
async fn user(name: &String) -> Result<User, &'static str> {

View File

@ -3,6 +3,7 @@
//
use chrono::{TimeZone, Utc};
use serde_json::{from_str, Value};
use url::Url;
// use surf::{client, get, middleware::Redirect};
#[cfg(feature = "proxy")]
@ -16,7 +17,7 @@ pub struct Flair(pub String, pub String, pub String);
// Post flags with nsfw and stickied
pub struct Flags {
pub nsfw: bool,
pub stickied: bool
pub stickied: bool,
}
// Post containing content, metadata and media
@ -72,6 +73,7 @@ pub struct Subreddit {
#[derive(serde::Deserialize)]
pub struct Params {
pub t: Option<String>,
pub q: Option<String>,
pub sort: Option<String>,
pub after: Option<String>,
pub before: Option<String>,
@ -88,6 +90,13 @@ pub struct ErrorTemplate {
// FORMATTING
//
// Grab a query param from a url
pub async fn param(path: &String, value: &str) -> String {
let url = Url::parse(format!("https://reddit.com/{}", path).as_str()).unwrap();
let pairs: std::collections::HashMap<_, _> = url.query_pairs().into_owned().collect();
pairs.get(value).unwrap_or(&String::new()).to_owned()
}
// Direct urls to proxy if proxy is enabled
pub async fn format_url(url: String) -> String {
if url.is_empty() {
@ -127,9 +136,9 @@ pub async fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
}
// Fetch posts of a user or subreddit
pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
pub async fn fetch_posts(path: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
// Send a request to the url, receive JSON in response
let req = request(url.clone()).await;
let req = request(path.clone()).await;
// If the Reddit API returns an error, exit this function
if req.is_err() {
@ -178,14 +187,14 @@ pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec<Pos
),
flags: Flags {
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
stickied: post["data"]["stickied"].as_bool().unwrap_or(false)
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
},
url: val(post, "permalink").await,
time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
});
}
dbg!(url);
dbg!(path);
Ok((posts, res["data"]["after"].as_str().unwrap_or("").to_string()))
}
@ -195,8 +204,8 @@ pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec<Pos
//
// Make a request to a Reddit API and parse the JSON response
pub async fn request(mut url: String) -> Result<serde_json::Value, &'static str> {
url = format!("https://www.reddit.com/{}", url);
pub async fn request(path: String) -> Result<serde_json::Value, &'static str> {
let url = format!("https://www.reddit.com/{}", path);
// --- actix-web::client ---
// let client = actix_web::client::Client::default();