// CRATES
use crate::utils::{format_num, format_url, request, val, Comment, ErrorTemplate, Flair, Flags, Params, Post};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use async_recursion::async_recursion;
use askama::Template;
use chrono::{TimeZone, Utc};
// STRUCTS
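// Askama template for the post page, rendered from post.html with HTML escaping disabled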
#[derive(Template)]
#[template(path = "post.html", escape = "none")]
struct PostTemplate {
	comments: Vec<Comment>,
	post: Post,
	sort: String,
}
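// Fetch a post (and optionally a single comment thread) from Reddit's JSON API and render the post page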
async fn render(id: String, sort: Option<String>, comment_id: Option<String>) -> Result<HttpResponse> {
	// Log the post ID being fetched in debug mode
	#[cfg(debug_assertions)]
	dbg!(&id);

	// Handle the sort parameter, defaulting to "confidence"
	let sorting: String = sort.unwrap_or("confidence".to_string());
	// Build the Reddit JSON API url (`raw_json=1` asks Reddit not to HTML-encode the response)
	let url: String = match comment_id {
		None => format!("{}.json?sort={}&raw_json=1", id, sorting),
		Some(val) => format!("{}.json?sort={}&comment={}&raw_json=1", id, sorting, val),
	};

	// Send a request to the url, receive JSON in response
	let req = request(url).await;
	// If the Reddit API returns an error, exit and send an error page to the user
	if req.is_err() {
		let s = ErrorTemplate {
			message: req.err().unwrap().to_string(),
		}
		.render()
		.unwrap();
		return Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s));
	}

	// Otherwise, grab the JSON output from the request
	let res = req.unwrap();
	// Parse the JSON into Post and Comment structs
	let post = parse_post(res[0].clone()).await;
	let comments = parse_comments(res[1].clone()).await;

	// Use the Post and Comment structs to generate a website to show users
	let s = PostTemplate {
		comments: comments.unwrap(),
		post: post.unwrap(),
		sort: sorting,
	}
	.render()
	.unwrap();

	Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
// SERVICES
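// Each route handler below extracts the post ID from the path and delegates to `render`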
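// Handle shortened post links where the path contains only the post ID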
pub async fn short(web::Path(id): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
	render(id, params.sort.clone(), None).await
}
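// Handle links that point to a specific comment within a post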
pub async fn comment(web::Path((_sub, id, _title, comment_id)): web::Path<(String, String, String, String)>, params: web::Query<Params>) -> Result<HttpResponse> {
	render(id, params.sort.clone(), Some(comment_id)).await
}
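// Handle standard post pages addressed by subreddit and post ID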
pub async fn page(web::Path((_sub, id)): web::Path<(String, String)>, params: web::Query<Params>) -> Result<HttpResponse> {
	render(id, params.sort.clone(), None).await
}
// UTILITIES
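// Determine the post type ("video", "image" or "link") and its media URL from the post's JSON data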
async fn media(data: &serde_json::Value) -> (String, String) {
	let post_type: &str;
	let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
		// Use the video preview's fallback URL if one is present
		post_type = "video";
		format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string()).await
	} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
		// Otherwise, use a Reddit-hosted video's fallback URL
		post_type = "video";
		format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string()).await
	} else if data["post_hint"].as_str().unwrap_or("") == "image" {
		// Use the source preview image for image posts
		post_type = "image";
		format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string()).await
	} else {
		// Otherwise, treat the post as a plain link
		post_type = "link";
		data["url"].as_str().unwrap().to_string()
	};

	(post_type.to_string(), url)
}
// POSTS
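// Parse the post listing returned by the API into a Post struct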
async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
	// Retrieve post (as opposed to comments) from JSON
	let post_data: &serde_json::Value = &json["data"]["children"][0];

	// Grab UTC time as unix timestamp
	let unix_time: i64 = post_data["data"]["created_utc"].as_f64().unwrap().round() as i64;
	// Parse post score
	let score = post_data["data"]["score"].as_i64().unwrap();

	// Determine the type of media along with the media URL
	let media = media(&post_data["data"]).await;
	// Build a post using data parsed from Reddit post API
	let post = Post {
		title: val(post_data, "title").await,
		community: val(post_data, "subreddit").await,
		body: val(post_data, "selftext_html").await,
		author: val(post_data, "author").await,
		author_flair: Flair(
			val(post_data, "author_flair_text").await,
			val(post_data, "author_flair_background_color").await,
			val(post_data, "author_flair_text_color").await,
		),
		url: val(post_data, "permalink").await,
		score: format_num(score),
		post_type: media.0,
		flair: Flair(
			val(post_data, "link_flair_text").await,
			val(post_data, "link_flair_background_color").await,
			// Map Reddit's "dark" flair text color to black, anything else to white
			if val(post_data, "link_flair_text_color").await == "dark" {
				"black".to_string()
			} else {
				"white".to_string()
			},
		),
		flags: Flags {
			nsfw: post_data["data"]["over_18"].as_bool().unwrap_or(false),
			stickied: post_data["data"]["stickied"].as_bool().unwrap_or(false),
		},
		media: media.1,
		time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
	};

	Ok(post)
}
// COMMENTS
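// `async_recursion` boxes the recursive calls so this async fn can call itself for nested replies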
#[async_recursion]
async fn parse_comments(json: serde_json::Value) -> Result<Vec<Comment>, &'static str> {
	// Separate the comment JSON into a Vector of comments
	let comment_data = json["data"]["children"].as_array().unwrap();

	let mut comments: Vec<Comment> = Vec::new();

	// For each comment, retrieve the values to build a Comment object
	for comment in comment_data {
		let unix_time: i64 = comment["data"]["created_utc"].as_f64().unwrap_or(0.0).round() as i64;
		// Skip items without a creation time, such as "load more comments" placeholders
		if unix_time == 0 {
			continue;
		}

		let score = comment["data"]["score"].as_i64().unwrap_or(0);
		let body = val(comment, "body_html").await;
		// Recursively parse this comment's replies, if it has any
		let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
			parse_comments(comment["data"]["replies"].clone()).await.unwrap_or(Vec::new())
		} else {
			Vec::new()
		};
		comments.push(Comment {
			id: val(comment, "id").await,
			body,
			author: val(comment, "author").await,
			score: format_num(score),
			time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
			replies,
			flair: Flair(
				val(comment, "author_flair_text").await,
				val(comment, "author_flair_background_color").await,
				val(comment, "author_flair_text_color").await,
			),
		});
	}
	Ok(comments)
}