redsunlib/src/utils.rs

//
// CRATES
//
use actix_web::{cookie::Cookie, HttpResponse, Result};
use askama::Template;
use base64::encode;
use chrono::{TimeZone, Utc};
use regex::Regex;
use serde_json::from_str;
use std::collections::HashMap;
use url::Url;
// use surf::{client, get, middleware::Redirect};

//
// STRUCTS
//
// Post flair with text, background color and foreground color
pub struct Flair(pub String, pub String, pub String);

// Post flags with nsfw and stickied
pub struct Flags {
    pub nsfw: bool,
    pub stickied: bool,
}

// Post containing content, metadata and media
pub struct Post {
    pub id: String,
    pub title: String,
    pub community: String,
    pub body: String,
    pub author: String,
    pub author_flair: Flair,
    pub permalink: String,
    pub score: String,
    pub upvote_ratio: i64,
    pub post_type: String,
    pub flair: Flair,
    pub flags: Flags,
    pub thumbnail: String,
    pub media: String,
    pub time: String,
}

// Comment with content, author, score and date/time that it was posted
pub struct Comment {
    pub id: String,
    pub body: String,
    pub author: String,
    pub flair: Flair,
    pub score: String,
    pub time: String,
    pub replies: Vec<Comment>,
}

// User struct containing metadata about a user
pub struct User {
    pub name: String,
    pub title: String,
    pub icon: String,
    pub karma: i64,
    pub created: String,
    pub banner: String,
    pub description: String,
}

// Subreddit struct containing metadata about a community
#[derive(Default)]
pub struct Subreddit {
    pub name: String,
    pub title: String,
    pub description: String,
    pub info: String,
    pub icon: String,
    pub members: String,
    pub active: String,
    pub wiki: bool,
}

// Parser for query params, used in sorting (e.g. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
    pub t: Option<String>,
    pub q: Option<String>,
    pub sort: Option<String>,
    pub after: Option<String>,
    pub before: Option<String>,
}

// Error template
#[derive(Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
    pub message: String,
    pub layout: String,
}

//
// FORMATTING
//

// Grab a query param from a url
pub fn param(path: &str, value: &str) -> String {
    let url = Url::parse(format!("https://libredd.it/{}", path).as_str()).unwrap();
    let pairs: HashMap<_, _> = url.query_pairs().into_owned().collect();
    pairs.get(value).unwrap_or(&String::new()).to_owned()
}

// Cookie value from request
pub fn cookie(req: actix_web::HttpRequest, name: &str) -> String {
    actix_web::HttpMessage::cookie(&req, name).unwrap_or_else(|| Cookie::new(name, "")).value().to_string()
}
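// Illustrative example (not from the original source): cookie(req, "layout") would return the
// stored value of a hypothetical "layout" cookie, or an empty string if that cookie is not set.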

// Direct urls to proxy if proxy is enabled
pub fn format_url(url: String) -> String {
    if url.is_empty() || url == "self" || url == "default" || url == "nsfw" {
        String::new()
    } else {
        format!("/proxy/{}", encode(url).as_str())
    }
}
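// Illustrative example (not from the original source): a media URL such as
// "https://v.redd.it/abc123/DASH_720.mp4" becomes "/proxy/<base64-encoded URL>", while the
// placeholder values "self", "default", "nsfw" and the empty string all map to an empty string.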

// Rewrite Reddit links to Libreddit in body of text
pub fn rewrite_url(text: &str) -> String {
    let re = Regex::new(r#"href="(https://|http://|)(www\.|)(reddit)\.(com)/"#).unwrap();
    re.replace_all(text, r#"href="/"#).to_string()
}
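// Illustrative example (not from the original source): an anchor such as
// href="https://www.reddit.com/r/rust/" in a post body becomes href="/r/rust/".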

// Append `m` and `k` for millions and thousands respectively
pub fn format_num(num: i64) -> String {
    if num > 1_000_000 {
        format!("{}m", num / 1_000_000)
    } else if num > 1000 {
        format!("{}k", num / 1000)
    } else {
        num.to_string()
    }
}

// Determine the media type of a post and return it along with the media URL
pub async fn media(data: &serde_json::Value) -> (String, String) {
    let post_type: &str;
    let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
        post_type = "video";
        format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default().to_string())
    } else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
        post_type = "video";
        format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default().to_string())
    } else if data["post_hint"].as_str().unwrap_or("") == "image" {
        post_type = "image";
        format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap_or_default().to_string())
    } else {
        post_type = "link";
        data["url"].as_str().unwrap_or_default().to_string()
    };

    (post_type.to_string(), url)
}
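// Illustrative example (not from the original source): for post JSON where post_hint is "image",
// media(&post["data"]).await returns ("image", "/proxy/<base64 of the preview source URL>");
// for a plain link post it returns ("link", <the post's external URL>).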

//
// JSON PARSING
//

// val() function used to parse JSON from Reddit APIs
pub fn val(j: &serde_json::Value, k: &str) -> String {
    String::from(j["data"][k].as_str().unwrap_or_default())
}

// nested_val() function used to parse JSON from Reddit APIs
pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
    String::from(j["data"][n][k].as_str().unwrap_or_default())
}
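// Illustrative example (not from the original source): given an API item shaped like
// { "data": { "title": "Hello", "media": { "type": "image" } } },
// val(&item, "title") returns "Hello" and nested_val(&item, "media", "type") returns "image";
// missing or non-string keys return an empty string.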

// Fetch posts of a user or subreddit
pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
    // Send a request to the url
    let res = match request(&path).await {
        // If success, receive JSON in response
        Ok(response) => response,
        // If the Reddit API returns an error, exit this function
        Err(msg) => return Err(msg),
    };

    // Fetch the list of posts from the JSON response
    let post_list = match res["data"]["children"].as_array() {
        Some(list) => list,
        None => return Err("No posts found"),
    };

    let mut posts: Vec<Post> = Vec::new();

    // For each post in the post list
    for post in post_list {
        let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
        let score = post["data"]["score"].as_i64().unwrap_or_default();
        let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
        let title = val(post, "title");

        // Determine the type of media along with the media URL
        let media = media(&post["data"]).await;

        posts.push(Post {
            id: val(post, "id"),
            title: if title.is_empty() { fallback_title.to_owned() } else { title },
            community: val(post, "subreddit"),
            body: rewrite_url(&val(post, "body_html")),
            author: val(post, "author"),
            author_flair: Flair(
                val(post, "author_flair_text"),
                val(post, "author_flair_background_color"),
                val(post, "author_flair_text_color"),
            ),
            score: format_num(score),
            upvote_ratio: ratio as i64,
            post_type: media.0,
            thumbnail: format_url(val(post, "thumbnail")),
            media: media.1,
            flair: Flair(
                val(post, "link_flair_text"),
                val(post, "link_flair_background_color"),
                if val(post, "link_flair_text_color") == "dark" {
                    "black".to_string()
                } else {
                    "white".to_string()
                },
            ),
            flags: Flags {
                nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
                stickied: post["data"]["stickied"].as_bool().unwrap_or_default(),
            },
            permalink: val(post, "permalink"),
            time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
        });
    }

    Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
}
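// Illustrative example (not from the original source): fetch_posts("r/rust/hot.json", String::new()).await
// would return the parsed posts of r/rust together with the `after` token Reddit provides for pagination;
// the path shown here is hypothetical, as callers build it elsewhere in the crate.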

//
// NETWORKING
//

// Render the error page with the given message as a 404 response
pub async fn error(msg: String) -> HttpResponse {
    let body = ErrorTemplate {
        message: msg,
        layout: String::new(),
    }
    .render()
    .unwrap_or_default();

    HttpResponse::NotFound().content_type("text/html").body(body)
}

// Make a request to a Reddit API and parse the JSON response
pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
    let url = format!("https://www.reddit.com/{}", path);

    // Send request using reqwest
    match reqwest::get(&url).await {
        Ok(res) => {
            // Read the status from the response
            match res.status().is_success() {
                true => {
                    // Parse the response from Reddit as JSON
                    match from_str(res.text().await.unwrap_or_default().as_str()) {
                        Ok(json) => Ok(json),
                        Err(_) => {
                            #[cfg(debug_assertions)]
                            dbg!(format!("{} - Failed to parse page JSON data", url));
                            Err("Failed to parse page JSON data")
                        }
                    }
                }
                // If Reddit returns an error, tell the user the page was not found
                false => {
                    #[cfg(debug_assertions)]
                    dbg!(format!("{} - Page not found", url));
                    Err("Page not found")
                }
            }
        }
        // If the request can't be sent to Reddit, return this error to the user
        Err(e) => {
            #[cfg(debug_assertions)]
            dbg!(format!("{} - {}", url, e));
            Err("Couldn't send request to Reddit")
        }
    }
}
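
// Illustrative unit tests for the pure helpers above: a minimal sketch that is not part of the
// original file and assumes only the standard cargo test harness (no network access needed).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn param_reads_query_pairs() {
        // A present key returns its value; a missing key returns an empty string.
        assert_eq!(param("/r/rust/?sort=top&t=week", "sort"), "top");
        assert_eq!(param("/r/rust/", "sort"), "");
    }

    #[test]
    fn rewrite_url_points_reddit_links_to_libreddit() {
        let body = r#"<a href="https://www.reddit.com/r/rust/">r/rust</a>"#;
        assert_eq!(rewrite_url(body), r#"<a href="/r/rust/">r/rust</a>"#);
    }

    #[test]
    fn format_num_appends_suffixes() {
        // Values are truncated, not rounded.
        assert_eq!(format_num(500), "500");
        assert_eq!(format_num(1_234), "1k");
        assert_eq!(format_num(2_500_000), "2m");
    }
}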