redsunlib/src/utils.rs

735 lines
20 KiB
Rust
Raw Normal View History

2020-11-21 19:05:27 +13:00
//
// CRATES
//
2021-03-18 11:30:33 +13:00
use crate::{client::json, esc, server::RequestExt};
2021-01-01 18:03:44 +13:00
use askama::Template;
2021-03-18 11:30:33 +13:00
use cookie::Cookie;
use hyper::{Body, Request, Response};
2021-01-03 07:58:21 +13:00
use regex::Regex;
2021-03-18 11:30:33 +13:00
use serde_json::Value;
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
2021-01-14 09:52:00 +13:00
use time::{Duration, OffsetDateTime};
2021-03-18 11:30:33 +13:00
use url::Url;
2020-11-21 19:05:27 +13:00
2021-01-13 10:43:03 +13:00
// Post flair with content, background color and foreground color
pub struct Flair {
	/// Ordered parts (text and/or emoji) that make up the flair.
	pub flair_parts: Vec<FlairPart>,
	/// Plain-text form of the flair.
	pub text: String,
	/// Background color as supplied by the Reddit API.
	pub background_color: String,
	/// Foreground (text) color.
	pub foreground_color: String,
}
2021-01-15 12:13:52 +13:00
// Part of flair, either emoji or text
pub struct FlairPart {
	/// "text" or "emoji" (the `e` field of Reddit's richtext flair).
	pub flair_part_type: String,
	/// Escaped text, or a proxied emoji image URL, depending on the type.
	pub value: String,
}
2021-02-25 18:29:23 +13:00
impl FlairPart {
pub fn parse(flair_type: &str, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<Self> {
// Parse type of flair
match flair_type {
// If flair contains emojis and text
"richtext" => match rich_flair {
Some(rich) => rich
.iter()
// For each part of the flair, extract text and emojis
.map(|part| {
let value = |name: &str| part[name].as_str().unwrap_or_default();
Self {
flair_part_type: value("e").to_string(),
value: match value("e") {
2021-07-20 05:20:00 +12:00
"text" => esc!(value("t")),
2021-02-25 18:29:23 +13:00
"emoji" => format_url(value("u")),
_ => String::new(),
},
}
})
.collect::<Vec<Self>>(),
None => Vec::new(),
},
// If flair contains only text
"text" => match text_flair {
Some(text) => vec![Self {
flair_part_type: "text".to_string(),
2021-07-20 05:20:00 +12:00
value: esc!(text),
2021-02-25 18:29:23 +13:00
}],
None => Vec::new(),
},
_ => Vec::new(),
}
}
}
2021-01-17 12:02:24 +13:00
// Post or comment author together with their flair
pub struct Author {
	pub name: String,
	pub flair: Flair,
	/// The API's "distinguished" field; empty when the author is not distinguished.
	pub distinguished: String,
}
2020-12-30 16:01:02 +13:00
// Post flags with nsfw and stickied
pub struct Flags {
	/// Whether the post is marked NSFW (`over_18` in the API).
	pub nsfw: bool,
	/// Whether the post is stickied/pinned.
	pub stickied: bool,
}
2020-11-18 08:37:40 +13:00
#[derive(Debug)]
// A media asset (image, video, gif, …) attached to a post
pub struct Media {
	/// Primary (proxied) URL of the media.
	pub url: String,
	/// Alternative URL (the HLS stream for videos); empty when absent.
	pub alt_url: String,
	pub width: i64,
	pub height: i64,
	/// Proxied URL of the preview/poster image.
	pub poster: String,
}
2021-02-25 18:29:23 +13:00
impl Media {
	/// Determine a post's media type and collect its URLs.
	///
	/// Returns `(post_type, media, gallery)`, where `post_type` is one of
	/// "gif", "video", "image", "self", "gallery" or "link". Branch order
	/// matters: video sources are checked before image previews, which are
	/// checked before the self/gallery/link fallbacks.
	pub async fn parse(data: &Value) -> (String, Self, Vec<GalleryMedia>) {
		let mut gallery = Vec::new();

		// Define the various known places that Reddit might put video URLs.
		let data_preview = &data["preview"]["reddit_video_preview"];
		let secure_media = &data["secure_media"]["reddit_video"];
		let crosspost_parent_media = &data["crosspost_parent_list"][0]["secure_media"]["reddit_video"];

		// If post is a video, return the video
		let (post_type, url_val, alt_url_val) = if data_preview["fallback_url"].is_string() {
			(
				// A GIF served as a video keeps the "gif" type
				if data_preview["is_gif"].as_bool().unwrap_or(false) {
					"gif"
				} else {
					"video"
				},
				&data_preview["fallback_url"],
				// Keep the HLS stream as an alternative URL
				Some(&data_preview["hls_url"]),
			)
		} else if secure_media["fallback_url"].is_string() {
			(
				if secure_media["is_gif"].as_bool().unwrap_or(false) {
					"gif"
				} else {
					"video"
				},
				&secure_media["fallback_url"],
				Some(&secure_media["hls_url"]),
			)
		} else if crosspost_parent_media["fallback_url"].is_string() {
			(
				if crosspost_parent_media["is_gif"].as_bool().unwrap_or(false) {
					"gif"
				} else {
					"video"
				},
				&crosspost_parent_media["fallback_url"],
				Some(&crosspost_parent_media["hls_url"]),
			)
		} else if data["post_hint"].as_str().unwrap_or("") == "image" {
			// Handle images, whether GIFs or pics
			let preview = &data["preview"]["images"][0];
			let mp4 = &preview["variants"]["mp4"];

			if mp4.is_object() {
				// Return the mp4 if the media is a gif
				("gif", &mp4["source"]["url"], None)
			} else {
				// Return the picture if the media is an image
				if data["domain"] == "i.redd.it" {
					("image", &data["url"], None)
				} else {
					("image", &preview["source"]["url"], None)
				}
			}
		} else if data["is_self"].as_bool().unwrap_or_default() {
			// If type is self, return permalink
			("self", &data["permalink"], None)
		} else if data["is_gallery"].as_bool().unwrap_or_default() {
			// If this post contains a gallery of images
			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);

			("gallery", &data["url"], None)
		} else {
			// If type can't be determined, return url
			("link", &data["url"], None)
		};

		// Dimensions and poster image come from the first preview image
		let source = &data["preview"]["images"][0]["source"];

		// Self and link posts keep their URL as-is; everything else is proxied
		let url = if post_type == "self" || post_type == "link" {
			url_val.as_str().unwrap_or_default().to_string()
		} else {
			format_url(url_val.as_str().unwrap_or_default())
		};

		let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));

		(
			post_type.to_string(),
			Self {
				url,
				alt_url,
				width: source["width"].as_i64().unwrap_or_default(),
				height: source["height"].as_i64().unwrap_or_default(),
				poster: format_url(source["url"].as_str().unwrap_or_default()),
			},
			gallery,
		)
	}
}
2021-02-07 09:05:11 +13:00
// One image of a gallery post
pub struct GalleryMedia {
	/// Proxied URL of the gallery image.
	pub url: String,
	pub width: i64,
	pub height: i64,
	/// Caption supplied by the post author; may be empty.
	pub caption: String,
	/// Outbound link attached to this gallery item; may be empty.
	pub outbound_url: String,
}
2021-02-25 18:29:23 +13:00
impl GalleryMedia {
fn parse(items: &Value, metadata: &Value) -> Vec<Self> {
2021-02-26 06:07:45 +13:00
items
.as_array()
2021-02-25 18:29:23 +13:00
.unwrap_or(&Vec::new())
.iter()
.map(|item| {
// For each image in gallery
let media_id = item["media_id"].as_str().unwrap_or_default();
let image = &metadata[media_id]["s"];
// Construct gallery items
Self {
url: format_url(image["u"].as_str().unwrap_or_default()),
width: image["x"].as_i64().unwrap_or_default(),
height: image["y"].as_i64().unwrap_or_default(),
caption: item["caption"].as_str().unwrap_or_default().to_string(),
outbound_url: item["outbound_url"].as_str().unwrap_or_default().to_string(),
}
})
.collect::<Vec<Self>>()
}
}
2020-11-18 08:37:40 +13:00
// Post containing content, metadata and media
pub struct Post {
	pub id: String,
	pub title: String,
	/// Subreddit the post belongs to.
	pub community: String,
	/// Rendered HTML body with Reddit URLs rewritten to relative links.
	pub body: String,
	pub author: Author,
	pub permalink: String,
	/// (formatted, exact) score pair from `format_num`, or a bullet + "Hidden"
	/// when the score is hidden.
	pub score: (String, String),
	/// Upvote ratio as a whole-number percentage (0–100).
	pub upvote_ratio: i64,
	/// One of "gif", "video", "image", "self", "gallery" or "link".
	pub post_type: String,
	pub flair: Flair,
	pub flags: Flags,
	pub thumbnail: Media,
	pub media: Media,
	pub domain: String,
	/// Relative time, e.g. "3h ago", or a short date for old posts.
	pub rel_time: String,
	/// Absolute creation time string.
	pub created: String,
	/// (formatted, exact) comment count pair from `format_num`.
	pub comments: (String, String),
	pub gallery: Vec<GalleryMedia>,
	pub awards: Awards,
}
2021-02-25 18:29:23 +13:00
impl Post {
// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
pub async fn fetch(path: &str, quarantine: bool) -> Result<(Vec<Self>, String), String> {
2021-02-25 18:29:23 +13:00
let res;
let post_list;
// Send a request to the url
match json(path.to_string(), quarantine).await {
2021-02-25 18:29:23 +13:00
// If success, receive JSON in response
Ok(response) => {
res = response;
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}
// Fetch the list of posts from the JSON response
match res["data"]["children"].as_array() {
Some(list) => post_list = list,
None => return Err("No posts found".to_string()),
}
let mut posts: Vec<Self> = Vec::new();
// For each post from posts list
for post in post_list {
let data = &post["data"];
2021-03-10 04:22:17 +13:00
let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
2021-02-25 18:29:23 +13:00
let score = data["score"].as_i64().unwrap_or_default();
let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
2021-03-12 17:15:26 +13:00
let title = esc!(post, "title");
2021-02-25 18:29:23 +13:00
// Determine the type of media along with the media URL
let (post_type, media, gallery) = Media::parse(&data).await;
let awards = Awards::parse(&data["all_awardings"]);
2021-02-25 18:29:23 +13:00
// selftext_html is set for text posts when browsing.
let mut body = rewrite_urls(&val(post, "selftext_html"));
if body == "" {
body = rewrite_urls(&val(post, "body_html"))
}
2021-02-25 18:29:23 +13:00
posts.push(Self {
id: val(post, "id"),
title,
2021-02-25 18:29:23 +13:00
community: val(post, "subreddit"),
body,
2021-02-25 18:29:23 +13:00
author: Author {
name: val(post, "author"),
flair: Flair {
flair_parts: FlairPart::parse(
data["author_flair_type"].as_str().unwrap_or_default(),
data["author_flair_richtext"].as_array(),
data["author_flair_text"].as_str(),
),
2021-03-12 17:15:26 +13:00
text: esc!(post, "link_flair_text"),
2021-02-25 18:29:23 +13:00
background_color: val(post, "author_flair_background_color"),
foreground_color: val(post, "author_flair_text_color"),
},
distinguished: val(post, "distinguished"),
},
score: if data["hide_score"].as_bool().unwrap_or_default() {
("\u{2022}".to_string(), "Hidden".to_string())
2021-02-25 18:29:23 +13:00
} else {
format_num(score)
},
upvote_ratio: ratio as i64,
post_type,
thumbnail: Media {
url: format_url(val(post, "thumbnail").as_str()),
alt_url: String::new(),
2021-02-25 18:29:23 +13:00
width: data["thumbnail_width"].as_i64().unwrap_or_default(),
height: data["thumbnail_height"].as_i64().unwrap_or_default(),
poster: "".to_string(),
},
media,
domain: val(post, "domain"),
flair: Flair {
flair_parts: FlairPart::parse(
data["link_flair_type"].as_str().unwrap_or_default(),
data["link_flair_richtext"].as_array(),
data["link_flair_text"].as_str(),
),
2021-03-12 17:15:26 +13:00
text: esc!(post, "link_flair_text"),
2021-02-25 18:29:23 +13:00
background_color: val(post, "link_flair_background_color"),
foreground_color: if val(post, "link_flair_text_color") == "dark" {
"black".to_string()
} else {
"white".to_string()
},
},
flags: Flags {
nsfw: data["over_18"].as_bool().unwrap_or_default(),
stickied: data["stickied"].as_bool().unwrap_or_default(),
},
permalink: val(post, "permalink"),
rel_time,
created,
comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
gallery,
awards,
2021-02-25 18:29:23 +13:00
});
}
Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
}
}
#[derive(Template)]
#[template(path = "comment.html", escape = "none")]
// Comment with content, post, score and data/time that it was posted
pub struct Comment {
	pub id: String,
	pub kind: String,
	/// Id of the parent comment or post.
	pub parent_id: String,
	pub parent_kind: String,
	pub post_link: String,
	pub post_author: String,
	/// Rendered HTML body of the comment.
	pub body: String,
	pub author: Author,
	/// (formatted, exact) score pair from `format_num`.
	pub score: (String, String),
	pub rel_time: String,
	pub created: String,
	/// (relative, absolute) time of the last edit; empty when never edited.
	pub edited: (String, String),
	/// Nested replies, rendered recursively by the template.
	pub replies: Vec<Comment>,
	/// Whether this comment should be visually highlighted (e.g. single-comment view).
	pub highlighted: bool,
	pub awards: Awards,
	pub collapsed: bool,
	/// Whether the comment is hidden by the user's filter list.
	pub is_filtered: bool,
}
#[derive(Default, Clone)]
// A single award given to a post or comment
pub struct Award {
	pub name: String,
	/// Proxied URL of the award's icon image.
	pub icon_url: String,
	pub description: String,
	/// Number of times this award was given.
	pub count: i64,
}
impl std::fmt::Display for Award {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{} {} {}", self.name, self.icon_url, self.description)
}
}
// Newtype wrapper around the list of awards on a post or comment
pub struct Awards(pub Vec<Award>);

// Deref to the inner Vec so callers and templates can use slice/iterator
// methods directly on an Awards value.
impl std::ops::Deref for Awards {
	type Target = Vec<Award>;

	fn deref(&self) -> &Self::Target {
		&self.0
	}
}
impl std::fmt::Display for Awards {
	/// Write each award on its own line, stopping at the first write error.
	fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
		for award in self.iter() {
			writeln!(f, "{}", award)?;
		}
		Ok(())
	}
}
// Convert Reddit awards JSON to Awards struct
impl Awards {
	/// Parse the `all_awardings` array from the Reddit API.
	pub fn parse(items: &Value) -> Self {
		// Convert a single awarding object into an Award
		let to_award = |item: &Value| Award {
			name: item["name"].as_str().unwrap_or_default().to_string(),
			// First entry of the resized icon list is proxied
			icon_url: format_url(item["resized_icons"][0]["url"].as_str().unwrap_or_default()),
			description: item["description"].as_str().unwrap_or_default().to_string(),
			// Default to a count of one when the field is missing or malformed
			count: i64::from_str(&item["count"].to_string()).unwrap_or(1),
		};

		Self(items.as_array().map_or_else(Vec::new, |list| list.iter().map(to_award).collect()))
	}
}
2021-02-25 18:29:23 +13:00
// Template data for the error page
#[derive(Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
	/// Error message shown to the user.
	pub msg: String,
	pub prefs: Preferences,
	/// URL of the request that produced the error.
	pub url: String,
}
2021-01-09 17:55:40 +13:00
#[derive(Default)]
// User struct containing metadata about user
pub struct User {
	pub name: String,
	pub title: String,
	/// Proxied URL of the user's avatar.
	pub icon: String,
	pub karma: i64,
	/// Account creation date string.
	pub created: String,
	/// Proxied URL of the profile banner image.
	pub banner: String,
	/// Rendered profile description.
	pub description: String,
}
2020-12-29 15:42:46 +13:00
#[derive(Default)]
// Subreddit struct containing metadata about community
pub struct Subreddit {
	pub name: String,
	pub title: String,
	/// Short public description.
	pub description: String,
	/// Rendered sidebar/info text.
	pub info: String,
	// pub moderators: Vec<String>,
	/// Proxied URL of the community icon.
	pub icon: String,
	/// (formatted, exact) subscriber count pair.
	pub members: (String, String),
	/// (formatted, exact) active-user count pair.
	pub active: (String, String),
	/// Whether the subreddit has a wiki enabled.
	pub wiki: bool,
}
2020-11-20 10:49:32 +13:00
// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
	// Time window for "top"/"controversial" sorts — presumably; verify against callers
	pub t: Option<String>,
	/// Search query.
	pub q: Option<String>,
	/// Sort order (eg. "hot", "new").
	pub sort: Option<String>,
	/// Pagination cursor: fetch results after this post.
	pub after: Option<String>,
	/// Pagination cursor: fetch results before this post.
	pub before: Option<String>,
}
2021-01-11 15:15:34 +13:00
#[derive(Default)]
// User preferences, each backed by a cookie of the same name
pub struct Preferences {
	pub theme: String,
	pub front_page: String,
	pub layout: String,
	pub wide: String,
	pub show_nsfw: String,
	pub hide_hls_notification: String,
	pub use_hls: String,
	pub autoplay_videos: String,
	pub comment_sort: String,
	pub post_sort: String,
	/// Subscribed subreddits, parsed from a '+'-separated cookie value.
	pub subscriptions: Vec<String>,
	/// Filtered subreddits/users, parsed from a '+'-separated cookie value.
	pub filters: Vec<String>,
}
2021-02-25 18:29:23 +13:00
impl Preferences {
// Build preferences from cookies
2021-03-18 11:30:33 +13:00
pub fn new(req: Request<Body>) -> Self {
2021-02-25 18:29:23 +13:00
Self {
theme: setting(&req, "theme"),
front_page: setting(&req, "front_page"),
layout: setting(&req, "layout"),
wide: setting(&req, "wide"),
show_nsfw: setting(&req, "show_nsfw"),
use_hls: setting(&req, "use_hls"),
hide_hls_notification: setting(&req, "hide_hls_notification"),
2021-10-26 17:27:55 +13:00
autoplay_videos: setting(&req, "autoplay_videos"),
comment_sort: setting(&req, "comment_sort"),
post_sort: setting(&req, "post_sort"),
subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
filters: setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
2021-02-25 18:29:23 +13:00
}
}
}
/// Gets a `HashSet` of filters from the cookie in the given `Request`.
pub fn get_filters(req: &Request<Body>) -> HashSet<String> {
	// `req` is already a reference; `&req` would be a needless double borrow
	setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect::<HashSet<String>>()
}
/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being
/// a subreddit name or a user name). If a `Post`'s subreddit or author is
/// found in the filters, it is removed. Returns `true` if _all_ posts were
/// filtered out, or `false` otherwise.
pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> bool {
	// An already-empty listing is reported as "not fully filtered"
	if posts.is_empty() {
		return false;
	}

	// Drop posts whose community or author ("u_<name>") is filtered
	posts.retain(|post| !filters.contains(&post.community) && !filters.contains(&format!("u_{}", post.author.name)));
	posts.is_empty()
}
2020-12-01 18:10:08 +13:00
//
2020-12-08 07:53:22 +13:00
// FORMATTING
2020-12-01 18:10:08 +13:00
//
2021-02-25 18:29:23 +13:00
// Grab a query parameter from a url
pub fn param(path: &str, value: &str) -> Option<String> {
Some(
Url::parse(format!("https://libredd.it/{}", path).as_str())
.ok()?
.query_pairs()
.into_owned()
.collect::<HashMap<_, _>>()
.get(value)?
.clone(),
)
2021-01-01 12:54:13 +13:00
}
// Retrieve the value of a setting by name
pub fn setting(req: &Request<Body>, name: &str) -> String {
// Parse a cookie value from request
req
.cookie(name)
.unwrap_or_else(|| {
// If there is no cookie for this setting, try receiving a default from an environment variable
if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
Cookie::new(name, default)
} else {
Cookie::named(name)
}
})
.value()
.to_string()
2021-01-06 15:04:49 +13:00
}
2021-01-03 17:50:23 +13:00
// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
	// Only the special "random"/"randnsfw" pseudo-subreddits need resolving
	if sub != "random" && sub != "randnsfw" {
		return Err("No redirect needed".to_string());
	}

	// Ask the API which concrete subreddit was picked, then redirect there
	let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
		.as_str()
		.unwrap_or_default()
		.to_string();

	Ok(redirect(format!("/r/{}{}", new_sub, additional)))
}
2020-12-26 15:06:33 +13:00
// Direct urls to proxy if proxy is enabled
pub fn format_url(url: &str) -> String {
	// Placeholder values from the Reddit API map to an empty URL
	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
		String::new()
	} else {
		// Anything that fails to parse as a URL also becomes empty
		Url::parse(url).map_or(String::new(), |parsed| {
			let domain = parsed.domain().unwrap_or_default();

			// Apply `regex` to the url and join `format` with the first one or
			// two capture groups, producing a relative proxy path. Any failure
			// (bad regex, no match, unsupported segment count) yields "".
			let capture = |regex: &str, format: &str, segments: i16| {
				Regex::new(regex).map_or(String::new(), |re| {
					re.captures(url).map_or(String::new(), |caps| match segments {
						1 => [format, &caps[1]].join(""),
						2 => [format, &caps[1], "/", &caps[2]].join(""),
						_ => String::new(),
					})
				})
			};

			// Evaluate each expression in order, returning the first non-empty result
			macro_rules! chain {
				() => {
					{
						String::new()
					}
				};

				( $first_fn:expr, $($other_fns:expr), *) => {
					{
						let result = $first_fn;

						if result.is_empty() {
							chain!($($other_fns,)*)
						}
						else
						{
							result
						}
					}
				};
			}

			// Map each known Reddit media domain onto this proxy's routes
			match domain {
				"v.redd.it" => chain!(
					capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$|\?source=fallback))", "/vid/", 2),
					capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2)
				),
				"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
				"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
				"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
				"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
				"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1),
				"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1),
				"styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
				"www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
				_ => String::new(),
			}
		})
	}
}
2021-01-03 07:58:21 +13:00
// Rewrite Reddit links to Libreddit in body of text
pub fn rewrite_urls(input_text: &str) -> String {
	// Turn absolute reddit.com/redd.it hrefs into relative links
	let text1 =
		Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|)(reddit\.com|redd\.it)/"#).map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string());

	// Rewrite external media previews to Libreddit
	// NOTE(review): only the FIRST match is run through `format_url` and then
	// substituted for every match — this assumes all external-preview URLs in
	// one body are identical; verify against real post bodies.
	Regex::new(r"https://external-preview\.redd\.it(.*)[^?]").map_or(String::new(), |re| {
		if re.is_match(&text1) {
			re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
		} else {
			text1
		}
	})
}
// Format vote count to a string that will be displayed.
// Append `m` and `k` for millions and thousands respectively, and
// round to the nearest tenth.
pub fn format_num(num: i64) -> (String, String) {
	let exact = num.to_string();

	// Magnitude outside ±999,999 → millions; outside ±999 → thousands;
	// otherwise the number is shown as-is.
	let rounded = if !(-999_999..=999_999).contains(&num) {
		format!("{:.1}m", num as f64 / 1_000_000.0)
	} else if !(-999..=999).contains(&num) {
		format!("{:.1}k", num as f64 / 1_000.0)
	} else {
		exact.clone()
	};

	(rounded, exact)
}
2021-02-25 18:29:23 +13:00
// Parse a relative and absolute time from a UNIX timestamp
2021-03-10 04:22:17 +13:00
pub fn time(created: f64) -> (String, String) {
let time = OffsetDateTime::from_unix_timestamp(created.round() as i64);
2021-01-13 07:59:32 +13:00
let time_delta = OffsetDateTime::now_utc() - time;
2021-01-15 12:13:52 +13:00
// If the time difference is more than a month, show full date
let rel_time = if time_delta > Duration::days(30) {
2021-01-15 12:13:52 +13:00
time.format("%b %d '%y")
// Otherwise, show relative date/time
2021-01-14 13:31:24 +13:00
} else if time_delta.whole_days() > 0 {
format!("{}d ago", time_delta.whole_days())
2021-01-13 07:59:32 +13:00
} else if time_delta.whole_hours() > 0 {
format!("{}h ago", time_delta.whole_hours())
} else {
format!("{}m ago", time_delta.whole_minutes())
};
2021-02-15 11:53:09 +13:00
(rel_time, time.format("%b %d %Y, %H:%M:%S UTC"))
2021-01-13 07:59:32 +13:00
}
2020-11-18 08:37:40 +13:00
// val() function used to parse JSON from Reddit APIs
2021-01-15 06:53:54 +13:00
pub fn val(j: &Value, k: &str) -> String {
2021-01-22 18:25:51 +13:00
j["data"][k].as_str().unwrap_or_default().to_string()
2020-11-18 08:37:40 +13:00
}
// Escape < and > to accurately render HTML
#[macro_export]
macro_rules! esc {
	// Escape an arbitrary string expression.
	($f:expr) => {
		$f.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;")
	};
	// Look up `$j["data"][$k]` as a string and escape it.
	// NOTE(review): unlike the single-argument arm, this arm does not escape
	// '&' — confirm whether that asymmetry is intentional.
	($j:expr, $k:expr) => {
		$j["data"][$k].as_str().unwrap_or_default().to_string().replace('<', "&lt;").replace('>', "&gt;")
	};
}
2020-11-20 17:42:18 +13:00
//
// NETWORKING
//
2021-03-18 11:30:33 +13:00
pub fn template(t: impl Template) -> Result<Response<Body>, String> {
Ok(
Response::builder()
.status(200)
.header("content-type", "text/html")
.body(t.render().unwrap_or_default().into())
.unwrap_or_default(),
)
2021-02-14 12:02:38 +13:00
}
2021-03-18 11:30:33 +13:00
pub fn redirect(path: String) -> Response<Body> {
Response::builder()
.status(302)
.header("content-type", "text/html")
2021-02-14 12:02:38 +13:00
.header("Location", &path)
2021-03-18 11:30:33 +13:00
.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path).into())
.unwrap_or_default()
}
2021-03-18 11:30:33 +13:00
pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
let url = req.uri().to_string();
2021-02-26 06:07:45 +13:00
let body = ErrorTemplate {
msg,
prefs: Preferences::new(req),
2021-11-15 15:51:36 +13:00
url,
2021-02-26 06:07:45 +13:00
}
.render()
.unwrap_or_default();
2021-03-18 11:30:33 +13:00
Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
2021-01-01 18:03:44 +13:00
}
#[cfg(test)]
mod tests {
	use super::format_num;

	#[test]
	fn format_num_works() {
		// Small magnitudes are returned verbatim
		assert_eq!(format_num(0), ("0".to_string(), "0".to_string()));
		assert_eq!(format_num(567), ("567".to_string(), "567".to_string()));
		// Thousands round to one decimal with a `k` suffix
		assert_eq!(format_num(1234), ("1.2k".to_string(), "1234".to_string()));
		assert_eq!(format_num(1999), ("2.0k".to_string(), "1999".to_string()));
		assert_eq!(format_num(1001), ("1.0k".to_string(), "1001".to_string()));
		// Millions use an `m` suffix, including at the boundary
		assert_eq!(format_num(1_000_000), ("1.0m".to_string(), "1000000".to_string()));
		assert_eq!(format_num(1_999_999), ("2.0m".to_string(), "1999999".to_string()));
		// Negative values keep their sign
		assert_eq!(format_num(-1999), ("-2.0k".to_string(), "-1999".to_string()));
		assert_eq!(format_num(-1_500_000), ("-1.5m".to_string(), "-1500000".to_string()));
	}
}