use crate::config::get_setting;
//
// CRATES
//
use crate::{client::json, server::RequestExt};
use askama::Template;
use cookie::Cookie;
use hyper::{Body, Request, Response};
use once_cell::sync::Lazy;
use regex::Regex;
use rust_embed::RustEmbed;
use serde_json::Value;
use std::collections::{HashMap, HashSet};
use std::env;
use std::str::FromStr;
use time::{macros::format_description, Duration, OffsetDateTime};
use url::Url;

/// Write a message to stderr in debug mode. This macro is a no-op in
/// release builds.
#[macro_export]
macro_rules! dbg_msg {
	($x:expr) => {
		#[cfg(debug_assertions)]
		eprintln!("{}:{}: {}", file!(), line!(), $x.to_string())
	};

	($($x:expr),+) => {
		#[cfg(debug_assertions)]
		dbg_msg!(format!($($x),+))
	};
}

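// Illustrative usage of `dbg_msg!` (a sketch, not part of the original source): in a
// debug build each call expands to an `eprintln!` prefixed with the file and line,
// while in a release build it expands to nothing.
//
//     dbg_msg!("unexpected response");
//     dbg_msg!("failed to fetch {}", "some/path");
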
/// Identifies whether or not the page is a subreddit, a user page, or a post.
/// This is used by the NSFW landing template to determine the message to convey
/// to the user.
#[derive(PartialEq, Eq)]
pub enum ResourceType {
	Subreddit,
	User,
	Post,
}

// Post flair with content, background color and foreground color
pub struct Flair {
	pub flair_parts: Vec<FlairPart>,
	pub text: String,
	pub background_color: String,
	pub foreground_color: String,
}

// Part of flair, either emoji or text
#[derive(Clone)]
pub struct FlairPart {
	pub flair_part_type: String,
	pub value: String,
}

impl FlairPart {
	pub fn parse(flair_type: &str, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<Self> {
		// Parse type of flair
		match flair_type {
			// If flair contains emojis and text
			"richtext" => match rich_flair {
				Some(rich) => rich
					.iter()
					// For each part of the flair, extract text and emojis
					.map(|part| {
						let value = |name: &str| part[name].as_str().unwrap_or_default();
						Self {
							flair_part_type: value("e").to_string(),
							value: match value("e") {
								"text" => value("t").to_string(),
								"emoji" => format_url(value("u")),
								_ => String::new(),
							},
						}
					})
					.collect::<Vec<Self>>(),
				None => Vec::new(),
			},
			// If flair contains only text
			"text" => match text_flair {
				Some(text) => vec![Self {
					flair_part_type: "text".to_string(),
					value: text.to_string(),
				}],
				None => Vec::new(),
			},
			_ => Vec::new(),
		}
	}
}

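// Sketch of the inputs `FlairPart::parse` expects (inferred from the accessors above;
// the concrete values are made up for illustration):
//
//     // flair_type = "richtext", rich_flair = Some(&parts) where each part looks like
//     //   { "e": "text",  "t": "Helpful " }                          -> text kept verbatim
//     //   { "e": "emoji", "u": "https://emoji.redditmedia.com/x/y" } -> URL run through format_url
//     // flair_type = "text", text_flair = Some("Helpful")            -> a single text part
//     // anything else                                                -> an empty Vec
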
pub struct Author {
	pub name: String,
	pub flair: Flair,
	pub distinguished: String,
}

pub struct Poll {
	pub poll_options: Vec<PollOption>,
	pub voting_end_timestamp: (String, String),
	pub total_vote_count: u64,
}

impl Poll {
	pub fn parse(poll_data: &Value) -> Option<Self> {
		poll_data.as_object()?;

		let total_vote_count = poll_data["total_vote_count"].as_u64()?;
		// voting_end_timestamp is in milliseconds; divide by 1000 since time() expects seconds
		let voting_end_timestamp = time(poll_data["voting_end_timestamp"].as_f64()? / 1000.0);
		let poll_options = PollOption::parse(&poll_data["options"])?;

		Some(Self {
			poll_options,
			total_vote_count,
			voting_end_timestamp,
		})
	}

	pub fn most_votes(&self) -> u64 {
		self.poll_options.iter().filter_map(|o| o.vote_count).max().unwrap_or(0)
	}
}

pub struct PollOption {
	pub id: u64,
	pub text: String,
	pub vote_count: Option<u64>,
}

impl PollOption {
	pub fn parse(options: &Value) -> Option<Vec<Self>> {
		Some(
			options
				.as_array()?
				.iter()
				.filter_map(|option| {
					// For each poll option

					// We can't just use as_u64() here because Reddit returns the id as a JSON
					// string, so as_u64() would yield None.
					let id = option["id"].as_str()?.parse::<u64>().ok()?;
					let text = option["text"].as_str()?.to_owned();
					let vote_count = option["vote_count"].as_u64();

					// Construct PollOption items
					Some(Self { id, text, vote_count })
				})
				.collect::<Vec<Self>>(),
		)
	}
}

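// Sketch of the `poll_data` JSON that `Poll::parse` and `PollOption::parse` consume
// (field names taken from the accessors above; the values are made up):
//
//     {
//         "total_vote_count": 150,
//         "voting_end_timestamp": 1696000000000.0,        // milliseconds
//         "options": [
//             { "id": "1", "text": "Yes", "vote_count": 90 },
//             { "id": "2", "text": "No" }                  // vote_count absent while voting is open
//         ]
//     }
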
// Post flags with nsfw and stickied
pub struct Flags {
	pub nsfw: bool,
	pub stickied: bool,
}

#[derive(Debug)]
pub struct Media {
	pub url: String,
	pub alt_url: String,
	pub width: i64,
	pub height: i64,
	pub poster: String,
}

impl Media {
	pub async fn parse(data: &Value) -> (String, Self, Vec<GalleryMedia>) {
		let mut gallery = Vec::new();

		// Define the various known places that Reddit might put video URLs.
		let data_preview = &data["preview"]["reddit_video_preview"];
		let secure_media = &data["secure_media"]["reddit_video"];
		let crosspost_parent_media = &data["crosspost_parent_list"][0]["secure_media"]["reddit_video"];

		// If post is a video, return the video
		let (post_type, url_val, alt_url_val) = if data_preview["fallback_url"].is_string() {
			(
				if data_preview["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
				&data_preview["fallback_url"],
				Some(&data_preview["hls_url"]),
			)
		} else if secure_media["fallback_url"].is_string() {
			(
				if secure_media["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
				&secure_media["fallback_url"],
				Some(&secure_media["hls_url"]),
			)
		} else if crosspost_parent_media["fallback_url"].is_string() {
			(
				if crosspost_parent_media["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
				&crosspost_parent_media["fallback_url"],
				Some(&crosspost_parent_media["hls_url"]),
			)
		} else if data["post_hint"].as_str().unwrap_or("") == "image" {
			// Handle images, whether GIFs or pics
			let preview = &data["preview"]["images"][0];
			let mp4 = &preview["variants"]["mp4"];

			if mp4.is_object() {
				// Return the mp4 if the media is a gif
				("gif", &mp4["source"]["url"], None)
			} else {
				// Return the picture if the media is an image
				if data["domain"] == "i.redd.it" {
					("image", &data["url"], None)
				} else {
					("image", &preview["source"]["url"], None)
				}
			}
		} else if data["is_self"].as_bool().unwrap_or_default() {
			// If type is self, return permalink
			("self", &data["permalink"], None)
		} else if data["is_gallery"].as_bool().unwrap_or_default() {
			// If this post contains a gallery of images
			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);

			("gallery", &data["url"], None)
		} else if data["is_reddit_media_domain"].as_bool().unwrap_or_default() && data["domain"] == "i.redd.it" {
			// If this post contains a reddit media (image) URL.
			("image", &data["url"], None)
		} else {
			// If type can't be determined, return url
			("link", &data["url"], None)
		};

		let source = &data["preview"]["images"][0]["source"];

		let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));

		(
			post_type.to_string(),
			Self {
				url: format_url(url_val.as_str().unwrap_or_default()),
				alt_url,
				// Note: in the data["is_reddit_media_domain"] path above
				// width and height will be 0.
				width: source["width"].as_i64().unwrap_or_default(),
				height: source["height"].as_i64().unwrap_or_default(),
				poster: format_url(source["url"].as_str().unwrap_or_default()),
			},
			gallery,
		)
	}
}

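// Summary of the decision order in `Media::parse` above, for quick reference:
// reddit_video_preview -> secure_media -> crosspost parent video (each "video" or "gif"),
// then post_hint == "image" (mp4 variant => "gif", otherwise "image"), then "self",
// then "gallery", then i.redd.it media ("image"), and finally a plain "link".
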
pub struct GalleryMedia {
	pub url: String,
	pub width: i64,
	pub height: i64,
	pub caption: String,
	pub outbound_url: String,
}

impl GalleryMedia {
	fn parse(items: &Value, metadata: &Value) -> Vec<Self> {
		items
			.as_array()
			.unwrap_or(&Vec::new())
			.iter()
			.map(|item| {
				// For each image in gallery
				let media_id = item["media_id"].as_str().unwrap_or_default();
				let image = &metadata[media_id]["s"];
				let image_type = &metadata[media_id]["m"];

				let url = if image_type == "image/gif" {
					image["gif"].as_str().unwrap_or_default()
				} else {
					image["u"].as_str().unwrap_or_default()
				};

				// Construct gallery items
				Self {
					url: format_url(url),
					width: image["x"].as_i64().unwrap_or_default(),
					height: image["y"].as_i64().unwrap_or_default(),
					caption: item["caption"].as_str().unwrap_or_default().to_string(),
					outbound_url: item["outbound_url"].as_str().unwrap_or_default().to_string(),
				}
			})
			.collect::<Vec<Self>>()
	}
}

// Post containing content, metadata and media
pub struct Post {
	pub id: String,
	pub title: String,
	pub community: String,
	pub body: String,
	pub author: Author,
	pub permalink: String,
	pub poll: Option<Poll>,
	pub score: (String, String),
	pub upvote_ratio: i64,
	pub post_type: String,
	pub flair: Flair,
	pub flags: Flags,
	pub thumbnail: Media,
	pub media: Media,
	pub domain: String,
	pub rel_time: String,
	pub created: String,
	pub num_duplicates: u64,
	pub comments: (String, String),
	pub gallery: Vec<GalleryMedia>,
	pub awards: Awards,
	pub nsfw: bool,
}

impl Post {
	// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
	pub async fn fetch(path: &str, quarantine: bool) -> Result<(Vec<Self>, String), String> {
		// Send a request to the url
		let res = match json(path.to_string(), quarantine).await {
			// If success, receive JSON in response
			Ok(response) => response,
			// If the Reddit API returns an error, exit this function
			Err(msg) => return Err(msg),
		};

		// Fetch the list of posts from the JSON response
		let post_list = match res["data"]["children"].as_array() {
			Some(list) => list,
			None => return Err("No posts found".to_string()),
		};

		let mut posts: Vec<Self> = Vec::new();

		// For each post from posts list
		for post in post_list {
			let data = &post["data"];

			let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
			let score = data["score"].as_i64().unwrap_or_default();
			let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
			let title = val(post, "title");

			// Determine the type of media along with the media URL
			let (post_type, media, gallery) = Media::parse(data).await;
			let awards = Awards::parse(&data["all_awardings"]);

			// selftext_html is set for text posts when browsing.
			let mut body = rewrite_urls(&val(post, "selftext_html"));
			if body.is_empty() {
				body = rewrite_urls(&val(post, "body_html"));
			}

			posts.push(Self {
				id: val(post, "id"),
				title,
				community: val(post, "subreddit"),
				body,
				author: Author {
					name: val(post, "author"),
					flair: Flair {
						flair_parts: FlairPart::parse(
							data["author_flair_type"].as_str().unwrap_or_default(),
							data["author_flair_richtext"].as_array(),
							data["author_flair_text"].as_str(),
						),
						text: val(post, "link_flair_text"),
						background_color: val(post, "author_flair_background_color"),
						foreground_color: val(post, "author_flair_text_color"),
					},
					distinguished: val(post, "distinguished"),
				},
				score: if data["hide_score"].as_bool().unwrap_or_default() {
					("\u{2022}".to_string(), "Hidden".to_string())
				} else {
					format_num(score)
				},
				upvote_ratio: ratio as i64,
				post_type,
				thumbnail: Media {
					url: format_url(val(post, "thumbnail").as_str()),
					alt_url: String::new(),
					width: data["thumbnail_width"].as_i64().unwrap_or_default(),
					height: data["thumbnail_height"].as_i64().unwrap_or_default(),
					poster: "".to_string(),
				},
				media,
				domain: val(post, "domain"),
				flair: Flair {
					flair_parts: FlairPart::parse(
						data["link_flair_type"].as_str().unwrap_or_default(),
						data["link_flair_richtext"].as_array(),
						data["link_flair_text"].as_str(),
					),
					text: val(post, "link_flair_text"),
					background_color: val(post, "link_flair_background_color"),
					foreground_color: if val(post, "link_flair_text_color") == "dark" {
						"black".to_string()
					} else {
						"white".to_string()
					},
				},
				flags: Flags {
					nsfw: data["over_18"].as_bool().unwrap_or_default(),
					stickied: data["stickied"].as_bool().unwrap_or_default() || data["pinned"].as_bool().unwrap_or_default(),
				},
				permalink: val(post, "permalink"),
				poll: Poll::parse(&data["poll_data"]),
				rel_time,
				created,
				num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
				comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
				gallery,
				awards,
				nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
			});
		}

		Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
	}
}

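// Illustrative call pattern for `Post::fetch` (a sketch only; the exact path format
// depends on the caller, and this mirrors the test paths at the bottom of this file):
//
//     let (posts, after) = Post::fetch("/r/rust", false).await?;
//     // `after` is Reddit's pagination token and can be handed back as the `after` query param.
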
#[derive(Template)]
#[template(path = "comment.html")]
// Comment with content, post, score and date/time that it was posted
pub struct Comment {
	pub id: String,
	pub kind: String,
	pub parent_id: String,
	pub parent_kind: String,
	pub post_link: String,
	pub post_author: String,
	pub body: String,
	pub author: Author,
	pub score: (String, String),
	pub rel_time: String,
	pub created: String,
	pub edited: (String, String),
	pub replies: Vec<Comment>,
	pub highlighted: bool,
	pub awards: Awards,
	pub collapsed: bool,
	pub is_filtered: bool,
	pub more_count: i64,
	pub prefs: Preferences,
}

#[derive(Default, Clone)]
pub struct Award {
	pub name: String,
	pub icon_url: String,
	pub description: String,
	pub count: i64,
}

impl std::fmt::Display for Award {
	fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
		write!(f, "{} {} {}", self.name, self.icon_url, self.description)
	}
}

pub struct Awards(pub Vec<Award>);

impl std::ops::Deref for Awards {
	type Target = Vec<Award>;

	fn deref(&self) -> &Self::Target {
		&self.0
	}
}

impl std::fmt::Display for Awards {
	fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
		self.iter().fold(Ok(()), |result, award| result.and_then(|_| writeln!(f, "{}", award)))
	}
}

// Convert Reddit awards JSON to Awards struct
impl Awards {
	pub fn parse(items: &Value) -> Self {
		let parsed = items.as_array().unwrap_or(&Vec::new()).iter().fold(Vec::new(), |mut awards, item| {
			let name = item["name"].as_str().unwrap_or_default().to_string();
			let icon_url = format_url(item["resized_icons"][0]["url"].as_str().unwrap_or_default());
			let description = item["description"].as_str().unwrap_or_default().to_string();
			let count: i64 = i64::from_str(&item["count"].to_string()).unwrap_or(1);

			awards.push(Award {
				name,
				icon_url,
				description,
				count,
			});

			awards
		});

		Self(parsed)
	}
}

#[derive(Template)]
#[template(path = "error.html")]
pub struct ErrorTemplate {
	pub msg: String,
	pub prefs: Preferences,
	pub url: String,
}

/// Template for NSFW landing page. The landing page is displayed when a page's
/// content is wholly NSFW, but a user has not enabled the option to view NSFW
/// posts.
#[derive(Template)]
#[template(path = "nsfwlanding.html")]
pub struct NSFWLandingTemplate {
	/// Identifier for the resource. This is either a subreddit name or a
	/// username. (In the case of the latter, set is_user to true.)
	pub res: String,

	/// Identifies whether or not the resource is a subreddit, a user page,
	/// or a post.
	pub res_type: ResourceType,

	/// User preferences.
	pub prefs: Preferences,

	/// Request URL.
	pub url: String,
}

#[derive(Default)]
// User struct containing metadata about user
pub struct User {
	pub name: String,
	pub title: String,
	pub icon: String,
	pub karma: i64,
	pub created: String,
	pub banner: String,
	pub description: String,
	pub nsfw: bool,
}

#[derive(Default)]
// Subreddit struct containing metadata about community
pub struct Subreddit {
	pub name: String,
	pub title: String,
	pub description: String,
	pub info: String,
	// pub moderators: Vec<String>,
	pub icon: String,
	pub members: (String, String),
	pub active: (String, String),
	pub wiki: bool,
	pub nsfw: bool,
}

// Parser for query params, used in sorting (e.g. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
	pub t: Option<String>,
	pub q: Option<String>,
	pub sort: Option<String>,
	pub after: Option<String>,
	pub before: Option<String>,
}

#[derive(Default)]
pub struct Preferences {
	pub available_themes: Vec<String>,
	pub theme: String,
	pub front_page: String,
	pub layout: String,
	pub wide: String,
	pub show_nsfw: String,
	pub blur_nsfw: String,
	pub hide_hls_notification: String,
	pub use_hls: String,
	pub autoplay_videos: String,
	pub fixed_navbar: String,
	pub disable_visit_reddit_confirmation: String,
	pub comment_sort: String,
	pub post_sort: String,
	pub subscriptions: Vec<String>,
	pub filters: Vec<String>,
	pub hide_awards: String,
	pub hide_score: String,
}

#[derive(RustEmbed)]
#[folder = "static/themes/"]
#[include = "*.css"]
pub struct ThemeAssets;

impl Preferences {
	// Build preferences from cookies
	pub fn new(req: &Request<Body>) -> Self {
		// Read available theme names from embedded css files.
		// Always make the default "system" theme available.
		let mut themes = vec!["system".to_string()];
		for file in ThemeAssets::iter() {
			let chunks: Vec<&str> = file.as_ref().split(".css").collect();
			themes.push(chunks[0].to_owned())
		}
		Self {
			available_themes: themes,
			theme: setting(req, "theme"),
			front_page: setting(req, "front_page"),
			layout: setting(req, "layout"),
			wide: setting(req, "wide"),
			show_nsfw: setting(req, "show_nsfw"),
			blur_nsfw: setting(req, "blur_nsfw"),
			use_hls: setting(req, "use_hls"),
			hide_hls_notification: setting(req, "hide_hls_notification"),
			autoplay_videos: setting(req, "autoplay_videos"),
			fixed_navbar: setting_or_default(req, "fixed_navbar", "on".to_string()),
			disable_visit_reddit_confirmation: setting(req, "disable_visit_reddit_confirmation"),
			comment_sort: setting(req, "comment_sort"),
			post_sort: setting(req, "post_sort"),
			subscriptions: setting(req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
			filters: setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
			hide_awards: setting(req, "hide_awards"),
			hide_score: setting(req, "hide_score"),
		}
	}
}

/// Gets a `HashSet` of filters from the cookie in the given `Request`.
pub fn get_filters(req: &Request<Body>) -> HashSet<String> {
	setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect::<HashSet<String>>()
}

/// Filters a `Vec<Post>` by the given `HashSet` of filters (each filter being
/// a subreddit name or a user name). If a `Post`'s subreddit or author is
/// found in the filters, it is removed.
///
/// The first value of the return tuple is the number of posts filtered. The
/// second return value is `true` if all posts were filtered.
pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> (u64, bool) {
	// This is the length of the Vec<Post> prior to applying the filter.
	let lb: u64 = posts.len().try_into().unwrap_or(0);

	if posts.is_empty() {
		(0, false)
	} else {
		posts.retain(|p| !(filters.contains(&p.community) || filters.contains(&["u_", &p.author.name].concat())));

		// Get the length of the Vec<Post> after applying the filter.
		// If lb > la, then at least one post was removed.
		let la: u64 = posts.len().try_into().unwrap_or(0);

		(lb - la, posts.is_empty())
	}
}

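// Illustrative behaviour of `filter_posts` (a sketch with made-up names): with
// filters = {"funny", "u_spammer"}, a post from r/funny or a post authored by u/spammer
// is dropped, and the return value is (number_of_posts_removed, all_posts_were_removed).
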
/// Creates a [`Post`] from a provided JSON.
pub async fn parse_post(post: &serde_json::Value) -> Post {
	// Grab UTC time as unix timestamp
	let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
	// Parse post score and upvote ratio
	let score = post["data"]["score"].as_i64().unwrap_or_default();
	let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;

	// Determine the type of media along with the media URL
	let (post_type, media, gallery) = Media::parse(&post["data"]).await;

	let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);

	let permalink = val(post, "permalink");

	let poll = Poll::parse(&post["data"]["poll_data"]);

	let body = if val(post, "removed_by_category") == "moderator" {
		format!(
			"<div class=\"md\"><p>[removed] — <a href=\"https://{}{}\">view removed post</a></p></div>",
			get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or(String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)),
			permalink
		)
	} else {
		rewrite_urls(&val(post, "selftext_html"))
	};

	// Build a post using data parsed from Reddit post API
	Post {
		id: val(post, "id"),
		title: val(post, "title"),
		community: val(post, "subreddit"),
		body,
		author: Author {
			name: val(post, "author"),
			flair: Flair {
				flair_parts: FlairPart::parse(
					post["data"]["author_flair_type"].as_str().unwrap_or_default(),
					post["data"]["author_flair_richtext"].as_array(),
					post["data"]["author_flair_text"].as_str(),
				),
				text: val(post, "link_flair_text"),
				background_color: val(post, "author_flair_background_color"),
				foreground_color: val(post, "author_flair_text_color"),
			},
			distinguished: val(post, "distinguished"),
		},
		permalink,
		poll,
		score: format_num(score),
		upvote_ratio: ratio as i64,
		post_type,
		media,
		thumbnail: Media {
			url: format_url(val(post, "thumbnail").as_str()),
			alt_url: String::new(),
			width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
			height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
			poster: String::new(),
		},
		flair: Flair {
			flair_parts: FlairPart::parse(
				post["data"]["link_flair_type"].as_str().unwrap_or_default(),
				post["data"]["link_flair_richtext"].as_array(),
				post["data"]["link_flair_text"].as_str(),
			),
			text: val(post, "link_flair_text"),
			background_color: val(post, "link_flair_background_color"),
			foreground_color: if val(post, "link_flair_text_color") == "dark" {
				"black".to_string()
			} else {
				"white".to_string()
			},
		},
		flags: Flags {
			nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
			stickied: post["data"]["stickied"].as_bool().unwrap_or_default() || post["data"]["pinned"].as_bool().unwrap_or(false),
		},
		domain: val(post, "domain"),
		rel_time,
		created,
		num_duplicates: post["data"]["num_duplicates"].as_u64().unwrap_or(0),
		comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
		gallery,
		awards,
		nsfw: post["data"]["over_18"].as_bool().unwrap_or_default(),
	}
}

//
// FORMATTING
//

// Grab a query parameter from a url
pub fn param(path: &str, value: &str) -> Option<String> {
	Some(
		Url::parse(format!("https://libredd.it/{}", path).as_str())
			.ok()?
			.query_pairs()
			.into_owned()
			.collect::<HashMap<_, _>>()
			.get(value)?
			.clone(),
	)
}

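// Example (illustrative): param("/r/rust/?sort=hot&t=week", "sort") == Some("hot".to_string()),
// while param("/r/rust/", "sort") == None because the query string has no such key.
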
// Retrieve the value of a setting by name
pub fn setting(req: &Request<Body>, name: &str) -> String {
	// Parse a cookie value from request
	req
		.cookie(name)
		.unwrap_or_else(|| {
			// If there is no cookie for this setting, try receiving a default from the config
			if let Some(default) = crate::config::get_setting(&format!("REDLIB_DEFAULT_{}", name.to_uppercase())) {
				Cookie::new(name, default)
			} else {
				Cookie::from(name)
			}
		})
		.value()
		.to_string()
}

// Retrieve the value of a setting by name or the default value
pub fn setting_or_default(req: &Request<Body>, name: &str, default: String) -> String {
	let value = setting(req, name);
	if !value.is_empty() {
		value
	} else {
		default
	}
}

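// Lookup order used by the two helpers above (as implemented): the request cookie first,
// then a `REDLIB_DEFAULT_<NAME>` value from the config/environment, and, for
// `setting_or_default` only, the caller-supplied fallback when both of those are empty.
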
// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
	if sub == "random" || sub == "randnsfw" {
		let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
			.as_str()
			.unwrap_or_default()
			.to_string();
		Ok(redirect(format!("/r/{}{}", new_sub, additional)))
	} else {
		Err("No redirect needed".to_string())
	}
}

static REGEX_URL_WWW: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://www\.reddit\.com/(.*)").unwrap());
static REGEX_URL_OLD: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://old\.reddit\.com/(.*)").unwrap());
static REGEX_URL_NP: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://np\.reddit\.com/(.*)").unwrap());
static REGEX_URL_PLAIN: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://reddit\.com/(.*)").unwrap());
static REGEX_URL_VIDEOS: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$|\?source=fallback))").unwrap());
static REGEX_URL_VIDEOS_HLS: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$").unwrap());
static REGEX_URL_IMAGES: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://i\.redd\.it/(.*)").unwrap());
static REGEX_URL_THUMBS_A: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://a\.thumbs\.redditmedia\.com/(.*)").unwrap());
static REGEX_URL_THUMBS_B: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://b\.thumbs\.redditmedia\.com/(.*)").unwrap());
static REGEX_URL_EMOJI: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://emoji\.redditmedia\.com/(.*)/(.*)").unwrap());
static REGEX_URL_PREVIEW: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://preview\.redd\.it/(.*)").unwrap());
static REGEX_URL_EXTERNAL_PREVIEW: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://external\-preview\.redd\.it/(.*)").unwrap());
static REGEX_URL_STYLES: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://styles\.redditmedia\.com/(.*)").unwrap());
static REGEX_URL_STATIC_MEDIA: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://www\.redditstatic\.com/(.*)").unwrap());

// Direct urls to proxy if proxy is enabled
pub fn format_url(url: &str) -> String {
	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
		String::new()
	} else {
		Url::parse(url).map_or(url.to_string(), |parsed| {
			let domain = parsed.domain().unwrap_or_default();

			let capture = |regex: &Regex, format: &str, segments: i16| {
				regex.captures(url).map_or(String::new(), |caps| match segments {
					1 => [format, &caps[1]].join(""),
					2 => [format, &caps[1], "/", &caps[2]].join(""),
					_ => String::new(),
				})
			};

			macro_rules! chain {
				() => {
					{
						String::new()
					}
				};

				( $first_fn:expr, $($other_fns:expr), *) => {
					{
						let result = $first_fn;
						if result.is_empty() {
							chain!($($other_fns,)*)
						}
						else
						{
							result
						}
					}
				};
			}

			match domain {
				"www.reddit.com" => capture(&REGEX_URL_WWW, "/", 1),
				"old.reddit.com" => capture(&REGEX_URL_OLD, "/", 1),
				"np.reddit.com" => capture(&REGEX_URL_NP, "/", 1),
				"reddit.com" => capture(&REGEX_URL_PLAIN, "/", 1),
				"v.redd.it" => chain!(capture(&REGEX_URL_VIDEOS, "/vid/", 2), capture(&REGEX_URL_VIDEOS_HLS, "/hls/", 2)),
				"i.redd.it" => capture(&REGEX_URL_IMAGES, "/img/", 1),
				"a.thumbs.redditmedia.com" => capture(&REGEX_URL_THUMBS_A, "/thumb/a/", 1),
				"b.thumbs.redditmedia.com" => capture(&REGEX_URL_THUMBS_B, "/thumb/b/", 1),
				"emoji.redditmedia.com" => capture(&REGEX_URL_EMOJI, "/emoji/", 2),
				"preview.redd.it" => capture(&REGEX_URL_PREVIEW, "/preview/pre/", 1),
				"external-preview.redd.it" => capture(&REGEX_URL_EXTERNAL_PREVIEW, "/preview/external-pre/", 1),
				"styles.redditmedia.com" => capture(&REGEX_URL_STYLES, "/style/", 1),
				"www.redditstatic.com" => capture(&REGEX_URL_STATIC_MEDIA, "/static/", 1),
				_ => url.to_string(),
			}
		})
	}
}

static REDDIT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|new\.|)(reddit\.com|redd\.it)/"#).unwrap());
static REDDIT_PREVIEW_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://external-preview\.redd\.it(.*)[^?]").unwrap());

// Rewrite Reddit links to Redlib in body of text
pub fn rewrite_urls(input_text: &str) -> String {
	let text1 =
		// Rewrite Reddit links to Redlib
		REDDIT_REGEX.replace_all(input_text, r#"href="/"#)
		.to_string()
		// Remove (html-encoded) "\" from URLs.
		.replace("%5C", "")
		.replace("\\_", "_");

	// Rewrite external media previews to Redlib
	if REDDIT_PREVIEW_REGEX.is_match(&text1) {
		REDDIT_PREVIEW_REGEX
			.replace_all(&text1, format_url(REDDIT_PREVIEW_REGEX.find(&text1).map(|x| x.as_str()).unwrap_or_default()))
			.to_string()
	} else {
		text1
	}
}

// Format vote count to a string that will be displayed.
// Append `m` and `k` for millions and thousands respectively, and
// round to the nearest tenth.
pub fn format_num(num: i64) -> (String, String) {
	let truncated = if num >= 1_000_000 || num <= -1_000_000 {
		format!("{:.1}m", num as f64 / 1_000_000.0)
	} else if num >= 1000 || num <= -1000 {
		format!("{:.1}k", num as f64 / 1_000.0)
	} else {
		num.to_string()
	};

	(truncated, num.to_string())
}

// Parse a relative and absolute time from a UNIX timestamp
pub fn time(created: f64) -> (String, String) {
	let time = OffsetDateTime::from_unix_timestamp(created.round() as i64).unwrap_or(OffsetDateTime::UNIX_EPOCH);
	let now = OffsetDateTime::now_utc();
	let min = time.min(now);
	let max = time.max(now);
	let time_delta = max - min;

	// If the time difference is more than a month, show full date
	let mut rel_time = if time_delta > Duration::days(30) {
		time.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default()
	// Otherwise, show relative date/time
	} else if time_delta.whole_days() > 0 {
		format!("{}d", time_delta.whole_days())
	} else if time_delta.whole_hours() > 0 {
		format!("{}h", time_delta.whole_hours())
	} else {
		format!("{}m", time_delta.whole_minutes())
	};

	if time_delta <= Duration::days(30) {
		if now < time {
			rel_time += " left";
		} else {
			rel_time += " ago";
		}
	}

	(
		rel_time,
		time
			.format(format_description!("[month repr:short] [day] [year], [hour]:[minute]:[second] UTC"))
			.unwrap_or_default(),
	)
}

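// Illustrative output shapes for `time` (not exact values): a timestamp more than 30 days
// old yields something like ("Jan 01 '21", "Jan 01 2021, 00:00:00 UTC"), a timestamp from
// two hours ago yields ("2h ago", "<absolute date> UTC"), and a future timestamp within 30
// days gets the " left" suffix instead of " ago".
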
// val() function used to parse JSON from Reddit APIs
pub fn val(j: &Value, k: &str) -> String {
	j["data"][k].as_str().unwrap_or_default().to_string()
}

//
// NETWORKING
//

pub fn template(t: impl Template) -> Result<Response<Body>, String> {
	Ok(
		Response::builder()
			.status(200)
			.header("content-type", "text/html")
			.body(t.render().unwrap_or_default().into())
			.unwrap_or_default(),
	)
}

pub fn redirect(path: String) -> Response<Body> {
	Response::builder()
		.status(302)
		.header("content-type", "text/html")
		.header("Location", &path)
		.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path).into())
		.unwrap_or_default()
}

/// Renders a generic error landing page.
pub async fn error(req: Request<Body>, msg: impl ToString) -> Result<Response<Body>, String> {
	let url = req.uri().to_string();
	let body = ErrorTemplate {
		msg: msg.to_string(),
		prefs: Preferences::new(&req),
		url,
	}
	.render()
	.unwrap_or_default();

	Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}

/// Returns true if the config/env variable `REDLIB_SFW_ONLY` carries the
/// value `on`.
///
/// If this variable is set as such, the instance will operate in SFW-only
/// mode; all NSFW content will be filtered. Attempts to access NSFW
/// subreddits or posts, or user pages for users Reddit has deemed NSFW,
/// will be denied.
pub fn sfw_only() -> bool {
	match crate::config::get_setting("REDLIB_SFW_ONLY") {
		Some(val) => val == "on",
		None => false,
	}
}

// Determines if a request should redirect to the NSFW landing gate.
pub fn should_be_nsfw_gated(req: &Request<Body>, req_url: &str) -> bool {
	let sfw_instance = sfw_only();
	let gate_nsfw = (setting(req, "show_nsfw") != "on") || sfw_instance;

	// The NSFW landing gate cannot be bypassed on an SFW-only instance.
	let bypass_gate = !sfw_instance && req_url.contains("&bypass_nsfw_landing");

	gate_nsfw && !bypass_gate
}

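// In short: the gate applies whenever the viewer has not enabled "show_nsfw" or the
// instance is SFW-only, and the `&bypass_nsfw_landing` URL flag only takes effect on
// instances that are not SFW-only.
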
/// Renders the landing page for NSFW content when the user has not enabled
/// "show NSFW posts" in settings.
pub async fn nsfw_landing(req: Request<Body>, req_url: String) -> Result<Response<Body>, String> {
	let res_type: ResourceType;

	// Determine from the request URL if the resource is a subreddit, a user
	// page, or a post.
	let res: String = if !req.param("name").unwrap_or_default().is_empty() {
		res_type = ResourceType::User;
		req.param("name").unwrap_or_default()
	} else if !req.param("id").unwrap_or_default().is_empty() {
		res_type = ResourceType::Post;
		req.param("id").unwrap_or_default()
	} else {
		res_type = ResourceType::Subreddit;
		req.param("sub").unwrap_or_default()
	};

	let body = NSFWLandingTemplate {
		res,
		res_type,
		prefs: Preferences::new(&req),
		url: req_url,
	}
	.render()
	.unwrap_or_default();

	Ok(Response::builder().status(403).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}

#[cfg(test)]
mod tests {
	use super::{format_num, format_url, rewrite_urls};

	#[test]
	fn format_num_works() {
		assert_eq!(format_num(567), ("567".to_string(), "567".to_string()));
		assert_eq!(format_num(1234), ("1.2k".to_string(), "1234".to_string()));
		assert_eq!(format_num(1999), ("2.0k".to_string(), "1999".to_string()));
		assert_eq!(format_num(1001), ("1.0k".to_string(), "1001".to_string()));
		assert_eq!(format_num(1_999_999), ("2.0m".to_string(), "1999999".to_string()));
	}

	#[test]
	fn rewrite_urls_removes_backslashes_and_rewrites_url() {
		assert_eq!(
			rewrite_urls(
				"<a href=\"https://new.reddit.com/r/linux%5C_gaming/comments/x/just%5C_a%5C_test%5C/\">https://new.reddit.com/r/linux\\_gaming/comments/x/just\\_a\\_test/</a>"
			),
			"<a href=\"/r/linux_gaming/comments/x/just_a_test/\">https://new.reddit.com/r/linux_gaming/comments/x/just_a_test/</a>"
		);
		assert_eq!(
			rewrite_urls(
				"e.g. <a href=\"https://www.reddit.com/r/linux%5C_gaming/comments/ql9j15/anyone%5C_else%5C_confused%5C_with%5C_linus%5C_linux%5C_issues/\">https://www.reddit.com/r/linux\\_gaming/comments/ql9j15/anyone\\_else\\_confused\\_with\\_linus\\_linux\\_issues/</a>"
			),
			"e.g. <a href=\"/r/linux_gaming/comments/ql9j15/anyone_else_confused_with_linus_linux_issues/\">https://www.reddit.com/r/linux_gaming/comments/ql9j15/anyone_else_confused_with_linus_linux_issues/</a>"
		);
	}

	#[test]
	fn rewrite_urls_keeps_intentional_backslashes() {
		assert_eq!(
			rewrite_urls("printf \"\\npolkit.addRule(function(action, subject)"),
			"printf \"\\npolkit.addRule(function(action, subject)"
		);
	}

	#[test]
	fn test_format_url() {
		assert_eq!(format_url("https://a.thumbs.redditmedia.com/XYZ.jpg"), "/thumb/a/XYZ.jpg");
		assert_eq!(format_url("https://emoji.redditmedia.com/a/b"), "/emoji/a/b");

		assert_eq!(
			format_url("https://external-preview.redd.it/foo.jpg?auto=webp&s=bar"),
			"/preview/external-pre/foo.jpg?auto=webp&s=bar"
		);

		assert_eq!(format_url("https://i.redd.it/foobar.jpg"), "/img/foobar.jpg");
		assert_eq!(
			format_url("https://preview.redd.it/qwerty.jpg?auto=webp&s=asdf"),
			"/preview/pre/qwerty.jpg?auto=webp&s=asdf"
		);
		assert_eq!(format_url("https://v.redd.it/foo/DASH_360.mp4?source=fallback"), "/vid/foo/360.mp4");
		assert_eq!(
			format_url("https://v.redd.it/foo/HLSPlaylist.m3u8?a=bar&v=1&f=sd"),
			"/hls/foo/HLSPlaylist.m3u8?a=bar&v=1&f=sd"
		);
		assert_eq!(format_url("https://www.redditstatic.com/gold/awards/icon/icon.png"), "/static/gold/awards/icon/icon.png");

		assert_eq!(format_url(""), "");
		assert_eq!(format_url("self"), "");
		assert_eq!(format_url("default"), "");
		assert_eq!(format_url("nsfw"), "");
		assert_eq!(format_url("spoiler"), "");
	}
}

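// A small additional check, written as a sketch rather than part of the original test
// suite: `param` and `val` are pure helpers, so they can be exercised without any
// network access. The JSON below is made up for illustration.
#[test]
fn test_param_and_val_helpers() {
	use serde_json::json;

	assert_eq!(param("/r/rust/?sort=hot&t=week", "sort"), Some("hot".to_string()));
	assert_eq!(param("/r/rust/", "sort"), None);

	let post = json!({ "data": { "title": "Hello", "score": 1 } });
	assert_eq!(val(&post, "title"), "Hello");
	// Non-string or missing fields fall back to an empty string.
	assert_eq!(val(&post, "score"), "");
	assert_eq!(val(&post, "missing"), "");
}
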
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_fetching_subreddit_quarantined() {
	let subreddit = Post::fetch("/r/drugs", true).await;
	assert!(subreddit.is_ok());
	assert!(!subreddit.unwrap().0.is_empty());
}

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_fetching_nsfw_subreddit() {
	let subreddit = Post::fetch("/r/randnsfw", false).await;
	assert!(subreddit.is_ok());
	assert!(!subreddit.unwrap().0.is_empty());
}