//
// CRATES
//
use chrono::{TimeZone, Utc};
use serde_json::{from_str, Value};
use url::Url;
// use surf::{client, get, middleware::Redirect};
#[cfg(feature = "proxy")]
use base64::encode;

//
// STRUCTS
//

// Post flair with text, background color and foreground color
pub struct Flair(pub String, pub String, pub String);
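// e.g. Flair("Rustacean".to_string(), "#b7410e".to_string(), "white".to_string())
// (hypothetical values: flair text, background color and foreground color, in that order)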

// Post flags with nsfw and stickied
pub struct Flags {
	pub nsfw: bool,
	pub stickied: bool,
}

// Post containing content, metadata and media
pub struct Post {
	pub title: String,
	pub community: String,
	pub body: String,
	pub author: String,
	pub author_flair: Flair,
	pub url: String,
	pub score: String,
	pub post_type: String,
	pub flair: Flair,
	pub flags: Flags,
	pub media: String,
	pub time: String,
}

// Comment with content, author, score and the date/time it was posted
pub struct Comment {
	pub id: String,
	pub body: String,
	pub author: String,
	pub flair: Flair,
	pub score: String,
	pub time: String,
	pub replies: Vec<Comment>,
}

// User struct containing metadata about a user
pub struct User {
	pub name: String,
	pub icon: String,
	pub karma: i64,
	pub created: String,
	pub banner: String,
	pub description: String,
}

// Subreddit struct containing metadata about a community
#[derive(Default)]
pub struct Subreddit {
	pub name: String,
	pub title: String,
	pub description: String,
	pub info: String,
	pub icon: String,
	pub members: String,
	pub active: String,
}

// Parser for query params, used in sorting (e.g. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
	pub t: Option<String>,
	pub q: Option<String>,
	pub sort: Option<String>,
	pub after: Option<String>,
	pub before: Option<String>,
}

// Error template
#[derive(askama::Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
	pub message: String,
}

//
// FORMATTING
//

// Grab a query param from a url
pub async fn param(path: &String, value: &str) -> String {
	let url = Url::parse(format!("https://reddit.com/{}", path).as_str()).unwrap();
	let pairs: std::collections::HashMap<_, _> = url.query_pairs().into_owned().collect();
	pairs.get(value).unwrap_or(&String::new()).to_owned()
}
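
// Usage sketch (hypothetical query string, not from the original source):
// let sort = param(&"r/rust/?sort=top&t=week".to_string(), "sort").await; // "top"
// let after = param(&"r/rust/".to_string(), "after").await; // "" when the key is absent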

// Direct urls to proxy if proxy is enabled
pub async fn format_url(url: String) -> String {
	if url.is_empty() {
		return String::new();
	}

	#[cfg(feature = "proxy")]
	return "/proxy/".to_string() + encode(url).as_str();

	#[cfg(not(feature = "proxy"))]
	return url;
}
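
// Usage sketch (hypothetical url, not from the original source):
// let thumb = format_url("https://a.thumbs.redditmedia.com/x.jpg".to_string()).await;
// // proxy enabled: "/proxy/" + base64 of the url; proxy disabled: the url unchanged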

// Append `m` and `k` for millions and thousands respectively
pub fn format_num(num: i64) -> String {
	if num > 1_000_000 {
		format!("{}m", num / 1_000_000)
	} else if num > 1000 {
		format!("{}k", num / 1000)
	} else {
		num.to_string()
	}
}
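
// A minimal test sketch (not part of the original source) pinning down the
// truncating behavior of the integer division above
#[cfg(test)]
mod format_num_tests {
	use super::format_num;

	#[test]
	fn abbreviates_thousands_and_millions() {
		assert_eq!(format_num(999), "999");
		assert_eq!(format_num(1_999), "1k"); // truncated, not rounded to "2k"
		assert_eq!(format_num(2_500_000), "2m");
	}
}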

//
// JSON PARSING
//

// val() gets a string value from the `data` object of a Reddit API response, or "" if missing
pub async fn val(j: &serde_json::Value, k: &str) -> String {
	String::from(j["data"][k].as_str().unwrap_or(""))
}
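
// Shape sketch (hypothetical JSON, not from the original source):
// given {"data": {"title": "Hello"}}, val(&json, "title").await == "Hello"
// and a missing key falls back to ""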

// nested_val() does the same for a value nested one level deeper in `data`
pub async fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
	String::from(j["data"][n][k].as_str().unwrap_or(""))
}

// Fetch posts of a user or subreddit
pub async fn fetch_posts(path: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
	// Send a request to the url, receive JSON in response
	let req = request(path).await;

	// If the Reddit API returns an error, exit this function
	if req.is_err() {
		return Err(req.err().unwrap());
	}

	// Otherwise, grab the JSON output from the request
	let res = req.unwrap();

	// Fetch the list of posts from the JSON response
	let post_list = res["data"]["children"].as_array().unwrap();

	let mut posts: Vec<Post> = Vec::new();

	for post in post_list {
		let img = if val(post, "thumbnail").await.starts_with("https://") {
			format_url(val(post, "thumbnail").await).await
		} else {
			String::new()
		};
		let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64;
		let score = post["data"]["score"].as_i64().unwrap();
		let title = val(post, "title").await;

		posts.push(Post {
			title: if title.is_empty() { fallback_title.to_owned() } else { title },
			community: val(post, "subreddit").await,
			body: val(post, "body_html").await,
			author: val(post, "author").await,
			author_flair: Flair(
				val(post, "author_flair_text").await,
				val(post, "author_flair_background_color").await,
				val(post, "author_flair_text_color").await,
			),
			score: format_num(score),
			post_type: "link".to_string(),
			media: img,
			flair: Flair(
				val(post, "link_flair_text").await,
				val(post, "link_flair_background_color").await,
				if val(post, "link_flair_text_color").await == "dark" {
					"black".to_string()
				} else {
					"white".to_string()
				},
			),
			flags: Flags {
				nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
				stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
			},
			url: val(post, "permalink").await,
			time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
		});
	}

	Ok((posts, res["data"]["after"].as_str().unwrap_or("").to_string()))
}
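
// Caller sketch (hypothetical endpoint path, not from the original source):
// let (posts, after) = fetch_posts("r/rust/hot.json".to_string(), String::new()).await?;
// `after` is Reddit's pagination token for requesting the next page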

//
// NETWORKING
//

// Make a request to a Reddit API and parse the JSON response
pub async fn request(path: String) -> Result<serde_json::Value, &'static str> {
	let url = format!("https://www.reddit.com/{}", path);

	// --- actix-web::client ---
	// let client = actix_web::client::Client::default();
	// let res = client
	// 	.get(url)
	// 	.send()
	// 	.await?
	// 	.body()
	// 	.limit(1000000)
	// 	.await?;

	// let body = std::str::from_utf8(res.as_ref())?; // .as_ref converts Bytes to [u8]

	// --- surf ---
	// let req = get(&url).header("User-Agent", "libreddit");
	// let client = client().with(Redirect::new(5));
	// let mut res = client.send(req).await.unwrap();
	// let success = res.status().is_success();
	// let body = res.body_string().await.unwrap();

	// --- reqwest ---
	// Send the request; return an error instead of panicking if Reddit is unreachable
	let res = match reqwest::get(&url).await {
		Ok(res) => res,
		Err(_) => return Err("Couldn't reach the Reddit API"),
	};

	// Read the status from the response
	let success = res.status().is_success();

	// Read the body of the response; an unreadable body falls through to the JSON error below
	let body = res.text().await.unwrap_or_default();

	// Parse the response from Reddit as JSON
	let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);

	if !success {
		println!("! {} - Page not found", url);
		Err("Page not found")
	} else if json == Value::Null {
		println!("! {} - Failed to parse page JSON data", url);
		Err("Failed to parse page JSON data")
	} else {
		Ok(json)
	}
}
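
// Caller sketch (hypothetical path, not from the original source):
// match request("r/rust/about.json".to_string()).await {
// 	Ok(json) => { /* use json["data"] */ }
// 	Err(msg) => println!("{}", msg),
// }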