redsunlib/src/utils.rs

//
// CRATES
//
use chrono::{TimeZone, Utc};
use serde_json::{from_str, Value};
// use surf::{client, get, middleware::Redirect};
#[cfg(feature = "proxy")]
use base64::encode;
//
// STRUCTS
//
#[allow(dead_code)]
// Post flair with text, background color and foreground color
pub struct Flair(pub String, pub String, pub String);
#[allow(dead_code)]
// Post containing content, metadata and media
pub struct Post {
    pub title: String,
    pub community: String,
    pub body: String,
    pub author: String,
    pub url: String,
    pub score: String,
    pub post_type: String,
    pub media: String,
    pub time: String,
    pub flair: Flair,
}
#[allow(dead_code)]
// Comment with content, post, score and date/time that it was posted
pub struct Comment {
    pub body: String,
    pub author: String,
    pub score: String,
    pub time: String,
}
#[allow(dead_code)]
// User struct containing metadata about a user
pub struct User {
    pub name: String,
    pub icon: String,
    pub karma: i64,
    pub banner: String,
    pub description: String,
}
#[allow(dead_code)]
// Subreddit struct containing metadata about a community
pub struct Subreddit {
    pub name: String,
    pub title: String,
    pub description: String,
    pub icon: String,
    pub members: String,
    pub active: String,
}
// Parser for query params, used in sorting (e.g. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
    pub sort: Option<String>,
    pub after: Option<String>,
    pub before: Option<String>,
}
// Error template
#[derive(askama::Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
    pub message: String,
}
//
// FORMATTING
//
pub async fn format_url(url: &str) -> String {
    #[cfg(feature = "proxy")]
    return "/imageproxy/".to_string() + encode(url).as_str();
    #[cfg(not(feature = "proxy"))]
    return url.to_string();
}
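// Illustrative note (an assumption about intended use, not taken from a caller in this file):
// with the "proxy" feature enabled, format_url("https://example.com/thumb.jpg").await returns
// "/imageproxy/" followed by the base64-encoded URL; without the feature, the URL is returned
// unchanged.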
pub fn format_num(num: i64) -> String {
    if num > 1000000 {
        format!("{}m", num / 1000000)
    } else if num > 1000 {
        format!("{}k", num / 1000)
    } else {
        num.to_string()
    }
}
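// A minimal sketch of the abbreviation rules above; the test module and its values are
// illustrative additions, not part of the original file.
#[cfg(test)]
mod format_num_sketch {
    use super::format_num;

    #[test]
    fn abbreviates_large_counts() {
        assert_eq!(format_num(999), "999");
        assert_eq!(format_num(8456), "8k");
        assert_eq!(format_num(1200000), "1m");
    }
}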
//
// JSON PARSING
//
#[allow(dead_code)]
// val() reads a string field from the "data" object of a Reddit API response, defaulting to ""
pub async fn val(j: &serde_json::Value, k: &str) -> String {
    String::from(j["data"][k].as_str().unwrap_or(""))
}
#[allow(dead_code)]
// nested_val() reads a string field nested one object deeper under "data"
pub async fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
    String::from(j["data"][n][k].as_str().unwrap())
}
#[allow(dead_code)]
pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
    // Send a request to the url, receive JSON in response
    let req = request(url).await;

    // If the Reddit API returns an error, exit this function
    if req.is_err() {
        return Err(req.err().unwrap());
    }

    // Otherwise, grab the JSON output from the request
    let res = req.unwrap();

    // Fetch the list of posts from the JSON response
    let post_list = res["data"]["children"].as_array().unwrap();

    let mut posts: Vec<Post> = Vec::new();

    for post in post_list.iter() {
        let img = if val(post, "thumbnail").await.starts_with("https:/") {
            format_url(val(post, "thumbnail").await.as_str()).await
        } else {
            String::new()
        };
        let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64;
        let score = post["data"]["score"].as_i64().unwrap();
        let title = val(post, "title").await;

        posts.push(Post {
            title: if title.is_empty() { fallback_title.to_owned() } else { title },
            community: val(post, "subreddit").await,
            body: val(post, "body").await,
            author: val(post, "author").await,
            score: format_num(score),
            post_type: "link".to_string(),
            media: img,
            url: val(post, "permalink").await,
            time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
            flair: Flair(
                val(post, "link_flair_text").await,
                val(post, "link_flair_background_color").await,
                if val(post, "link_flair_text_color").await == "dark" {
                    "black".to_string()
                } else {
                    "white".to_string()
                },
            ),
        });
    }

    Ok((posts, res["data"]["after"].as_str().unwrap_or("").to_string()))
}
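// Usage sketch (illustrative; the URL and handler shape are assumptions, not from this file):
//
//     match fetch_posts("https://www.reddit.com/r/rust/hot.json".to_string(), String::new()).await {
//         Ok((posts, after)) => { /* render `posts`; `after` is the next-page cursor */ }
//         Err(msg) => { /* render ErrorTemplate { message: msg.to_string() } */ }
//     }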
//
// NETWORKING
//
// Make a request to a Reddit API and parse the JSON response
#[allow(dead_code)]
pub async fn request(url: String) -> Result<serde_json::Value, &'static str> {
    // --- actix-web::client ---
    // let client = actix_web::client::Client::default();
    // let res = client
    //     .get(url)
    //     .send()
    //     .await?
    //     .body()
    //     .limit(1000000)
    //     .await?;
    // let body = std::str::from_utf8(res.as_ref())?; // .as_ref converts Bytes to [u8]

    // --- surf ---
    // let req = get(&url).header("User-Agent", "libreddit");
    // let client = client().with(Redirect::new(5));
    // let mut res = client.send(req).await.unwrap();
    // let success = res.status().is_success();
    // let body = res.body_string().await.unwrap();

    // --- reqwest ---
    let res = reqwest::get(&url).await.unwrap();
    // Read the status from the response
    let success = res.status().is_success();
    // Read the body of the response
    let body = res.text().await.unwrap();

    // Parse the response from Reddit as JSON
    let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);

    if !success {
        println!("! {} - {}", url, "Page not found");
        Err("Page not found")
    } else if json == Value::Null {
        println!("! {} - {}", url, "Failed to parse page JSON data");
        Err("Failed to parse page JSON data")
    } else {
        Ok(json)
    }
}