Categorize utilities

parent 83ba0fb913
commit a3ec44149c

10	src/post.rs
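
Note: the net effect of this commit is to move free helper functions from src/utils.rs into impl blocks on the structs they build: prefs() becomes Preferences::new(), media() becomes Media::parse(), parse_rich_flair() becomes FlairPart::parse(), and fetch_posts() becomes Post::fetch(). A before/after sketch of a call site, using only names that appear in the hunks below:

	// Before: free functions imported from utils.rs
	match fetch_posts(&path, String::new()).await { /* ... */ }
	let prefs = prefs(req);

	// After: associated functions on the types they produce
	match Post::fetch(&path, String::new()).await { /* ... */ }
	let prefs = Preferences::new(req);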
@@ -53,7 +53,7 @@ pub async fn item(req: Request<()>) -> tide::Result {
 			comments,
 			post,
 			sort,
-			prefs: prefs(req),
+			prefs: Preferences::new(req),
 			single_thread: *single_thread,
 		})
 	}
@@ -74,7 +74,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 	let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
 
 	// Determine the type of media along with the media URL
-	let (post_type, media, gallery) = media(&post["data"]).await;
+	let (post_type, media, gallery) = Media::parse(&post["data"]).await;
 
 	// Build a post using data parsed from Reddit post API
 	Post {
@@ -85,7 +85,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 		author: Author {
 			name: val(post, "author"),
 			flair: Flair {
-				flair_parts: parse_rich_flair(
+				flair_parts: FlairPart::parse(
 					post["data"]["author_flair_type"].as_str().unwrap_or_default(),
 					post["data"]["author_flair_richtext"].as_array(),
 					post["data"]["author_flair_text"].as_str(),
@@ -108,7 +108,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 			poster: "".to_string(),
 		},
 		flair: Flair {
-			flair_parts: parse_rich_flair(
+			flair_parts: FlairPart::parse(
 				post["data"]["link_flair_type"].as_str().unwrap_or_default(),
 				post["data"]["link_flair_richtext"].as_array(),
 				post["data"]["link_flair_text"].as_str(),
@@ -184,7 +184,7 @@ async fn parse_comments(json: &serde_json::Value, post_link: &str, post_author:
 		author: Author {
 			name: val(&comment, "author"),
 			flair: Flair {
-				flair_parts: parse_rich_flair(
+				flair_parts: FlairPart::parse(
 					data["author_flair_type"].as_str().unwrap_or_default(),
 					data["author_flair_richtext"].as_array(),
 					data["author_flair_text"].as_str(),

src/search.rs
@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::{cookie, error, fetch_posts, param, prefs, request, template, val, Post, Preferences};
+use crate::utils::{cookie, error, param, request, template, val, Post, Preferences};
 use askama::Template;
 use tide::Request;
 
@@ -50,7 +50,7 @@ pub async fn find(req: Request<()>) -> tide::Result {
 		Vec::new()
 	};
 
-	match fetch_posts(&path, String::new()).await {
+	match Post::fetch(&path, String::new()).await {
 		Ok((posts, after)) => template(SearchTemplate {
 			posts,
 			subreddits,
@@ -63,7 +63,7 @@ pub async fn find(req: Request<()>) -> tide::Result {
 				after,
 				restrict_sr: param(&path, "restrict_sr"),
 			},
-			prefs: prefs(req),
+			prefs: Preferences::new(req),
 		}),
 		Err(msg) => error(req, msg).await,
 	}

src/settings.rs
@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::{prefs, redirect, template, Preferences};
+use crate::utils::{redirect, template, Preferences};
 use askama::Template;
 use tide::{http::Cookie, Request};
 use time::{Duration, OffsetDateTime};
@@ -28,7 +28,7 @@ pub struct SettingsForm {
 
 // Retrieve cookies from request "Cookie" header
 pub async fn get(req: Request<()>) -> tide::Result {
-	template(SettingsTemplate { prefs: prefs(req) })
+	template(SettingsTemplate { prefs: Preferences::new(req) })
 }
 
 // Set cookies using response "Set-Cookie" header

src/subreddit.rs
@@ -43,7 +43,7 @@ pub async fn page(req: Request<()>) -> tide::Result {
 
 	let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.url().query().unwrap_or_default());
 
-	match fetch_posts(&path, String::new()).await {
+	match Post::fetch(&path, String::new()).await {
 		Ok((posts, after)) => {
 			// If you can get subreddit posts, also request subreddit metadata
 			let sub = if !sub.contains('+') && sub != subscribed && sub != "popular" && sub != "all" {
@@ -71,7 +71,7 @@ pub async fn page(req: Request<()>) -> tide::Result {
 				posts,
 				sort: (sort, param(&path, "t")),
 				ends: (param(&path, "after"), after),
-				prefs: prefs(req),
+				prefs: Preferences::new(req),
 			})
 		}
 		Err(msg) => match msg.as_str() {
@@ -89,7 +89,7 @@ pub async fn subscriptions(req: Request<()>) -> tide::Result {
 	let query = req.url().query().unwrap_or_default().to_string();
 	let action: Vec<String> = req.url().path().split('/').map(String::from).collect();
 
-	let mut sub_list = prefs(req).subscriptions;
+	let mut sub_list = Preferences::new(req).subscriptions;
 
 	// Find each subreddit name (separated by '+') in sub parameter
 	for part in sub.split('+') {
@@ -143,7 +143,7 @@ pub async fn wiki(req: Request<()>) -> tide::Result {
 			sub,
 			wiki: rewrite_urls(res["data"]["content_html"].as_str().unwrap_or_default()),
 			page,
-			prefs: prefs(req),
+			prefs: Preferences::new(req),
 		}),
 		Err(msg) => error(req, msg).await,
 	}

src/user.rs
@@ -25,7 +25,7 @@ pub async fn profile(req: Request<()>) -> tide::Result {
 	let username = req.param("name").unwrap_or("").to_string();
 
 	// Request user posts/comments from Reddit
-	let posts = fetch_posts(&path, "Comment".to_string()).await;
+	let posts = Post::fetch(&path, "Comment".to_string()).await;
 
 	match posts {
 		Ok((posts, after)) => {
@@ -37,7 +37,7 @@ pub async fn profile(req: Request<()>) -> tide::Result {
 			posts,
 			sort: (sort, param(&path, "t")),
 			ends: (param(&path, "after"), after),
-			prefs: prefs(req),
+			prefs: Preferences::new(req),
 		})
 	}
 	// If there is an error show error page
491	src/utils.rs
@@ -9,10 +9,6 @@ use std::collections::HashMap;
 use tide::{http::url::Url, http::Cookie, Request, Response};
 use time::{Duration, OffsetDateTime};
 
-//
-// STRUCTS
-//
-
 // Post flair with content, background color and foreground color
 pub struct Flair {
 	pub flair_parts: Vec<FlairPart>,
@@ -27,6 +23,42 @@ pub struct FlairPart {
 	pub value: String,
 }
 
+impl FlairPart {
+	pub fn parse(flair_type: &str, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<Self> {
+		// Parse type of flair
+		match flair_type {
+			// If flair contains emojis and text
+			"richtext" => match rich_flair {
+				Some(rich) => rich
+					.iter()
+					// For each part of the flair, extract text and emojis
+					.map(|part| {
+						let value = |name: &str| part[name].as_str().unwrap_or_default();
+						Self {
+							flair_part_type: value("e").to_string(),
+							value: match value("e") {
+								"text" => value("t").to_string(),
+								"emoji" => format_url(value("u")),
+								_ => String::new(),
+							},
+						}
+					})
+					.collect::<Vec<Self>>(),
+				None => Vec::new(),
+			},
+			// If flair contains only text
+			"text" => match text_flair {
+				Some(text) => vec![Self {
+					flair_part_type: "text".to_string(),
+					value: text.to_string(),
+				}],
+				None => Vec::new(),
+			},
+			_ => Vec::new(),
+		}
+	}
+}
+
 pub struct Author {
 	pub name: String,
 	pub flair: Flair,
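
Calling the relocated function is unchanged apart from the path. A minimal sketch, assuming the utils.rs scope above (serde_json::Value and format_url in scope); the flair JSON is invented for illustration:

	let flair = serde_json::json!([
		{ "e": "emoji", "u": "https://emoji.redd.it/abc.png" },
		{ "e": "text", "t": "Helpful" }
	]);

	// as_array() yields the Option<&Vec<Value>> that parse() expects
	let parts = FlairPart::parse("richtext", flair.as_array(), None);
	// parts[0].value is the emoji URL rewritten by format_url; parts[1].value == "Helpful"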
@@ -46,6 +78,61 @@ pub struct Media {
 	pub poster: String,
 }
 
+impl Media {
+	pub async fn parse(data: &Value) -> (String, Self, Vec<GalleryMedia>) {
+		let mut gallery = Vec::new();
+
+		// If post is a video, return the video
+		let (post_type, url) = if data["preview"]["reddit_video_preview"]["fallback_url"].is_string() {
+			// Return reddit video
+			("video", &data["preview"]["reddit_video_preview"]["fallback_url"])
+		} else if data["secure_media"]["reddit_video"]["fallback_url"].is_string() {
+			// Return reddit video
+			("video", &data["secure_media"]["reddit_video"]["fallback_url"])
+		} else if data["post_hint"].as_str().unwrap_or("") == "image" {
+			// Handle images, whether GIFs or pics
+			let preview = &data["preview"]["images"][0];
+			let mp4 = &preview["variants"]["mp4"];
+
+			if mp4.is_object() {
+				// Return the mp4 if the media is a gif
+				("gif", &mp4["source"]["url"])
+			} else {
+				// Return the picture if the media is an image
+				if data["domain"] == "i.redd.it" {
+					("image", &data["url"])
+				} else {
+					("image", &preview["source"]["url"])
+				}
+			}
+		} else if data["is_self"].as_bool().unwrap_or_default() {
+			// If type is self, return permalink
+			("self", &data["permalink"])
+		} else if data["is_gallery"].as_bool().unwrap_or_default() {
+			// If this post contains a gallery of images
+			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);
+
+			("gallery", &data["url"])
+		} else {
+			// If type can't be determined, return url
+			("link", &data["url"])
+		};
+
+		let source = &data["preview"]["images"][0]["source"];
+
+		(
+			post_type.to_string(),
+			Self {
+				url: url.as_str().unwrap_or_default().to_string(),
+				width: source["width"].as_i64().unwrap_or_default(),
+				height: source["height"].as_i64().unwrap_or_default(),
+				poster: format_url(source["url"].as_str().unwrap_or_default()),
+			},
+			gallery,
+		)
+	}
+}
+
 pub struct GalleryMedia {
 	pub url: String,
 	pub width: i64,
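
A quick sketch of Media::parse on a hypothetical self post (inside an async fn; the field values are invented):

	let data = serde_json::json!({
		"is_self": true,
		"permalink": "/r/rust/comments/abc123/example/"
	});

	let (post_type, media, gallery) = Media::parse(&data).await;
	// post_type == "self", media.url == "/r/rust/comments/abc123/example/", gallery is empty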
@@ -54,6 +141,29 @@ pub struct GalleryMedia {
 	pub outbound_url: String,
 }
 
+impl GalleryMedia {
+	fn parse(items: &Value, metadata: &Value) -> Vec<Self> {
+		items.as_array()
+			.unwrap_or(&Vec::new())
+			.iter()
+			.map(|item| {
+				// For each image in gallery
+				let media_id = item["media_id"].as_str().unwrap_or_default();
+				let image = &metadata[media_id]["s"];
+
+				// Construct gallery items
+				Self {
+					url: format_url(image["u"].as_str().unwrap_or_default()),
+					width: image["x"].as_i64().unwrap_or_default(),
+					height: image["y"].as_i64().unwrap_or_default(),
+					caption: item["caption"].as_str().unwrap_or_default().to_string(),
+					outbound_url: item["outbound_url"].as_str().unwrap_or_default().to_string(),
+				}
+			})
+			.collect::<Vec<Self>>()
+	}
+}
+
 // Post containing content, metadata and media
 pub struct Post {
 	pub id: String,
@@ -76,6 +186,106 @@ pub struct Post {
 	pub gallery: Vec<GalleryMedia>,
 }
 
+impl Post {
+	// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
+	pub async fn fetch(path: &str, fallback_title: String) -> Result<(Vec<Self>, String), String> {
+		let res;
+		let post_list;
+
+		// Send a request to the url
+		match request(path.to_string()).await {
+			// If success, receive JSON in response
+			Ok(response) => {
+				res = response;
+			}
+			// If the Reddit API returns an error, exit this function
+			Err(msg) => return Err(msg),
+		}
+
+		// Fetch the list of posts from the JSON response
+		match res["data"]["children"].as_array() {
+			Some(list) => post_list = list,
+			None => return Err("No posts found".to_string()),
+		}
+
+		let mut posts: Vec<Self> = Vec::new();
+
+		// For each post from posts list
+		for post in post_list {
+			let data = &post["data"];
+
+			let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
+			let score = data["score"].as_i64().unwrap_or_default();
+			let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
+			let title = val(post, "title");
+
+			// Determine the type of media along with the media URL
+			let (post_type, media, gallery) = Media::parse(&data).await;
+
+			posts.push(Self {
+				id: val(post, "id"),
+				title: if title.is_empty() { fallback_title.to_owned() } else { title },
+				community: val(post, "subreddit"),
+				body: rewrite_urls(&val(post, "body_html")),
+				author: Author {
+					name: val(post, "author"),
+					flair: Flair {
+						flair_parts: FlairPart::parse(
+							data["author_flair_type"].as_str().unwrap_or_default(),
+							data["author_flair_richtext"].as_array(),
+							data["author_flair_text"].as_str(),
+						),
+						text: val(post, "link_flair_text"),
+						background_color: val(post, "author_flair_background_color"),
+						foreground_color: val(post, "author_flair_text_color"),
+					},
+					distinguished: val(post, "distinguished"),
+				},
+				score: if data["hide_score"].as_bool().unwrap_or_default() {
+					"•".to_string()
+				} else {
+					format_num(score)
+				},
+				upvote_ratio: ratio as i64,
+				post_type,
+				thumbnail: Media {
+					url: format_url(val(post, "thumbnail").as_str()),
+					width: data["thumbnail_width"].as_i64().unwrap_or_default(),
+					height: data["thumbnail_height"].as_i64().unwrap_or_default(),
+					poster: "".to_string(),
+				},
+				media,
+				domain: val(post, "domain"),
+				flair: Flair {
+					flair_parts: FlairPart::parse(
+						data["link_flair_type"].as_str().unwrap_or_default(),
+						data["link_flair_richtext"].as_array(),
+						data["link_flair_text"].as_str(),
+					),
+					text: val(post, "link_flair_text"),
+					background_color: val(post, "link_flair_background_color"),
+					foreground_color: if val(post, "link_flair_text_color") == "dark" {
+						"black".to_string()
+					} else {
+						"white".to_string()
+					},
+				},
+				flags: Flags {
+					nsfw: data["over_18"].as_bool().unwrap_or_default(),
+					stickied: data["stickied"].as_bool().unwrap_or_default(),
+				},
+				permalink: val(post, "permalink"),
+				rel_time,
+				created,
+				comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
+				gallery,
+			});
+		}
+
+		Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
+	}
+}
+
 #[derive(Template)]
 #[template(path = "comment.html", escape = "none")]
 // Comment with content, post, score and data/time that it was posted
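
Call sites now go through the type, as in search.rs and user.rs above. A minimal handler sketch (the path string is only an example):

	match Post::fetch("/r/rust/hot.json?&raw_json=1", String::new()).await {
		Ok((posts, after)) => { /* render the template; `after` is the pagination cursor */ }
		Err(msg) => { /* fall through to the error page */ }
	}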
@@ -96,6 +306,13 @@ pub struct Comment {
 	pub highlighted: bool,
 }
 
+#[derive(Template)]
+#[template(path = "error.html", escape = "none")]
+pub struct ErrorTemplate {
+	pub msg: String,
+	pub prefs: Preferences,
+}
+
 #[derive(Default)]
 // User struct containing metadata about user
 pub struct User {
@@ -131,14 +348,6 @@ pub struct Params {
 	pub before: Option<String>,
 }
 
-// Error template
-#[derive(Template)]
-#[template(path = "error.html", escape = "none")]
-pub struct ErrorTemplate {
-	pub msg: String,
-	pub prefs: Preferences,
-}
-
 #[derive(Default)]
 pub struct Preferences {
 	pub theme: String,
@@ -150,24 +359,26 @@ pub struct Preferences {
 	pub subscriptions: Vec<String>,
 }
 
+impl Preferences {
+	// Build preferences from cookies
+	pub fn new(req: Request<()>) -> Self {
+		Self {
+			theme: cookie(&req, "theme"),
+			front_page: cookie(&req, "front_page"),
+			layout: cookie(&req, "layout"),
+			wide: cookie(&req, "wide"),
+			show_nsfw: cookie(&req, "show_nsfw"),
+			comment_sort: cookie(&req, "comment_sort"),
+			subscriptions: cookie(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
+		}
+	}
+}
+
 //
 // FORMATTING
 //
 
-// Build preferences from cookies
-pub fn prefs(req: Request<()>) -> Preferences {
-	Preferences {
-		theme: cookie(&req, "theme"),
-		front_page: cookie(&req, "front_page"),
-		layout: cookie(&req, "layout"),
-		wide: cookie(&req, "wide"),
-		show_nsfw: cookie(&req, "show_nsfw"),
-		comment_sort: cookie(&req, "comment_sort"),
-		subscriptions: cookie(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
-	}
-}
-
-// Grab a query param from a url
+// Grab a query parameter from a url
 pub fn param(path: &str, value: &str) -> String {
 	match Url::parse(format!("https://libredd.it/{}", path).as_str()) {
 		Ok(url) => url.query_pairs().into_owned().collect::<HashMap<_, _>>().get(value).unwrap_or(&String::new()).to_owned(),
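
Preferences::new bundles the same cookie() lookups that the removed prefs() did. For example, a request carrying the cookie subscriptions=rust+programming (hypothetical values) would give:

	let subs = Preferences::new(req).subscriptions;
	// subs == vec!["rust".to_string(), "programming".to_string()]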
@@ -175,7 +386,7 @@ pub fn param(path: &str, value: &str) -> String {
 	}
 }
 
-// Parse Cookie value from request
+// Parse a cookie value from request
 pub fn cookie(req: &Request<()>, name: &str) -> String {
 	let cookie = req.cookie(name).unwrap_or_else(|| Cookie::named(name));
 	cookie.value().to_string()
@@ -240,119 +451,7 @@ pub fn format_num(num: i64) -> String {
 	}
 }
 
-pub async fn media(data: &Value) -> (String, Media, Vec<GalleryMedia>) {
-	let post_type;
-	let mut gallery = Vec::new();
-
-	// If post is a video, return the video
-	let url = if data["preview"]["reddit_video_preview"]["fallback_url"].is_string() {
-		// Return reddit video
-		post_type = "video";
-		format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default())
-	} else if data["secure_media"]["reddit_video"]["fallback_url"].is_string() {
-		// Return reddit video
-		post_type = "video";
-		format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default())
-	} else if data["post_hint"].as_str().unwrap_or("") == "image" {
-		// Handle images, whether GIFs or pics
-		let preview = &data["preview"]["images"][0];
-		let mp4 = &preview["variants"]["mp4"];
-
-		if mp4.is_object() {
-			// Return the mp4 if the media is a gif
-			post_type = "gif";
-			format_url(mp4["source"]["url"].as_str().unwrap_or_default())
-		} else {
-			// Return the picture if the media is an image
-			post_type = "image";
-			if data["domain"] == "i.redd.it" {
-				format_url(data["url"].as_str().unwrap_or_default())
-			} else {
-				format_url(preview["source"]["url"].as_str().unwrap_or_default())
-			}
-		}
-	} else if data["is_self"].as_bool().unwrap_or_default() {
-		// If type is self, return permalink
-		post_type = "self";
-		data["permalink"].as_str().unwrap_or_default().to_string()
-	} else if data["is_gallery"].as_bool().unwrap_or_default() {
-		// If this post contains a gallery of images
-		post_type = "gallery";
-		gallery = data["gallery_data"]["items"]
-			.as_array()
-			.unwrap_or(&Vec::<Value>::new())
-			.iter()
-			.map(|item| {
-				// For each image in gallery
-				let media_id = item["media_id"].as_str().unwrap_or_default();
-				let image = &data["media_metadata"][media_id]["s"];
-
-				// Construct gallery items
-				GalleryMedia {
-					url: format_url(image["u"].as_str().unwrap_or_default()),
-					width: image["x"].as_i64().unwrap_or_default(),
-					height: image["y"].as_i64().unwrap_or_default(),
-					caption: item["caption"].as_str().unwrap_or_default().to_string(),
-					outbound_url: item["outbound_url"].as_str().unwrap_or_default().to_string(),
-				}
-			})
-			.collect::<Vec<GalleryMedia>>();
-
-		data["url"].as_str().unwrap_or_default().to_string()
-	} else {
-		// If type can't be determined, return url
-		post_type = "link";
-		data["url"].as_str().unwrap_or_default().to_string()
-	};
-
-	let source = &data["preview"]["images"][0]["source"];
-
-	(
-		post_type.to_string(),
-		Media {
-			url,
-			width: source["width"].as_i64().unwrap_or_default(),
-			height: source["height"].as_i64().unwrap_or_default(),
-			poster: format_url(source["url"].as_str().unwrap_or_default()),
-		},
-		gallery,
-	)
-}
-
-pub fn parse_rich_flair(flair_type: &str, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<FlairPart> {
-	// Parse type of flair
-	match flair_type {
-		// If flair contains emojis and text
-		"richtext" => match rich_flair {
-			Some(rich) => rich
-				.iter()
-				// For each part of the flair, extract text and emojis
-				.map(|part| {
-					let value = |name: &str| part[name].as_str().unwrap_or_default();
-					FlairPart {
-						flair_part_type: value("e").to_string(),
-						value: match value("e") {
-							"text" => value("t").to_string(),
-							"emoji" => format_url(value("u")),
-							_ => String::new(),
-						},
-					}
-				})
-				.collect::<Vec<FlairPart>>(),
-			None => Vec::new(),
-		},
-		// If flair contains only text
-		"text" => match text_flair {
-			Some(text) => vec![FlairPart {
-				flair_part_type: "text".to_string(),
-				value: text.to_string(),
-			}],
-			None => Vec::new(),
-		},
-		_ => Vec::new(),
-	}
-}
-
+// Parse a relative and absolute time from a UNIX timestamp
 pub fn time(created: f64) -> (String, String) {
 	let time = OffsetDateTime::from_unix_timestamp(created.round() as i64);
 	let time_delta = OffsetDateTime::now_utc() - time;
@@ -381,104 +480,6 @@ pub fn val(j: &Value, k: &str) -> String {
 	j["data"][k].as_str().unwrap_or_default().to_string()
 }
 
-// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
-pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), String> {
-	let res;
-	let post_list;
-
-	// Send a request to the url
-	match request(path.to_string()).await {
-		// If success, receive JSON in response
-		Ok(response) => {
-			res = response;
-		}
-		// If the Reddit API returns an error, exit this function
-		Err(msg) => return Err(msg),
-	}
-
-	// Fetch the list of posts from the JSON response
-	match res["data"]["children"].as_array() {
-		Some(list) => post_list = list,
-		None => return Err("No posts found".to_string()),
-	}
-
-	let mut posts: Vec<Post> = Vec::new();
-
-	// For each post from posts list
-	for post in post_list {
-		let data = &post["data"];
-
-		let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
-		let score = data["score"].as_i64().unwrap_or_default();
-		let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
-		let title = val(post, "title");
-
-		// Determine the type of media along with the media URL
-		let (post_type, media, gallery) = media(&data).await;
-
-		posts.push(Post {
-			id: val(post, "id"),
-			title: if title.is_empty() { fallback_title.to_owned() } else { title },
-			community: val(post, "subreddit"),
-			body: rewrite_urls(&val(post, "body_html")),
-			author: Author {
-				name: val(post, "author"),
-				flair: Flair {
-					flair_parts: parse_rich_flair(
-						data["author_flair_type"].as_str().unwrap_or_default(),
-						data["author_flair_richtext"].as_array(),
-						data["author_flair_text"].as_str(),
-					),
-					text: val(post, "link_flair_text"),
-					background_color: val(post, "author_flair_background_color"),
-					foreground_color: val(post, "author_flair_text_color"),
-				},
-				distinguished: val(post, "distinguished"),
-			},
-			score: if data["hide_score"].as_bool().unwrap_or_default() {
-				"•".to_string()
-			} else {
-				format_num(score)
-			},
-			upvote_ratio: ratio as i64,
-			post_type,
-			thumbnail: Media {
-				url: format_url(val(post, "thumbnail").as_str()),
-				width: data["thumbnail_width"].as_i64().unwrap_or_default(),
-				height: data["thumbnail_height"].as_i64().unwrap_or_default(),
-				poster: "".to_string(),
-			},
-			media,
-			domain: val(post, "domain"),
-			flair: Flair {
-				flair_parts: parse_rich_flair(
-					data["link_flair_type"].as_str().unwrap_or_default(),
-					data["link_flair_richtext"].as_array(),
-					data["link_flair_text"].as_str(),
-				),
-				text: val(post, "link_flair_text"),
-				background_color: val(post, "link_flair_background_color"),
-				foreground_color: if val(post, "link_flair_text_color") == "dark" {
-					"black".to_string()
-				} else {
-					"white".to_string()
-				},
-			},
-			flags: Flags {
-				nsfw: data["over_18"].as_bool().unwrap_or_default(),
-				stickied: data["stickied"].as_bool().unwrap_or_default(),
-			},
-			permalink: val(post, "permalink"),
-			rel_time,
-			created,
-			comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
-			gallery,
-		});
-	}
-
-	Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
-}
-
 //
 // NETWORKING
 //
@@ -496,7 +497,7 @@ pub fn redirect(path: String) -> Response {
 }
 
 pub async fn error(req: Request<()>, msg: String) -> tide::Result {
-	let body = ErrorTemplate { msg, prefs: prefs(req) }.render().unwrap_or_default();
+	let body = ErrorTemplate { msg, prefs: Preferences::new(req) }.render().unwrap_or_default();
 
 	Ok(Response::builder(404).content_type("text/html").body(body).build())
 }
@@ -532,14 +533,10 @@ pub async fn request(path: String) -> Result<Value, String> {
 			Err(
 				json["reason"]
 					.as_str()
-					.unwrap_or(
-						json["message"]
-							.as_str()
-							.unwrap_or_else(|| {
-								println!("{} - Error parsing reddit error", url);
-								"Error parsing reddit error"
-							}),
-					)
+					.unwrap_or(json["message"].as_str().unwrap_or_else(|| {
+						println!("{} - Error parsing reddit error", url);
+						"Error parsing reddit error"
+					}))
 					.to_string(),
 			)
 		} else {