Parse GIFs correctly
commit 541c741bde
parent 7a33ed3434
@@ -91,7 +91,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
         score: format_num(score),
         upvote_ratio: ratio as i64,
         post_type,
-        thumbnail: format_url(val(post, "thumbnail")),
+        thumbnail: format_url(val(post, "thumbnail").as_str()),
         flair: Flair(
             val(post, "link_flair_text"),
             val(post, "link_flair_background_color"),
@@ -105,7 +105,7 @@ async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
         title: val(&res, "title"),
         description: val(&res, "public_description"),
         info: rewrite_url(&val(&res, "description_html").replace("\\", "")),
-        icon: format_url(icon),
+        icon: format_url(icon.as_str()),
         members: format_num(members),
         active: format_num(active),
         wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
@@ -62,7 +62,7 @@ async fn user(name: &str) -> Result<User, &'static str> {
     Ok(User {
         name: name.to_string(),
         title: nested_val(&res, "subreddit", "title"),
-        icon: format_url(nested_val(&res, "subreddit", "icon_img")),
+        icon: format_url(nested_val(&res, "subreddit", "icon_img").as_str()),
         karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
         created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
         banner: nested_val(&res, "subreddit", "banner_img"),
src/utils.rs (48 changed lines)
@@ -134,7 +134,7 @@ pub fn cookie(req: &HttpRequest, name: &str) -> String {
 }
 
 // Direct urls to proxy if proxy is enabled
-pub fn format_url(url: String) -> String {
+pub fn format_url(url: &str) -> String {
     if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
         String::new()
     } else {
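Note on the signature change above: format_url now borrows a &str instead of taking an owned String, which is why callers in the other hunks append .as_str() and why the .to_string() calls in media() below disappear. A minimal, self-contained sketch of the idea; the /proxy/ path in the else branch is an assumption for illustration, since the real body is cut off by this hunk:

use serde_json::json;

// Sketch only: mirrors the new &str-taking signature; the else branch is hypothetical.
fn format_url(url: &str) -> String {
    if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
        String::new()
    } else {
        format!("/proxy/{}", url) // assumed proxy route, not shown in this diff
    }
}

fn main() {
    let post = json!({ "thumbnail": "https://b.thumbs.redditmedia.com/example.jpg" });
    // JSON string values can now be passed straight through as &str,
    // without the intermediate .to_string() the old String parameter forced.
    let thumb = format_url(post["thumbnail"].as_str().unwrap_or_default());
    println!("{}", thumb);
}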
@@ -163,13 +163,17 @@ pub async fn media(data: &serde_json::Value) -> (String, String) {
     let post_type: &str;
     let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
         post_type = "video";
-        format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default().to_string())
+        format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default())
     } else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
         post_type = "video";
-        format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default().to_string())
+        format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default())
     } else if data["post_hint"].as_str().unwrap_or("") == "image" {
+        let preview = data["preview"]["images"][0].clone();
         post_type = "image";
-        format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap_or_default().to_string())
+        match preview["variants"]["mp4"].as_object() {
+            Some(gif) => format_url(gif["source"]["url"].as_str().unwrap_or_default()),
+            None => format_url(preview["source"]["url"].as_str().unwrap_or_default())
+        }
     } else if data["is_self"].as_bool().unwrap_or_default() {
         post_type = "self";
         data["permalink"].as_str().unwrap_or_default().to_string()
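The change above is the actual GIF fix: for GIF posts, Reddit's preview JSON exposes an MP4 rendition under preview.images[0].variants.mp4, and the new code prefers it over the static source image. A standalone sketch of that lookup; the JSON below is a hand-written stand-in for the real payload, trimmed to the relevant fields:

use serde_json::json;

fn main() {
    // Hand-written stand-in for Reddit's preview payload (not real API output).
    let data = json!({
        "preview": { "images": [{
            "source": { "url": "https://preview.redd.it/example.gif" },
            "variants": { "mp4": { "source": { "url": "https://preview.redd.it/example.mp4" } } }
        }]}
    });

    let preview = data["preview"]["images"][0].clone();
    // Prefer the MP4 variant (present for GIF posts); fall back to the source image.
    let url = match preview["variants"]["mp4"].as_object() {
        Some(gif) => gif["source"]["url"].as_str().unwrap_or_default(),
        None => preview["source"]["url"].as_str().unwrap_or_default(),
    };
    assert_eq!(url, "https://preview.redd.it/example.mp4");
}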
@@ -178,7 +182,7 @@ pub async fn media(data: &serde_json::Value) -> (String, String) {
         data["url"].as_str().unwrap_or_default().to_string()
     };
 
-    (post_type.to_string(), url)
+    (post_type.to_string(), url.to_string())
 }
 
 //
@@ -242,7 +246,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
         score: format_num(score),
         upvote_ratio: ratio as i64,
         post_type,
-        thumbnail: format_url(val(post, "thumbnail")),
+        thumbnail: format_url(val(post, "thumbnail").as_str()),
         media,
         domain: val(post, "domain"),
         flair: Flair(
@@ -284,37 +288,7 @@ pub async fn error(msg: String) -> HttpResponse {
 pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
     let url = format!("https://www.reddit.com{}", path);
 
-    // match reqwest::get(&url).await {
-    //     Ok(res) => {
-    //         // Read the status from the response
-    //         match res.status().is_success() {
-    //             true => {
-    //                 // Parse the response from Reddit as JSON
-    //                 match from_str(res.text().await.unwrap_or_default().as_str()) {
-    //                     Ok(json) => Ok(json),
-    //                     Err(_) => {
-    //                         #[cfg(debug_assertions)]
-    //                         dbg!(format!("{} - Failed to parse page JSON data", url));
-    //                         Err("Failed to parse page JSON data")
-    //                     }
-    //                 }
-    //             }
-    //             // If Reddit returns error, tell user Page Not Found
-    //             false => {
-    //                 #[cfg(debug_assertions)]
-    //                 dbg!(format!("{} - Page not found", url));
-    //                 Err("Page not found")
-    //             }
-    //         }
-    //     }
-    //     // If can't send request to Reddit, return this to user
-    //     Err(_e) => {
-    //         #[cfg(debug_assertions)]
-    //         dbg!(format!("{} - {}", url, _e));
-    //         Err("Couldn't send request to Reddit")
-    //     }
-    // }
-    // Send request using reqwest
+    // Send request using ureq
     match ureq::get(&url).call() {
         // If response is success
         Ok(response) => {
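The hunk above keeps only the head of the ureq call; the rest of the match lies outside the diff context. As a hedged sketch of how the success arm presumably continues (assuming ureq 2.x, where call() returns a Result, and reusing the error strings from the deleted reqwest block), written as a plain blocking function rather than the project's async fn:

// Sketch only: the real match arms are not shown in this diff.
fn request_sketch(path: &str) -> Result<serde_json::Value, &'static str> {
    let url = format!("https://www.reddit.com{}", path);

    match ureq::get(&url).call() {
        // Success: read the body and parse it as JSON
        Ok(response) => match response.into_string() {
            Ok(body) => serde_json::from_str(&body).map_err(|_| "Failed to parse page JSON data"),
            Err(_) => Err("Failed to parse page JSON data"),
        },
        // Transport error or non-success status
        Err(_) => Err("Couldn't send request to Reddit"),
    }
}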