Compare commits

...

13 Commits

Author SHA1 Message Date
f49bff9853 Optimize Sequencing 2021-01-02 11:09:26 -08:00
4ec529cdb8 Rewrite Reddit Links to Libreddit 2021-01-02 10:58:21 -08:00
779de6f8af Fix Wiki Titles 2021-01-01 22:34:25 -08:00
0925a9b334 Add Wiki Pages 2021-01-01 22:21:43 -08:00
2f2ed6169d Optimize use of .unwrap() 2021-01-01 15:28:13 -08:00
59ef30c76d Remove .clone() in favor of borrowing 2021-01-01 12:55:09 -08:00
d43b49e7e4 Optimize Rust code with Clippy 2021-01-01 12:33:57 -08:00
64a92195dd Merge pull request #19 from somoso/patch-1
Fix posts overflowing on Safari on iOS
2021-01-01 11:52:21 -08:00
a7925ed62d Fix posts overflowing on Safari on iOS
In Safari, the value `anywhere` is not supported for property `overflow-wrap`. Once changed to `break-word`, it behaves like it does in Chrome and Firefox.
2021-01-01 15:46:36 +00:00
39ba50dada Error Page 2020-12-31 21:03:44 -08:00
bc1b29246d Update Screenshot 2020-12-31 20:23:19 -08:00
2d77a91150 Refactor Page Titles and Add Subreddit/User Titles 2020-12-31 20:21:56 -08:00
93c1db502d Fix Title and Navbar 2020-12-31 16:45:10 -08:00
17 changed files with 392 additions and 297 deletions

39
Cargo.lock generated
View File

@ -93,7 +93,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ca8ce00b267af8ccebbd647de0d61e0674b6e61185cc7a592ff88772bed655"
dependencies = [
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -267,7 +267,7 @@ checksum = "ad26f77093333e0e7c6ffe54ebe3582d908a104e448723eec6d43d08b07143fb"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -346,7 +346,7 @@ checksum = "e5444eec77a9ec2bfe4524139e09195862e981400c4358d3b760cae634e4c4ee"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -357,7 +357,7 @@ checksum = "8d3a45e77e34375a7923b1e8febb049bb011f064714a8e17a1a616fef01da13d"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -562,7 +562,7 @@ checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -604,7 +604,7 @@ dependencies = [
"heck",
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -696,7 +696,7 @@ dependencies = [
"proc-macro-hack",
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1006,13 +1006,14 @@ checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
[[package]]
name = "libreddit"
version = "0.2.4"
version = "0.2.5"
dependencies = [
"actix-web",
"askama",
"async-recursion",
"base64 0.13.0",
"chrono",
"regex",
"reqwest",
"serde",
"serde_json",
@ -1263,7 +1264,7 @@ checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1274,7 +1275,7 @@ checksum = "f8e8d2bf0b23038a4424865103a4df472855692821aab4e4f5c3312d461d9e5f"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1563,7 +1564,7 @@ checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1679,7 +1680,7 @@ dependencies = [
"quote 1.0.8",
"serde",
"serde_derive",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1695,7 +1696,7 @@ dependencies = [
"serde_derive",
"serde_json",
"sha1",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1717,9 +1718,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.56"
version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9802ddde94170d186eeee5005b798d9c159fa970403f1be19976d0cfb939b72"
checksum = "4211ce9909eb971f111059df92c45640aad50a619cf55cd76476be803c4c68e6"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
@ -1743,7 +1744,7 @@ checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -1810,7 +1811,7 @@ dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"standback",
"syn 1.0.56",
"syn 1.0.57",
]
[[package]]
@ -2093,7 +2094,7 @@ dependencies = [
"log",
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
"wasm-bindgen-shared",
]
@ -2127,7 +2128,7 @@ checksum = "b5a48c72f299d80557c7c62e37e7225369ecc0c963964059509fbafe917c7549"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.8",
"syn 1.0.56",
"syn 1.0.57",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]

View File

@ -3,7 +3,7 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
version = "0.2.4"
version = "0.2.5"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"
@ -20,4 +20,5 @@ serde = "1.0.117"
serde_json = "1.0"
chrono = "0.4.19"
async-recursion = "0.3.1"
url = "2.2.0"
url = "2.2.0"
regex = "1"

View File

@ -34,7 +34,7 @@ Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the
## Screenshot
![](https://i.ibb.co/hM6WPHq/image.png)
![](https://i.ibb.co/vLhBdL4/libreddit-rust.png)
## Instances

View File

@ -31,35 +31,41 @@ async fn main() -> std::io::Result<()> {
if args.len() > 1 {
for arg in args {
if arg.starts_with("--address=") || arg.starts_with("-a=") {
let split: Vec<&str> = arg.split("=").collect();
let split: Vec<&str> = arg.split('=').collect();
address = split[1].to_string();
}
}
}
// start http server
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), address.clone());
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), &address);
HttpServer::new(|| {
App::new()
// .default_service(web::get().to(subreddit::page))
// TRAILING SLASH MIDDLEWARE
.wrap(NormalizePath::default())
// DEFAULT SERVICE
.default_service(web::get().to(utils::error))
// GENERAL SERVICES
.route("/style.css/", web::get().to(style))
.route("/favicon.ico/", web::get().to(|| HttpResponse::Ok()))
.route("/favicon.ico/", web::get().to(HttpResponse::Ok))
.route("/robots.txt/", web::get().to(robots))
// PROXY SERVICE
.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
// SEARCH SERVICES
.route("/search/", web::get().to(search::page))
.route("r/{sub}/search/", web::get().to(search::page))
.route("/search/", web::get().to(search::find))
.route("r/{sub}/search/", web::get().to(search::find))
// USER SERVICES
.route("/u/{username}/", web::get().to(user::profile))
.route("/user/{username}/", web::get().to(user::profile))
// WIKI SERVICES
.route("/wiki/", web::get().to(subreddit::wiki))
.route("/wiki/{page}/", web::get().to(subreddit::wiki))
.route("/r/{sub}/wiki/", web::get().to(subreddit::wiki))
.route("/r/{sub}/wiki/{page}/", web::get().to(subreddit::wiki))
// SUBREDDIT SERVICES
.route("/r/{sub}/", web::get().to(subreddit::page))
.route("/r/{sub}/{sort}/", web::get().to(subreddit::page))
.route("/r/{sub}/{sort:hot|new|top|rising}/", web::get().to(subreddit::page))
// POPULAR SERVICES
.route("/", web::get().to(subreddit::page))
.route("/{sort:best|hot|new|top|rising}/", web::get().to(subreddit::page))
@ -68,8 +74,8 @@ async fn main() -> std::io::Result<()> {
.route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::item))
.route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item))
})
.bind(address.clone())
.expect(format!("Cannot bind to the address: {}", address).as_str())
.bind(&address)
.unwrap_or_else(|_| panic!("Cannot bind to the address: {}", address))
.run()
.await
}

View File

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{format_num, format_url, param, request, val, Comment, ErrorTemplate, Flags, Flair, Post};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use crate::utils::{error, format_num, format_url, param, request, rewrite_url, val, Comment, Flags, Flair, Post};
use actix_web::{HttpRequest, HttpResponse, Result};
use async_recursion::async_recursion;
@ -18,7 +18,7 @@ struct PostTemplate {
pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let sort = param(&path, "sort").await;
let sort = param(&path, "sort");
let id = req.match_info().get("id").unwrap_or("").to_string();
// Log the post ID being fetched in debug mode
@ -26,27 +26,19 @@ pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
dbg!(&id);
// Send a request to the url, receive JSON in response
let req = request(path.clone()).await;
// If the Reddit API returns an error, exit and send error page to user
if req.is_err() {
let s = ErrorTemplate {
message: req.err().unwrap().to_string(),
}
.render()
.unwrap();
return Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s));
} else {
match request(&path).await {
// Otherwise, grab the JSON output from the request
let res = req.unwrap();
Ok(res) => {
// Parse the JSON into Post and Comment structs
let post = parse_post(&res[0]).await.unwrap();
let comments = parse_comments(&res[1]).await.unwrap();
// Parse the JSON into Post and Comment structs
let post = parse_post(res[0].clone()).await.unwrap();
let comments = parse_comments(res[1].clone()).await.unwrap();
// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate { comments, post, sort }.render().unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate { comments, post, sort }.render().unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
// If the Reddit API returns an error, exit and send error page to user
Err(msg) => error(msg.to_string()).await,
}
}
@ -55,13 +47,13 @@ async fn media(data: &serde_json::Value) -> (String, String) {
let post_type: &str;
let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
post_type = "video";
format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string()).await
format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string())
} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
post_type = "video";
format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string()).await
format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string())
} else if data["post_hint"].as_str().unwrap_or("") == "image" {
post_type = "image";
format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string()).await
format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string())
} else {
post_type = "link";
data["url"].as_str().unwrap().to_string()
@ -71,7 +63,7 @@ async fn media(data: &serde_json::Value) -> (String, String) {
}
// POSTS
async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
// Retrieve post (as opposed to comments) from JSON
let post_data: &serde_json::Value = &json["data"]["children"][0];
@ -85,22 +77,22 @@ async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
// Build a post using data parsed from Reddit post API
let post = Post {
title: val(post_data, "title").await,
community: val(post_data, "subreddit").await,
body: val(post_data, "selftext_html").await,
author: val(post_data, "author").await,
title: val(post_data, "title"),
community: val(post_data, "subreddit"),
body: rewrite_url(&val(post_data, "selftext_html")),
author: val(post_data, "author"),
author_flair: Flair(
val(post_data, "author_flair_text").await,
val(post_data, "author_flair_background_color").await,
val(post_data, "author_flair_text_color").await,
val(post_data, "author_flair_text"),
val(post_data, "author_flair_background_color"),
val(post_data, "author_flair_text_color"),
),
url: val(post_data, "permalink").await,
url: val(post_data, "permalink"),
score: format_num(score),
post_type: media.0,
flair: Flair(
val(post_data, "link_flair_text").await,
val(post_data, "link_flair_background_color").await,
if val(post_data, "link_flair_text_color").await == "dark" {
val(post_data, "link_flair_text"),
val(post_data, "link_flair_background_color"),
if val(post_data, "link_flair_text_color") == "dark" {
"black".to_string()
} else {
"white".to_string()
@ -119,7 +111,7 @@ async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
// COMMENTS
#[async_recursion]
async fn parse_comments(json: serde_json::Value) -> Result<Vec<Comment>, &'static str> {
async fn parse_comments(json: &serde_json::Value) -> Result<Vec<Comment>, &'static str> {
// Separate the comment JSON into a Vector of comments
let comment_data = json["data"]["children"].as_array().unwrap();
@ -133,25 +125,25 @@ async fn parse_comments(json: serde_json::Value) -> Result<Vec<Comment>, &'stati
}
let score = comment["data"]["score"].as_i64().unwrap_or(0);
let body = val(comment, "body_html").await;
let body = rewrite_url(&val(comment, "body_html"));
let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
parse_comments(comment["data"]["replies"].clone()).await.unwrap_or(Vec::new())
parse_comments(&comment["data"]["replies"]).await.unwrap_or_default()
} else {
Vec::new()
};
comments.push(Comment {
id: val(comment, "id").await,
body: body,
author: val(comment, "author").await,
id: val(comment, "id"),
body,
author: val(comment, "author"),
score: format_num(score),
time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
replies: replies,
replies,
flair: Flair(
val(comment, "author_flair_text").await,
val(comment, "author_flair_background_color").await,
val(comment, "author_flair_text_color").await,
val(comment, "author_flair_text"),
val(comment, "author_flair_background_color"),
val(comment, "author_flair_text_color"),
),
});
}

View File

@ -23,7 +23,7 @@ pub async fn handler(web::Path(url): web::Path<String>) -> Result<HttpResponse>
.send()
.await
.map_err(Error::from)
.and_then(|res| Ok(HttpResponse::build(res.status()).streaming(res)))
.map(|res| HttpResponse::build(res.status()).streaming(res))
} else {
Ok(HttpResponse::Ok().body(""))
}

View File

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{fetch_posts, param, ErrorTemplate, Post};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use crate::utils::{error, fetch_posts, param, Post};
use actix_web::{HttpRequest, HttpResponse, Result};
use askama::Template;
// STRUCTS
@ -16,37 +16,29 @@ struct SearchTemplate {
}
// SERVICES
pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}", req.path(), req.query_string());
let q = param(&path, "q").await;
let sort = if param(&path, "sort").await.is_empty() {
let q = param(&path, "q");
let sort = if param(&path, "sort").is_empty() {
"relevance".to_string()
} else {
param(&path, "sort").await
param(&path, "sort")
};
let sub = req.match_info().get("sub").unwrap_or("").to_string();
let posts = fetch_posts(path.clone(), String::new()).await;
if posts.is_err() {
let s = ErrorTemplate {
message: posts.err().unwrap().to_string(),
match fetch_posts(&path, String::new()).await {
Ok(posts) => {
let s = SearchTemplate {
posts: posts.0,
query: q,
sub,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), posts.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let items = posts.unwrap();
let s = SearchTemplate {
posts: items.0,
query: q,
sub: sub,
sort: (sort, param(&path, "t").await),
ends: (param(&path, "after").await, items.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
Err(msg) => error(msg.to_string()).await,
}
}

View File

@ -1,8 +1,7 @@
// CRATES
use crate::utils::{fetch_posts, format_num, format_url, param, request, val, ErrorTemplate, Post, Subreddit};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use crate::utils::{error, fetch_posts, format_num, format_url, param, request, rewrite_url, val, Post, Subreddit};
use actix_web::{HttpRequest, HttpResponse, Result};
use askama::Template;
use std::convert::TryInto;
// STRUCTS
#[derive(Template)]
@ -14,80 +13,93 @@ struct SubredditTemplate {
ends: (String, String),
}
#[derive(Template)]
#[template(path = "wiki.html", escape = "none")]
struct WikiTemplate {
sub: String,
wiki: String,
page: String,
}
// SERVICES
// web::Path(sub): web::Path<String>, params: web::Query<Params>
pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}", req.path(), req.query_string());
let sub = req.match_info().get("sub").unwrap_or("popular").to_string();
let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
let sub_result = if !&sub.contains("+") && sub != "popular" {
subreddit(&sub).await
let sub_result = if !&sub.contains('+') && sub != "popular" {
subreddit(&sub).await.unwrap_or_default()
} else {
Ok(Subreddit::default())
Subreddit::default()
};
let posts = fetch_posts(path.clone(), String::new()).await;
if posts.is_err() {
let s = ErrorTemplate {
message: posts.err().unwrap().to_string(),
match fetch_posts(&path, String::new()).await {
Ok(items) => {
let s = SubredditTemplate {
sub: sub_result,
posts: items.0,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), items.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let sub = sub_result.unwrap_or(Subreddit::default());
let items = posts.unwrap();
Err(msg) => error(msg.to_string()).await,
}
}
let s = SubredditTemplate {
sub: sub,
posts: items.0,
sort: (sort, param(&path, "t").await),
ends: (param(&path, "after").await, items.1),
pub async fn wiki(req: HttpRequest) -> Result<HttpResponse> {
let sub = req.match_info().get("sub").unwrap_or("reddit.com");
let page = req.match_info().get("page").unwrap_or("index");
let path: String = format!("r/{}/wiki/{}.json?raw_json=1", sub, page);
match request(&path).await {
Ok(res) => {
let s = WikiTemplate {
sub: sub.to_string(),
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap()),
page: page.to_string(),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
Err(msg) => error(msg.to_string()).await,
}
}
// SUBREDDIT
async fn subreddit(sub: &String) -> Result<Subreddit, &'static str> {
async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
// Build the Reddit JSON API url
let url: String = format!("r/{}/about.json?raw_json=1", sub);
let path: String = format!("r/{}/about.json?raw_json=1", sub);
// Send a request to the url, receive JSON in response
let req = request(url).await;
// Send a request to the url
match request(&path).await {
// If success, receive JSON in response
Ok(res) => {
// Metadata regarding the subreddit
let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
// If the Reddit API returns an error, exit this function
if req.is_err() {
return Err(req.err().unwrap());
// Fetch subreddit icon either from the community_icon or icon_img value
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or("").split('?').collect::<Vec<&str>>()[0];
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
let sub = Subreddit {
name: val(&res, "display_name"),
title: val(&res, "title"),
description: val(&res, "public_description"),
info: rewrite_url(&val(&res, "description_html").replace("\\", "")),
icon: format_url(icon),
members: format_num(members),
active: format_num(active),
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
};
Ok(sub)
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}
// Otherwise, grab the JSON output from the request
let res = req.unwrap();
// Metadata regarding the subreddit
let members = res["data"]["subscribers"].as_u64().unwrap_or(0);
let active = res["data"]["accounts_active"].as_u64().unwrap_or(0);
// Fetch subreddit icon either from the community_icon or icon_img value
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap().split("?").collect::<Vec<&str>>()[0];
let icon = if community_icon.is_empty() {
val(&res, "icon_img").await
} else {
community_icon.to_string()
};
let sub = Subreddit {
name: val(&res, "display_name").await,
title: val(&res, "title").await,
description: val(&res, "public_description").await,
info: val(&res, "description_html").await.replace("\\", ""),
icon: format_url(icon).await,
members: format_num(members.try_into().unwrap()),
active: format_num(active.try_into().unwrap()),
};
Ok(sub)
}

View File

@ -1,6 +1,6 @@
// CRATES
use crate::utils::{fetch_posts, format_url, nested_val, param, request, ErrorTemplate, Post, User};
use actix_web::{http::StatusCode, HttpRequest, HttpResponse, Result};
use crate::utils::{error, fetch_posts, format_url, nested_val, param, request, Post, User};
use actix_web::{HttpRequest, HttpResponse, Result};
use askama::Template;
use chrono::{TimeZone, Utc};
@ -19,67 +19,58 @@ pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
// Retrieve other variables from Libreddit request
let sort = param(&path, "sort").await;
let sort = param(&path, "sort");
let username = req.match_info().get("username").unwrap_or("").to_string();
// Request user profile data and user posts/comments from Reddit
let user = user(&username).await;
let posts = fetch_posts(path.clone(), "Comment".to_string()).await;
let posts = fetch_posts(&path, "Comment".to_string()).await;
// If there is an error show error page
if user.is_err() || posts.is_err() {
let s = ErrorTemplate {
message: user.err().unwrap().to_string(),
match posts {
Ok(items) => {
let s = UserTemplate {
user: user.unwrap(),
posts: items.0,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), items.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let posts_unwrapped = posts.unwrap();
let s = UserTemplate {
user: user.unwrap(),
posts: posts_unwrapped.0,
sort: (sort, param(&path, "t").await),
ends: (param(&path, "after").await, posts_unwrapped.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
// If there is an error show error page
Err(msg) => error(msg.to_string()).await,
}
}
// SERVICES
// pub async fn page(web::Path(username): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
// render(username, params.sort.clone(), params.t.clone(), (params.before.clone(), params.after.clone())).await
// }
// USER
async fn user(name: &String) -> Result<User, &'static str> {
// Build the Reddit JSON API url
let url: String = format!("user/{}/about.json", name);
async fn user(name: &str) -> Result<User, &'static str> {
// Build the Reddit JSON API path
let path: String = format!("user/{}/about.json", name);
// Send a request to the url, receive JSON in response
let req = request(url).await;
let res;
// If the Reddit API returns an error, exit this function
if req.is_err() {
return Err(req.err().unwrap());
// Send a request to the url
match request(&path).await {
// If success, receive JSON in response
Ok(response) => {
res = response;
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}
// Otherwise, grab the JSON output from the request
let res = req.unwrap();
// Grab creation date as unix timestamp
let created: i64 = res["data"]["created"].as_f64().unwrap().round() as i64;
// Parse the JSON output into a User struct
Ok(User {
name: name.to_string(),
icon: format_url(nested_val(&res, "subreddit", "icon_img").await).await,
title: nested_val(&res, "subreddit", "title"),
icon: format_url(nested_val(&res, "subreddit", "icon_img")),
karma: res["data"]["total_karma"].as_i64().unwrap(),
created: Utc.timestamp(created, 0).format("%b %e, %Y").to_string(),
banner: nested_val(&res, "subreddit", "banner_img").await,
description: nested_val(&res, "subreddit", "public_description").await,
banner: nested_val(&res, "subreddit", "banner_img"),
description: nested_val(&res, "subreddit", "public_description"),
})
}

View File

@ -1,8 +1,11 @@
//
// CRATES
//
use actix_web::{http::StatusCode, HttpResponse, Result};
use askama::Template;
use chrono::{TimeZone, Utc};
use serde_json::{from_str, Value};
use regex::Regex;
use serde_json::from_str;
use url::Url;
// use surf::{client, get, middleware::Redirect};
@ -50,6 +53,7 @@ pub struct Comment {
// User struct containing metadata about user
pub struct User {
pub name: String,
pub title: String,
pub icon: String,
pub karma: i64,
pub created: String,
@ -67,6 +71,7 @@ pub struct Subreddit {
pub icon: String,
pub members: String,
pub active: String,
pub wiki: bool,
}
// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
@ -80,7 +85,7 @@ pub struct Params {
}
// Error template
#[derive(askama::Template)]
#[derive(Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
pub message: String,
@ -91,14 +96,14 @@ pub struct ErrorTemplate {
//
// Grab a query param from a url
pub async fn param(path: &String, value: &str) -> String {
pub fn param(path: &str, value: &str) -> String {
let url = Url::parse(format!("https://reddit.com/{}", path).as_str()).unwrap();
let pairs: std::collections::HashMap<_, _> = url.query_pairs().into_owned().collect();
pairs.get(value).unwrap_or(&String::new()).to_owned()
}
// Direct urls to proxy if proxy is enabled
pub async fn format_url(url: String) -> String {
pub fn format_url(url: String) -> String {
if url.is_empty() {
return String::new();
};
@ -110,6 +115,12 @@ pub async fn format_url(url: String) -> String {
return url.to_string();
}
// Rewrite Reddit links to Libreddit in body of text
pub fn rewrite_url(text: &str) -> String {
let re = Regex::new(r#"href="(https://|http://|)(www.|)(reddit).(com)/"#).unwrap();
re.replace_all(text, r#"href="/"#).to_string()
}
// Append `m` and `k` for millions and thousands respectively
pub fn format_num(num: i64) -> String {
if num > 1000000 {
@ -126,60 +137,65 @@ pub fn format_num(num: i64) -> String {
//
// val() function used to parse JSON from Reddit APIs
pub async fn val(j: &serde_json::Value, k: &str) -> String {
String::from(j["data"][k].as_str().unwrap_or(""))
pub fn val(j: &serde_json::Value, k: &str) -> String {
String::from(j["data"][k].as_str().unwrap_or_default())
}
// nested_val() function used to parse JSON from Reddit APIs
pub async fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
String::from(j["data"][n][k].as_str().unwrap())
pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
String::from(j["data"][n][k].as_str().unwrap_or_default())
}
// Fetch posts of a user or subreddit
pub async fn fetch_posts(path: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
// Send a request to the url, receive JSON in response
let req = request(path.clone()).await;
pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
let res;
let post_list;
// If the Reddit API returns an error, exit this function
if req.is_err() {
return Err(req.err().unwrap());
// Send a request to the url
match request(&path).await {
// If success, receive JSON in response
Ok(response) => {
res = response;
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}
// Otherwise, grab the JSON output from the request
let res = req.unwrap();
// Fetch the list of posts from the JSON response
let post_list = res["data"]["children"].as_array().unwrap();
match res["data"]["children"].as_array() {
Some(list) => post_list = list,
None => return Err("No posts found"),
}
let mut posts: Vec<Post> = Vec::new();
for post in post_list {
let img = if val(post, "thumbnail").await.starts_with("https:/") {
format_url(val(post, "thumbnail").await).await
let img = if val(post, "thumbnail").starts_with("https:/") {
format_url(val(post, "thumbnail"))
} else {
String::new()
};
let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64;
let score = post["data"]["score"].as_i64().unwrap();
let title = val(post, "title").await;
let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
let score = post["data"]["score"].as_i64().unwrap_or_default();
let title = val(post, "title");
posts.push(Post {
title: if title.is_empty() { fallback_title.to_owned() } else { title },
community: val(post, "subreddit").await,
body: val(post, "body_html").await,
author: val(post, "author").await,
community: val(post, "subreddit"),
body: rewrite_url(&val(post, "body_html")),
author: val(post, "author"),
author_flair: Flair(
val(post, "author_flair_text").await,
val(post, "author_flair_background_color").await,
val(post, "author_flair_text_color").await,
val(post, "author_flair_text"),
val(post, "author_flair_background_color"),
val(post, "author_flair_text_color"),
),
score: format_num(score),
post_type: "link".to_string(),
media: img,
flair: Flair(
val(post, "link_flair_text").await,
val(post, "link_flair_background_color").await,
if val(post, "link_flair_text_color").await == "dark" {
val(post, "link_flair_text"),
val(post, "link_flair_background_color"),
if val(post, "link_flair_text_color") == "dark" {
"black".to_string()
} else {
"white".to_string()
@ -189,13 +205,11 @@ pub async fn fetch_posts(path: String, fallback_title: String) -> Result<(Vec<Po
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
},
url: val(post, "permalink").await,
url: val(post, "permalink"),
time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
});
}
dbg!(path);
Ok((posts, res["data"]["after"].as_str().unwrap_or("").to_string()))
}
@ -203,8 +217,14 @@ pub async fn fetch_posts(path: String, fallback_title: String) -> Result<(Vec<Po
// NETWORKING
//
pub async fn error(message: String) -> Result<HttpResponse> {
let msg = if message.is_empty() { "Page not found".to_string() } else { message };
let body = ErrorTemplate { message: msg }.render().unwrap_or_default();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body))
}
// Make a request to a Reddit API and parse the JSON response
pub async fn request(path: String) -> Result<serde_json::Value, &'static str> {
pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
let url = format!("https://www.reddit.com/{}", path);
// --- actix-web::client ---
@ -229,20 +249,22 @@ pub async fn request(path: String) -> Result<serde_json::Value, &'static str> {
// --- reqwest ---
let res = reqwest::get(&url).await.unwrap();
// Read the status from the response
let success = res.status().is_success();
// Read the body of the response
let body = res.text().await.unwrap();
// Parse the response from Reddit as JSON
let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);
if !success {
println!("! {} - {}", url, "Page not found");
Err("Page not found")
} else if json == Value::Null {
println!("! {} - {}", url, "Failed to parse page JSON data");
Err("Failed to parse page JSON data")
} else {
Ok(json)
match res.status().is_success() {
true => {
// Parse the response from Reddit as JSON
match from_str(res.text().await.unwrap_or_default().as_str()) {
Ok(json) => Ok(json),
Err(_) => {
#[cfg(debug_assertions)]
dbg!(format!("{} - Failed to parse page JSON data", url));
Err("Failed to parse page JSON data")
}
}
}
false => {
#[cfg(debug_assertions)]
dbg!(format!("{} - Page not found", url));
Err("Page not found")
}
}
}

View File

@ -24,7 +24,6 @@
body {
background: var(--background);
visibility: visible !important;
}
nav {
@ -35,7 +34,7 @@ nav {
background: var(--outside);
padding: 5px 15px;
font-size: 20px;
height: 40px;
min-height: 40px;
}
nav #lib, nav #github, nav #version { color: white; }
@ -44,11 +43,17 @@ nav #version { opacity: 25%; }
main {
display: flex;
justify-content: center;
max-width: 750px;
max-width: 1000px;
padding: 10px 20px;
margin: 20px auto;
}
#column_one {
max-width: 750px;
border-radius: 5px;
overflow: hidden;
}
footer {
display: flex;
justify-content: center;
@ -98,18 +103,22 @@ aside {
height: max-content;
background: var(--outside);
border-radius: 5px;
overflow: hidden;
}
#sidebar, #sidebar_contents {
margin-top: 20px;
}
#user *, #subreddit * { text-align: center; }
#subreddit { padding: 0; }
#sub_meta { padding: 20px; }
#sidebar, #sidebar_contents { margin-top: 20px; }
#sidebar_label {
border: 2px solid var(--highlighted);
padding: 10px;
}
#user_icon, #subreddit_icon {
#user_icon, #sub_icon {
width: 100px;
height: 100px;
border: 2px solid var(--accent);
@ -118,28 +127,55 @@ aside {
margin: 10px;
}
#user_name, #subreddit_name {
margin-top: 10px;
#user_title, #sub_title {
margin: 0 20px;
font-size: 20px;
font-weight: bold;
}
#user_description, #subreddit_description {
margin: 10px 20px;
text-align: center;
#user_description, #sub_description {
margin: 0 20px;
font-size: 15px;
}
#user_details, #subreddit_details {
#user_name, #sub_name, #user_icon, #sub_icon, #user_description, #sub_description { margin-bottom: 20px; }
#user_details, #sub_details {
display: grid;
grid-template-columns: repeat(2, 1fr);
margin-top: 15px;
grid-column-gap: 20px;
}
#user_details > label, #subreddit_details > label {
#user_details > label, #sub_details > label {
color: var(--accent);
font-size: 15px;
}
/* Wiki Pages */
#wiki {
background: var(--foreground);
padding: 35px;
}
#top {
background: var(--highlighted);
font-size: 18px;
width: 100%;
display: flex;
}
#top > * {
flex-grow: 1;
text-align: center;
height: 40px;
line-height: 40px;
}
#top > div {
border-bottom: 2px solid white;
}
/* Sorting and Search */
select {
@ -198,7 +234,7 @@ input[type="submit"]:hover { color: var(--accent); }
#search_sort {
background: var(--highlighted);
border-radius: 5px;
overflow: hidden;
overflow: auto;
}
#search_sort > #search {
@ -230,7 +266,7 @@ input[type="submit"]:hover { color: var(--accent); }
box-shadow: var(--black-contrast);
background: var(--outside);
display: flex;
overflow: hidden;
overflow: auto;
}
#sort_options > a, footer > a {
@ -273,7 +309,7 @@ input[type="submit"]:hover { color: var(--accent); }
.post_left, .post_right {
display: flex;
flex-direction: column;
overflow-wrap: anywhere;
overflow-wrap: break-word;
}
.post_left {
@ -496,11 +532,11 @@ input[type="submit"]:hover { color: var(--accent); }
}
.md a {
text-decoration: underline;
color: var(--accent);
}
.md li { margin: 10px 0; }
.toc_child { list-style: none; }
.md pre {
background: var(--outside);
@ -566,12 +602,13 @@ td, th {
main { flex-direction: column-reverse; }
nav { flex-direction: column; }
aside {
margin: 20px 0 0 0;
aside, #subreddit, #user {
margin: 0;
max-width: 100%;
}
#user, #sidebar {
margin: 20px 0;
}
}
#user, #sidebar { margin: 20px 0; }
#logo { margin: 5px auto; }
#searchbox { width: 100%; }
#github { display: none; }
}

View File

@ -12,9 +12,9 @@
<link rel="stylesheet" href="/style.css">
{% endblock %}
</head>
<body style="visibility: hidden;">
<body>
<nav>
<a href="/"><span id="lib">lib</span>reddit. <span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span></a>
<a id="logo" href="/"><span id="lib">lib</span>reddit. <span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span></a>
{% block search %}{% endblock %}
<a id="github" href="https://github.com/spikecodes/libreddit">GITHUB</a>
</nav>

View File

@ -1,6 +1,8 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}
{% block title %}Libreddit: search results - {{ query }}{% endblock %}
{% block content %}
<div id="column_one">
<form id="search_sort">

View File

@ -1,7 +1,11 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}
{% block title %}r/{{ sub.name }}: {{ sub.description }}{% endblock %}
{% block title %}
{% if sub.title != "" %}{{ sub.title }}
{% else if sub.name != "" %}{{ sub.name }}
{% else %}Libreddit{% endif %}
{% endblock %}
{% block search %}
{% call utils::search(["/r/", sub.name.as_str()].concat(), "") %}
@ -62,14 +66,23 @@
{% if sub.name != "" %}
<aside>
<div id="subreddit">
<img id="subreddit_icon" src="{{ sub.icon }}">
<p id="subreddit_name">r/{{ sub.name }}</p>
<p id="subreddit_description">{{ sub.description }}</p>
<div id="subreddit_details">
<label>Members</label>
<label>Active</label>
<div>{{ sub.members }}</div>
<div>{{ sub.active }}</div>
{% if sub.wiki %}
<div id="top">
<div>Posts</div>
<a href="/r/{{ sub.name }}/wiki/index">Wiki</a>
</div>
{% endif %}
<div id="sub_meta">
<img id="sub_icon" src="{{ sub.icon }}">
<p id="sub_title">{{ sub.title }}</p>
<p id="sub_name">r/{{ sub.name }}</p>
<p id="sub_description">{{ sub.description }}</p>
<div id="sub_details">
<label>Members</label>
<label>Active</label>
<div>{{ sub.members }}</div>
<div>{{ sub.active }}</div>
</div>
</div>
</div>
<details id="sidebar">

View File

@ -5,7 +5,7 @@
{% call utils::search("".to_owned(), "", "") %}
{% endblock %}
{% block title %}Libreddit: u/{{ user.name }}{% endblock %}
{% block title %}{{ user.name.replace("u/", "") }} (u/{{ user.name }}) - Libreddit{% endblock %}
{% block body %}
<main style="max-width: 1000px;">
<div id="column_one">
@ -72,6 +72,7 @@
<aside>
<div id="user">
<img id="user_icon" src="{{ user.icon }}">
<p id="user_title">{{ user.title }}</p>
<p id="user_name">u/{{ user.name }}</p>
<div id="user_description">{{ user.description }}</div>
<div id="user_details">

View File

@ -15,9 +15,9 @@
{%- endmacro %}
{% macro search(root, search) -%}
<form action="{% if root != "/r/" %}{{ root }}{% endif %}/search/" id="searchbox">
<form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search/" id="searchbox">
<input id="search" type="text" name="q" placeholder="Search" value="{{ search }}">
{% if root != "/r/" %}
{% if root != "/r/" && !root.is_empty() %}
<div id="inside">
<input type="checkbox" name="restrict_sr" id="restrict_sr" checked="checked" data-com.bitwarden.browser.user-edited="yes">
<label for="restrict_sr">in {{ root }}</label>

25
templates/wiki.html Normal file
View File

@ -0,0 +1,25 @@
{# Wiki page template: renders a subreddit wiki page (`wiki` HTML) with a Posts/Wiki tab bar. #}
{% extends "base.html" %}
{% import "utils.html" as utils %}
{# Browser title: "<page> - <sub>" when a subreddit is known, otherwise just "Libreddit". #}
{% block title %}
{% if sub != "" %}{{ page }} - {{ sub }}
{% else %}Libreddit{% endif %}
{% endblock %}
{# Scope the navbar search box to this subreddit (e.g. "/r/rust"). #}
{% block search %}
{% call utils::search(["/r/", sub.as_str()].concat(), "") %}
{% endblock %}
{% block body %}
<main>
<div id="column_one">
{# Tab bar: "Posts" links back to the subreddit; "Wiki" is the active (non-link) tab. #}
<div id="top">
<a href="/r/{{ sub }}">Posts</a>
<div>Wiki</div>
</div>
{# Pre-rendered wiki HTML supplied by the handler. #}
<div id="wiki">
{{ wiki }}
</div>
</div>
</main>
{% endblock %}