Compare commits
22 Commits
f49bff9853
4ec529cdb8
779de6f8af
0925a9b334
2f2ed6169d
59ef30c76d
d43b49e7e4
64a92195dd
a7925ed62d
39ba50dada
bc1b29246d
2d77a91150
93c1db502d
a6dc7ee043
c7282520cd
a866c1d068
aa9aad6743
f65ee2eb6a
44c4341e67
1c886f8003
b481d26be2
f00ef59404
Cargo.lock (58 changes, generated)
@@ -93,7 +93,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ca8ce00b267af8ccebbd647de0d61e0674b6e61185cc7a592ff88772bed655"
dependencies = [
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -267,14 +267,14 @@ checksum = "ad26f77093333e0e7c6ffe54ebe3582d908a104e448723eec6d43d08b07143fb"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
name = "addr2line"
- version = "0.14.0"
+ version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423"
+ checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7"
dependencies = [
 "gimli",
]
@@ -346,7 +346,7 @@ checksum = "e5444eec77a9ec2bfe4524139e09195862e981400c4358d3b760cae634e4c4ee"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -357,7 +357,7 @@ checksum = "8d3a45e77e34375a7923b1e8febb049bb011f064714a8e17a1a616fef01da13d"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -562,7 +562,7 @@ checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -604,7 +604,7 @@ dependencies = [
 "heck",
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -696,7 +696,7 @@ dependencies = [
 "proc-macro-hack",
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -755,11 +755,11 @@ dependencies = [

[[package]]
name = "getrandom"
- version = "0.1.15"
+ version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
+ checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
- "cfg-if 0.1.10",
+ "cfg-if 1.0.0",
 "libc",
 "wasi 0.9.0+wasi-snapshot-preview1",
]
@@ -1006,16 +1006,18 @@ checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"

[[package]]
name = "libreddit"
- version = "0.2.3"
+ version = "0.2.5"
dependencies = [
 "actix-web",
 "askama",
 "async-recursion",
 "base64 0.13.0",
 "chrono",
+ "regex",
 "reqwest",
 "serde",
 "serde_json",
+ "url",
]

[[package]]
@@ -1262,7 +1264,7 @@ checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -1273,7 +1275,7 @@ checksum = "f8e8d2bf0b23038a4424865103a4df472855692821aab4e4f5c3312d461d9e5f"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -1562,7 +1564,7 @@ checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -1624,9 +1626,9 @@ checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"

[[package]]
name = "smallvec"
- version = "1.5.1"
+ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "ae524f056d7d770e174287294f562e95044c68e88dec909a00d2094805db9d75"
+ checksum = "1a55ca5f3b68e41c979bf8c46a6f1da892ca4db8f94023ce0bd32407573b1ac0"

[[package]]
name = "socket2"
@@ -1647,9 +1649,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"

[[package]]
name = "standback"
- version = "0.2.13"
+ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "cf906c8b8fc3f6ecd1046e01da1d8ddec83e48c8b08b84dcc02b585a6bedf5a8"
+ checksum = "c66a8cff4fa24853fdf6b51f75c6d7f8206d7c75cab4e467bcd7f25c2b1febe0"
dependencies = [
 "version_check 0.9.2",
]
@@ -1678,7 +1680,7 @@ dependencies = [
 "quote 1.0.8",
 "serde",
 "serde_derive",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -1694,7 +1696,7 @@ dependencies = [
 "serde_derive",
 "serde_json",
 "sha1",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -1716,9 +1718,9 @@ dependencies = [

[[package]]
name = "syn"
- version = "1.0.56"
+ version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "a9802ddde94170d186eeee5005b798d9c159fa970403f1be19976d0cfb939b72"
+ checksum = "4211ce9909eb971f111059df92c45640aad50a619cf55cd76476be803c4c68e6"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
@@ -1742,7 +1744,7 @@ checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -1809,7 +1811,7 @@ dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
 "standback",
- "syn 1.0.56",
+ "syn 1.0.57",
]

[[package]]
@@ -2092,7 +2094,7 @@ dependencies = [
 "log",
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
 "wasm-bindgen-shared",
]

@@ -2126,7 +2128,7 @@ checksum = "b5a48c72f299d80557c7c62e37e7225369ecc0c963964059509fbafe917c7549"
dependencies = [
 "proc-macro2 1.0.24",
 "quote 1.0.8",
- "syn 1.0.56",
+ "syn 1.0.57",
 "wasm-bindgen-backend",
 "wasm-bindgen-shared",
]
Cargo.toml
@@ -3,7 +3,7 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
- version = "0.2.3"
+ version = "0.2.5"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"

@@ -19,4 +19,6 @@ askama = "0.8.0"
serde = "1.0.117"
serde_json = "1.0"
chrono = "0.4.19"
async-recursion = "0.3.1"
async-recursion = "0.3.1"
+ url = "2.2.0"
+ regex = "1"
README.md (11 changes)
@@ -18,7 +18,6 @@ Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the
- [About](#about)
- [Elsewhere](#elsewhere)
- [Info](#info)
- [In Progress](#in-progress)
- [Teddit Comparison](#how-does-it-compare-to-teddit)
- [Comparison](#comparison)
- [Speed](#speed)
@@ -35,7 +34,7 @@ Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the

## Screenshot



## Instances

@@ -47,6 +46,8 @@ Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new)
| [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | ✅ |
| [libreddit.insanity.wtf](https://libreddit.insanity.wtf) | 🇺🇸 US | ✅ |
| [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | ✅ |
| [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |

A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare). The checkmark will not be listed for a site which uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.

@@ -64,9 +65,6 @@ Libreddit hopes to provide an easier way to browse Reddit, without the ads, trac

Libreddit currently implements most of Reddit's functionalities but still lacks a few features that are being worked on below.

### In Progress
- Searching

### How does it compare to Teddit?

Teddit is another awesome open source project designed to provide an alternative frontend to Reddit. There is no connection between the two and you're welcome to use whichever one you favor. Competition fosters innovation and Teddit's release has motivated me to build Libreddit into an even more polished product.
@@ -74,7 +72,6 @@ Teddit is another awesome open source project designed to provide an alternative

If you are looking to compare, the biggest differences I have noticed are:
- Libreddit is themed around Reddit's redesign whereas Teddit appears to stick much closer to Reddit's old design. This may suit some users better as design is always subjective.
- Libreddit is written in [Rust](https://www.rust-lang.org) for speed and memory safety. It uses [Actix Web](https://actix.rs), which was [benchmarked as the fastest web server for single queries](https://www.techempower.com/benchmarks/#hw=ph&test=db).
- Unlike Teddit (at the time of writing this), Libreddit does not require a Reddit API key to host.

## Comparison

@@ -127,7 +124,7 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot

For transparency, I hope to describe all the ways Libreddit handles user privacy.

**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs fetched to aid troubleshooting but nothing else.
**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid troubleshooting but nothing else.

**DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare meaning Cloudflare doesn't have access to user traffic.
src/main.rs (36 changes)
@@ -2,9 +2,9 @@
use actix_web::{get, middleware::NormalizePath, web, App, HttpResponse, HttpServer};

// Reference local files
- mod popular;
mod post;
mod proxy;
+ mod search;
mod subreddit;
mod user;
mod utils;
@@ -31,39 +31,51 @@ async fn main() -> std::io::Result<()> {
if args.len() > 1 {
for arg in args {
if arg.starts_with("--address=") || arg.starts_with("-a=") {
- let split: Vec<&str> = arg.split("=").collect();
+ let split: Vec<&str> = arg.split('=').collect();
address = split[1].to_string();
}
}
}

// start http server
- println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), address.clone());
+ println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), &address);

HttpServer::new(|| {
App::new()
// TRAILING SLASH MIDDLEWARE
.wrap(NormalizePath::default())
// DEFAULT SERVICE
.default_service(web::get().to(utils::error))
// GENERAL SERVICES
.route("/style.css/", web::get().to(style))
- .route("/favicon.ico/", web::get().to(|| HttpResponse::Ok()))
+ .route("/favicon.ico/", web::get().to(HttpResponse::Ok))
.route("/robots.txt/", web::get().to(robots))
// PROXY SERVICE
.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
+ // SEARCH SERVICES
+ .route("/search/", web::get().to(search::find))
+ .route("r/{sub}/search/", web::get().to(search::find))
// USER SERVICES
- .route("/u/{username}/", web::get().to(user::page))
- .route("/user/{username}/", web::get().to(user::page))
+ .route("/u/{username}/", web::get().to(user::profile))
+ .route("/user/{username}/", web::get().to(user::profile))
+ // WIKI SERVICES
+ .route("/wiki/", web::get().to(subreddit::wiki))
+ .route("/wiki/{page}/", web::get().to(subreddit::wiki))
+ .route("/r/{sub}/wiki/", web::get().to(subreddit::wiki))
+ .route("/r/{sub}/wiki/{page}/", web::get().to(subreddit::wiki))
// SUBREDDIT SERVICES
.route("/r/{sub}/", web::get().to(subreddit::page))
.route("/r/{sub}/{sort:hot|new|top|rising}/", web::get().to(subreddit::page))
// POPULAR SERVICES
- .route("/", web::get().to(popular::page))
+ .route("/", web::get().to(subreddit::page))
+ .route("/{sort:best|hot|new|top|rising}/", web::get().to(subreddit::page))
// POST SERVICES
- .route("/{id:.{5,6}}/", web::get().to(post::short))
- .route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::page))
- .route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::comment))
+ .route("/{id:.{5,6}}/", web::get().to(post::item))
+ .route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::item))
+ .route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item))
})
- .bind(address.clone())
- .expect(format!("Cannot bind to the address: {}", address).as_str())
+ .bind(&address)
+ .unwrap_or_else(|_| panic!("Cannot bind to the address: {}", address))
.run()
.await
}
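Note: the routing change above folds the old popular/short/comment/page handlers into two request-driven handlers, `subreddit::page` and `post::item`. A minimal, hypothetical sketch of that consolidation follows; the stub handler body, runtime attribute and bind address are assumptions and not project code.

```rust
use actix_web::{web, App, HttpRequest, HttpResponse, HttpServer, Result};

// Stub standing in for post::item: one handler now serves short links, full
// comment pages and single-comment permalinks, reading what it needs from the
// request's match_info instead of typed path extractors.
async fn item(req: HttpRequest) -> Result<HttpResponse> {
    let id = req.match_info().get("id").unwrap_or("").to_string();
    Ok(HttpResponse::Ok().body(format!("post {}", id)))
}

#[actix_web::main] // assumed runtime macro; the diff does not show which one the project uses
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .route("/{id:.{5,6}}/", web::get().to(item))
            .route("/r/{sub}/comments/{id}/{title}/", web::get().to(item))
            .route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(item))
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
```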
src/popular.rs (deleted)
@@ -1,55 +0,0 @@
// CRATES
use crate::utils::{fetch_posts, ErrorTemplate, Params, Post};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use askama::Template;

// STRUCTS
#[derive(Template)]
#[template(path = "popular.html", escape = "none")]
struct PopularTemplate {
posts: Vec<Post>,
sort: String,
ends: (String, String),
}

// RENDER
async fn render(sub_name: String, sort: Option<String>, ends: (Option<String>, Option<String>)) -> Result<HttpResponse> {
let sorting = sort.unwrap_or("hot".to_string());
let before = ends.1.clone().unwrap_or(String::new()); // If there is an after, there must be a before

// Build the Reddit JSON API url
let url = match ends.0 {
Some(val) => format!("r/{}/{}.json?before={}&count=25", sub_name, sorting, val),
None => match ends.1 {
Some(val) => format!("r/{}/{}.json?after={}&count=25", sub_name, sorting, val),
None => format!("r/{}/{}.json", sub_name, sorting),
},
};

let items_result = fetch_posts(url, String::new()).await;

if items_result.is_err() {
let s = ErrorTemplate {
message: items_result.err().unwrap().to_string(),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let items = items_result.unwrap();

let s = PopularTemplate {
posts: items.0,
sort: sorting,
ends: (before, items.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
}

// SERVICES
pub async fn page(params: web::Query<Params>) -> Result<HttpResponse> {
render("popular".to_string(), params.sort.clone(), (params.before.clone(), params.after.clone())).await
}
src/post.rs (123 changes)
@@ -1,6 +1,6 @@
// CRATES
use crate::utils::{format_num, format_url, request, val, Comment, ErrorTemplate, Flair, Params, Post};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use crate::utils::{error, format_num, format_url, param, request, rewrite_url, val, Comment, Flags, Flair, Post};
use actix_web::{HttpRequest, HttpResponse, Result};

use async_recursion::async_recursion;

@@ -16,62 +16,30 @@ struct PostTemplate {
sort: String,
}

async fn render(id: String, sort: Option<String>, comment_id: Option<String>) -> Result<HttpResponse> {
pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let sort = param(&path, "sort");
let id = req.match_info().get("id").unwrap_or("").to_string();

// Log the post ID being fetched in debug mode
#[cfg(debug_assertions)]
dbg!(&id);

// Handling sort paramater
let sorting: String = sort.unwrap_or("confidence".to_string());

// Build the Reddit JSON API url
let url: String = match comment_id {
None => format!("{}.json?sort={}&raw_json=1", id, sorting),
Some(val) => format!("{}.json?sort={}&comment={}&raw_json=1", id, sorting, val),
};

// Send a request to the url, receive JSON in response
let req = request(url).await;
match request(&path).await {
// Otherwise, grab the JSON output from the request
Ok(res) => {
// Parse the JSON into Post and Comment structs
let post = parse_post(&res[0]).await.unwrap();
let comments = parse_comments(&res[1]).await.unwrap();

// If the Reddit API returns an error, exit and send error page to user
if req.is_err() {
let s = ErrorTemplate {
message: req.err().unwrap().to_string(),
// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate { comments, post, sort }.render().unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
return Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s));
// If the Reddit API returns an error, exit and send error page to user
Err(msg) => error(msg.to_string()).await,
}

// Otherwise, grab the JSON output from the request
let res = req.unwrap();

// Parse the JSON into Post and Comment structs
let post = parse_post(res[0].clone()).await;
let comments = parse_comments(res[1].clone()).await;

// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate {
comments: comments.unwrap(),
post: post.unwrap(),
sort: sorting,
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}

// SERVICES
pub async fn short(web::Path(id): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
render(id, params.sort.clone(), None).await
}

pub async fn comment(web::Path((_sub, id, _title, comment_id)): web::Path<(String, String, String, String)>, params: web::Query<Params>) -> Result<HttpResponse> {
render(id, params.sort.clone(), Some(comment_id)).await
}

pub async fn page(web::Path((_sub, id)): web::Path<(String, String)>, params: web::Query<Params>) -> Result<HttpResponse> {
render(id, params.sort.clone(), None).await
}

// UTILITIES
@@ -79,13 +47,13 @@ async fn media(data: &serde_json::Value) -> (String, String) {
let post_type: &str;
let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
post_type = "video";
format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string()).await
format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string())
} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
post_type = "video";
format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string()).await
format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string())
} else if data["post_hint"].as_str().unwrap_or("") == "image" {
post_type = "image";
format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string()).await
format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string())
} else {
post_type = "link";
data["url"].as_str().unwrap().to_string()
@@ -95,7 +63,7 @@ async fn media(data: &serde_json::Value) -> (String, String) {
}

// POSTS
async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {
async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
// Retrieve post (as opposed to comments) from JSON
let post_data: &serde_json::Value = &json["data"]["children"][0];

@@ -109,28 +77,31 @@ async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {

// Build a post using data parsed from Reddit post API
let post = Post {
title: val(post_data, "title").await,
community: val(post_data, "subreddit").await,
body: val(post_data,"selftext_html").await,
author: val(post_data, "author").await,
title: val(post_data, "title"),
community: val(post_data, "subreddit"),
body: rewrite_url(&val(post_data, "selftext_html")),
author: val(post_data, "author"),
author_flair: Flair(
val(post_data, "author_flair_text").await,
val(post_data, "author_flair_background_color").await,
val(post_data, "author_flair_text_color").await,
val(post_data, "author_flair_text"),
val(post_data, "author_flair_background_color"),
val(post_data, "author_flair_text_color"),
),
url: val(post_data, "permalink").await,
url: val(post_data, "permalink"),
score: format_num(score),
post_type: media.0,
flair: Flair(
val(post_data, "link_flair_text").await,
val(post_data, "link_flair_background_color").await,
if val(post_data, "link_flair_text_color").await == "dark" {
val(post_data, "link_flair_text"),
val(post_data, "link_flair_background_color"),
if val(post_data, "link_flair_text_color") == "dark" {
"black".to_string()
} else {
"white".to_string()
},
),
nsfw: post_data["data"]["over_18"].as_bool().unwrap_or(false),
flags: Flags {
nsfw: post_data["data"]["over_18"].as_bool().unwrap_or(false),
stickied: post_data["data"]["stickied"].as_bool().unwrap_or(false),
},
media: media.1,
time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
};
@@ -140,7 +111,7 @@ async fn parse_post(json: serde_json::Value) -> Result<Post, &'static str> {

// COMMENTS
#[async_recursion]
async fn parse_comments(json: serde_json::Value) -> Result<Vec<Comment>, &'static str> {
async fn parse_comments(json: &serde_json::Value) -> Result<Vec<Comment>, &'static str> {
// Separate the comment JSON into a Vector of comments
let comment_data = json["data"]["children"].as_array().unwrap();

@@ -154,25 +125,25 @@ async fn parse_comments(json: serde_json::Value) -> Result<Vec<Comment>, &'stati
}

let score = comment["data"]["score"].as_i64().unwrap_or(0);
let body = val(comment, "body_html").await;
let body = rewrite_url(&val(comment, "body_html"));

let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
parse_comments(comment["data"]["replies"].clone()).await.unwrap_or(Vec::new())
parse_comments(&comment["data"]["replies"]).await.unwrap_or_default()
} else {
Vec::new()
};

comments.push(Comment {
id: val(comment, "id").await,
body: body,
author: val(comment, "author").await,
id: val(comment, "id"),
body,
author: val(comment, "author"),
score: format_num(score),
time: Utc.timestamp(unix_time, 0).format("%b %e %Y %H:%M UTC").to_string(),
replies: replies,
replies,
flair: Flair(
val(comment, "author_flair_text").await,
val(comment, "author_flair_background_color").await,
val(comment, "author_flair_text_color").await,
val(comment, "author_flair_text"),
val(comment, "author_flair_background_color"),
val(comment, "author_flair_text_color"),
),
});
}
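Note: `parse_comments` above is both recursive and `async`, which a plain `async fn` cannot express directly because its future type would be self-referential; the `async-recursion` crate (already a dependency) boxes the recursive call. A minimal, self-contained sketch of the same pattern on a Reddit-style listing follows; the tokio runtime driving it here is an assumption for the demo, not part of the project.

```rust
use async_recursion::async_recursion;
use serde_json::json;

// Same shape as parse_comments: walk a Reddit-style listing and recurse into
// each comment's "replies" object. The attribute boxes the recursive future.
#[async_recursion]
async fn count_comments(listing: &serde_json::Value) -> u64 {
    let mut total = 0;
    for child in listing["data"]["children"].as_array().unwrap_or(&Vec::new()) {
        total += 1;
        if child["data"]["replies"].is_object() {
            total += count_comments(&child["data"]["replies"]).await;
        }
    }
    total
}

#[tokio::main] // any async runtime works; tokio is assumed here purely for the demo
async fn main() {
    // One top-level comment with one nested reply ("replies" is "" when empty, as in Reddit's JSON)
    let listing = json!({ "data": { "children": [
        { "data": { "replies": { "data": { "children": [
            { "data": { "replies": "" } }
        ] } } } }
    ] } });
    assert_eq!(count_comments(&listing).await, 2);
}
```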
src/proxy.rs
@@ -5,6 +5,7 @@ use base64::decode;

pub async fn handler(web::Path(url): web::Path<String>) -> Result<HttpResponse> {
if cfg!(feature = "proxy") {
#[cfg(feature = "proxy")]
let media: String;

#[cfg(not(feature = "proxy"))]
@@ -22,7 +23,7 @@ pub async fn handler(web::Path(url): web::Path<String>) -> Result<HttpResponse>
.send()
.await
.map_err(Error::from)
.and_then(|res| Ok(HttpResponse::build(res.status()).streaming(res)))
.map(|res| HttpResponse::build(res.status()).streaming(res))
} else {
Ok(HttpResponse::Ok().body(""))
}
src/search.rs (44 changes, new file)
@@ -0,0 +1,44 @@
// CRATES
use crate::utils::{error, fetch_posts, param, Post};
use actix_web::{HttpRequest, HttpResponse, Result};
use askama::Template;

// STRUCTS
#[derive(Template)]
#[allow(dead_code)]
#[template(path = "search.html", escape = "none")]
struct SearchTemplate {
posts: Vec<Post>,
query: String,
sub: String,
sort: (String, String),
ends: (String, String),
}

// SERVICES
pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}", req.path(), req.query_string());
let q = param(&path, "q");
let sort = if param(&path, "sort").is_empty() {
"relevance".to_string()
} else {
param(&path, "sort")
};
let sub = req.match_info().get("sub").unwrap_or("").to_string();

match fetch_posts(&path, String::new()).await {
Ok(posts) => {
let s = SearchTemplate {
posts: posts.0,
query: q,
sub,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), posts.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
Err(msg) => error(msg.to_string()).await,
}
}
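Note: `find` above maps the incoming Libreddit URL onto Reddit's JSON API simply by appending `.json` to the request path and re-attaching the query string, with `sort` falling back to `relevance` when absent. A tiny illustrative sketch (the example URL is hypothetical):

```rust
fn main() {
    // Stand-ins for req.path() and req.query_string() in `find`
    let req_path = "/r/rust/search/";
    let query = "q=lifetimes";

    // Reddit JSON API path, exactly as built by `find`
    let path = format!("{}.json?{}", req_path, query);
    assert_eq!(path, "/r/rust/search/.json?q=lifetimes");

    // With no `sort` in the query string, `find` falls back to "relevance"
    println!("{}", path);
}
```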
src/subreddit.rs (152 changes)
@@ -1,8 +1,7 @@
// CRATES
use crate::utils::{fetch_posts, format_num, format_url, request, val, ErrorTemplate, Params, Post, Subreddit};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use crate::utils::{error, fetch_posts, format_num, format_url, param, request, rewrite_url, val, Post, Subreddit};
use actix_web::{HttpRequest, HttpResponse, Result};
use askama::Template;
use std::convert::TryInto;

// STRUCTS
#[derive(Template)]
@@ -10,96 +9,97 @@ use std::convert::TryInto;
struct SubredditTemplate {
sub: Subreddit,
posts: Vec<Post>,
sort: String,
sort: (String, String),
ends: (String, String),
}

// SERVICES
#[allow(dead_code)]
pub async fn page(web::Path(sub): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
render(sub, params.sort.clone(), (params.before.clone(), params.after.clone())).await
#[derive(Template)]
#[template(path = "wiki.html", escape = "none")]
struct WikiTemplate {
sub: String,
wiki: String,
page: String,
}

pub async fn render(sub_name: String, sort: Option<String>, ends: (Option<String>, Option<String>)) -> Result<HttpResponse> {
let sorting = sort.unwrap_or("hot".to_string());
let before = ends.1.clone().unwrap_or(String::new()); // If there is an after, there must be a before
// SERVICES
pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
let path = format!("{}.json?{}", req.path(), req.query_string());
let sub = req.match_info().get("sub").unwrap_or("popular").to_string();
let sort = req.match_info().get("sort").unwrap_or("hot").to_string();

// Build the Reddit JSON API url
let url = match ends.0 {
Some(val) => format!("r/{}/{}.json?before={}&count=25", sub_name, sorting, val),
None => match ends.1 {
Some(val) => format!("r/{}/{}.json?after={}&count=25", sub_name, sorting, val),
None => format!("r/{}/{}.json", sub_name, sorting),
},
let sub_result = if !&sub.contains('+') && sub != "popular" {
subreddit(&sub).await.unwrap_or_default()
} else {
Subreddit::default()
};

let sub_result = if !&sub_name.contains("+") {
subreddit(&sub_name).await
} else {
Ok(Subreddit::default())
};
let items_result = fetch_posts(url, String::new()).await;

if sub_result.is_err() || items_result.is_err() {
let s = ErrorTemplate {
message: sub_result.err().unwrap().to_string(),
match fetch_posts(&path, String::new()).await {
Ok(items) => {
let s = SubredditTemplate {
sub: sub_result,
posts: items.0,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), items.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let sub = sub_result.unwrap();
let items = items_result.unwrap();
Err(msg) => error(msg.to_string()).await,
}
}

let s = SubredditTemplate {
sub: sub,
posts: items.0,
sort: sorting,
ends: (before, items.1),
pub async fn wiki(req: HttpRequest) -> Result<HttpResponse> {
let sub = req.match_info().get("sub").unwrap_or("reddit.com");
let page = req.match_info().get("page").unwrap_or("index");
let path: String = format!("r/{}/wiki/{}.json?raw_json=1", sub, page);

match request(&path).await {
Ok(res) => {
let s = WikiTemplate {
sub: sub.to_string(),
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap()),
page: page.to_string(),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
Err(msg) => error(msg.to_string()).await,
}
}

// SUBREDDIT
async fn subreddit(sub: &String) -> Result<Subreddit, &'static str> {
async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
// Build the Reddit JSON API url
let url: String = format!("r/{}/about.json?raw_json=1", sub);
let path: String = format!("r/{}/about.json?raw_json=1", sub);

// Send a request to the url, receive JSON in response
let req = request(url).await;
// Send a request to the url
match request(&path).await {
// If success, receive JSON in response
Ok(res) => {
// Metadata regarding the subreddit
let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;

// If the Reddit API returns an error, exit this function
if req.is_err() {
return Err(req.err().unwrap());
// Fetch subreddit icon either from the community_icon or icon_img value
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or("").split('?').collect::<Vec<&str>>()[0];
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };

let sub = Subreddit {
name: val(&res, "display_name"),
title: val(&res, "title"),
description: val(&res, "public_description"),
info: rewrite_url(&val(&res, "description_html").replace("\\", "")),
icon: format_url(icon),
members: format_num(members),
active: format_num(active),
wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
};

Ok(sub)
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}

// Otherwise, grab the JSON output from the request
let res = req.unwrap();

// Metadata regarding the subreddit
let members = res["data"]["subscribers"].as_u64().unwrap_or(0);
let active = res["data"]["accounts_active"].as_u64().unwrap_or(0);

// Fetch subreddit icon either from the community_icon or icon_img value
let community_icon: &str = res["data"]["community_icon"].as_str().unwrap().split("?").collect::<Vec<&str>>()[0];
let icon = if community_icon.is_empty() {
val(&res, "icon_img").await
} else {
community_icon.to_string()
};

let sub = Subreddit {
name: val(&res, "display_name").await,
title: val(&res, "title").await,
description: val(&res, "public_description").await,
info: val(&res, "description_html").await.replace("\\", ""),
icon: format_url(icon).await,
members: format_num(members.try_into().unwrap()),
active: format_num(active.try_into().unwrap()),
};

Ok(sub)
}
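Note: in the new `page` above, subreddit metadata (`about.json`) is only requested for a single, real subreddit; multireddits (names containing `+`) and the synthetic `popular` listing fall back to `Subreddit::default()`. A small sketch of that condition (the function name is illustrative, not project code):

```rust
// Mirrors the `!&sub.contains('+') && sub != "popular"` check in subreddit::page
fn fetches_about_json(sub: &str) -> bool {
    !sub.contains('+') && sub != "popular"
}

fn main() {
    assert!(fetches_about_json("rust"));
    assert!(!fetches_about_json("rust+programming")); // multireddit
    assert!(!fetches_about_json("popular"));          // front-page listing
}
```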
src/user.rs (93 changes)
@@ -1,6 +1,6 @@
// CRATES
use crate::utils::{fetch_posts, format_url, nested_val, request, ErrorTemplate, Params, Post, User};
use actix_web::{http::StatusCode, web, HttpResponse, Result};
use crate::utils::{error, fetch_posts, format_url, nested_val, param, request, Post, User};
use actix_web::{HttpRequest, HttpResponse, Result};
use askama::Template;
use chrono::{TimeZone, Utc};

@@ -10,80 +10,67 @@ use chrono::{TimeZone, Utc};
struct UserTemplate {
user: User,
posts: Vec<Post>,
sort: String,
sort: (String, String),
ends: (String, String),
}

async fn render(username: String, sort: Option<String>, ends: (Option<String>, Option<String>)) -> Result<HttpResponse> {
let sorting = sort.unwrap_or("new".to_string());
pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
// Build the Reddit JSON API path
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());

let before = ends.1.clone().unwrap_or(String::new()); // If there is an after, there must be a before

// Build the Reddit JSON API url
let url = match ends.0 {
Some(val) => format!("user/{}/.json?sort={}&before={}&count=25&raw_json=1", username, sorting, val),
None => match ends.1 {
Some(val) => format!("user/{}/.json?sort={}&after={}&count=25&raw_json=1", username, sorting, val),
None => format!("user/{}/.json?sort={}&raw_json=1", username, sorting),
},
};
// Retrieve other variables from Libreddit request
let sort = param(&path, "sort");
let username = req.match_info().get("username").unwrap_or("").to_string();

// Request user profile data and user posts/comments from Reddit
let user = user(&username).await;
let posts = fetch_posts(url, "Comment".to_string()).await;
let posts = fetch_posts(&path, "Comment".to_string()).await;

if user.is_err() || posts.is_err() {
let s = ErrorTemplate {
message: user.err().unwrap().to_string(),
match posts {
Ok(items) => {
let s = UserTemplate {
user: user.unwrap(),
posts: items.0,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), items.1),
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
.render()
.unwrap();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
} else {
let posts_unwrapped = posts.unwrap();

let s = UserTemplate {
user: user.unwrap(),
posts: posts_unwrapped.0,
sort: sorting,
ends: (before, posts_unwrapped.1)
}
.render()
.unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
// If there is an error show error page
Err(msg) => error(msg.to_string()).await,
}
}

// SERVICES
pub async fn page(web::Path(username): web::Path<String>, params: web::Query<Params>) -> Result<HttpResponse> {
render(username, params.sort.clone(), (params.before.clone(), params.after.clone())).await
}

// USER
async fn user(name: &String) -> Result<User, &'static str> {
// Build the Reddit JSON API url
let url: String = format!("user/{}/about.json", name);
async fn user(name: &str) -> Result<User, &'static str> {
// Build the Reddit JSON API path
let path: String = format!("user/{}/about.json", name);

// Send a request to the url, receive JSON in response
let req = request(url).await;
let res;

// If the Reddit API returns an error, exit this function
if req.is_err() {
return Err(req.err().unwrap());
// Send a request to the url
match request(&path).await {
// If success, receive JSON in response
Ok(response) => {
res = response;
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}

// Otherwise, grab the JSON output from the request
let res = req.unwrap();

// Grab creation date as unix timestamp
let created: i64 = res["data"]["created"].as_f64().unwrap().round() as i64;

// Parse the JSON output into a User struct
Ok(User {
name: name.to_string(),
icon: format_url(nested_val(&res, "subreddit", "icon_img").await).await,
title: nested_val(&res, "subreddit", "title"),
icon: format_url(nested_val(&res, "subreddit", "icon_img")),
karma: res["data"]["total_karma"].as_i64().unwrap(),
created: Utc.timestamp(created, 0).format("%b %e, %Y").to_string(),
banner: nested_val(&res, "subreddit", "banner_img").await,
description: nested_val(&res, "subreddit", "public_description").await,
banner: nested_val(&res, "subreddit", "banner_img"),
description: nested_val(&res, "subreddit", "public_description"),
})
}
src/utils.rs (146 changes)
@@ -1,8 +1,12 @@
//
// CRATES
//
use actix_web::{http::StatusCode, HttpResponse, Result};
use askama::Template;
use chrono::{TimeZone, Utc};
use serde_json::{from_str, Value};
use regex::Regex;
use serde_json::from_str;
use url::Url;
// use surf::{client, get, middleware::Redirect};

#[cfg(feature = "proxy")]
@@ -13,6 +17,11 @@ use base64::encode;
//
// Post flair with text, background color and foreground color
pub struct Flair(pub String, pub String, pub String);
// Post flags with nsfw and stickied
pub struct Flags {
pub nsfw: bool,
pub stickied: bool,
}

// Post containing content, metadata and media
pub struct Post {
@@ -25,7 +34,7 @@ pub struct Post {
pub score: String,
pub post_type: String,
pub flair: Flair,
pub nsfw: bool,
pub flags: Flags,
pub media: String,
pub time: String,
}
@@ -44,6 +53,7 @@ pub struct Comment {
// User struct containing metadata about user
pub struct User {
pub name: String,
pub title: String,
pub icon: String,
pub karma: i64,
pub created: String,
@@ -61,18 +71,21 @@ pub struct Subreddit {
pub icon: String,
pub members: String,
pub active: String,
pub wiki: bool,
}

// Parser for query params, used in sorting (eg. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
pub t: Option<String>,
pub q: Option<String>,
pub sort: Option<String>,
pub after: Option<String>,
pub before: Option<String>,
}

// Error template
#[derive(askama::Template)]
#[derive(Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
pub message: String,
@@ -82,8 +95,15 @@ pub struct ErrorTemplate {
// FORMATTING
//

// Grab a query param from a url
pub fn param(path: &str, value: &str) -> String {
let url = Url::parse(format!("https://reddit.com/{}", path).as_str()).unwrap();
let pairs: std::collections::HashMap<_, _> = url.query_pairs().into_owned().collect();
pairs.get(value).unwrap_or(&String::new()).to_owned()
}

// Direct urls to proxy if proxy is enabled
pub async fn format_url(url: String) -> String {
pub fn format_url(url: String) -> String {
if url.is_empty() {
return String::new();
};
@@ -95,6 +115,12 @@ pub async fn format_url(url: String) -> String {
return url.to_string();
}

// Rewrite Reddit links to Libreddit in body of text
pub fn rewrite_url(text: &str) -> String {
let re = Regex::new(r#"href="(https://|http://|)(www.|)(reddit).(com)/"#).unwrap();
re.replace_all(text, r#"href="/"#).to_string()
}

// Append `m` and `k` for millions and thousands respectively
pub fn format_num(num: i64) -> String {
if num > 1000000 {
@@ -111,73 +137,79 @@ pub fn format_num(num: i64) -> String {
//

// val() function used to parse JSON from Reddit APIs
pub async fn val(j: &serde_json::Value, k: &str) -> String {
String::from(j["data"][k].as_str().unwrap_or(""))
pub fn val(j: &serde_json::Value, k: &str) -> String {
String::from(j["data"][k].as_str().unwrap_or_default())
}

// nested_val() function used to parse JSON from Reddit APIs
pub async fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
String::from(j["data"][n][k].as_str().unwrap())
pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
String::from(j["data"][n][k].as_str().unwrap_or_default())
}

// Fetch posts of a user or subreddit
pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
// Send a request to the url, receive JSON in response
let req = request(url.clone()).await;
pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
let res;
let post_list;

// If the Reddit API returns an error, exit this function
if req.is_err() {
return Err(req.err().unwrap());
// Send a request to the url
match request(&path).await {
// If success, receive JSON in response
Ok(response) => {
res = response;
}
// If the Reddit API returns an error, exit this function
Err(msg) => return Err(msg),
}

// Otherwise, grab the JSON output from the request
let res = req.unwrap();

// Fetch the list of posts from the JSON response
let post_list = res["data"]["children"].as_array().unwrap();
match res["data"]["children"].as_array() {
Some(list) => post_list = list,
None => return Err("No posts found"),
}

let mut posts: Vec<Post> = Vec::new();

for post in post_list {
let img = if val(post, "thumbnail").await.starts_with("https:/") {
format_url(val(post, "thumbnail").await).await
let img = if val(post, "thumbnail").starts_with("https:/") {
format_url(val(post, "thumbnail"))
} else {
String::new()
};
let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64;
let score = post["data"]["score"].as_i64().unwrap();
let title = val(post, "title").await;
let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
let score = post["data"]["score"].as_i64().unwrap_or_default();
let title = val(post, "title");

posts.push(Post {
title: if title.is_empty() { fallback_title.to_owned() } else { title },
community: val(post, "subreddit").await,
body: val(post, "body_html").await,
author: val(post, "author").await,
community: val(post, "subreddit"),
body: rewrite_url(&val(post, "body_html")),
author: val(post, "author"),
author_flair: Flair(
val(post, "author_flair_text").await,
val(post, "author_flair_background_color").await,
val(post, "author_flair_text_color").await,
val(post, "author_flair_text"),
val(post, "author_flair_background_color"),
val(post, "author_flair_text_color"),
),
score: format_num(score),
post_type: "link".to_string(),
media: img,
flair: Flair(
val(post, "link_flair_text").await,
val(post, "link_flair_background_color").await,
if val(post, "link_flair_text_color").await == "dark" {
val(post, "link_flair_text"),
val(post, "link_flair_background_color"),
if val(post, "link_flair_text_color") == "dark" {
"black".to_string()
} else {
"white".to_string()
},
),
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
url: val(post, "permalink").await,
flags: Flags {
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
},
url: val(post, "permalink"),
time: Utc.timestamp(unix_time, 0).format("%b %e '%y").to_string(),
});
}

dbg!(url);

Ok((posts, res["data"]["after"].as_str().unwrap_or("").to_string()))
}

@@ -185,9 +217,15 @@ pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec<Pos
// NETWORKING
//

pub async fn error(message: String) -> Result<HttpResponse> {
let msg = if message.is_empty() { "Page not found".to_string() } else { message };
let body = ErrorTemplate { message: msg }.render().unwrap_or_default();
Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body))
}

// Make a request to a Reddit API and parse the JSON response
pub async fn request(mut url: String) -> Result<serde_json::Value, &'static str> {
url = format!("https://www.reddit.com/{}", url);
pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
let url = format!("https://www.reddit.com/{}", path);

// --- actix-web::client ---
// let client = actix_web::client::Client::default();
@@ -211,20 +249,22 @@ pub async fn request(mut url: String) -> Result<serde_json::Value, &'static str>
// --- reqwest ---
let res = reqwest::get(&url).await.unwrap();
// Read the status from the response
let success = res.status().is_success();
// Read the body of the response
let body = res.text().await.unwrap();

// Parse the response from Reddit as JSON
let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);

if !success {
println!("! {} - {}", url, "Page not found");
Err("Page not found")
} else if json == Value::Null {
println!("! {} - {}", url, "Failed to parse page JSON data");
Err("Failed to parse page JSON data")
} else {
Ok(json)
match res.status().is_success() {
true => {
// Parse the response from Reddit as JSON
match from_str(res.text().await.unwrap_or_default().as_str()) {
Ok(json) => Ok(json),
Err(_) => {
#[cfg(debug_assertions)]
dbg!(format!("{} - Failed to parse page JSON data", url));
Err("Failed to parse page JSON data")
}
}
}
false => {
#[cfg(debug_assertions)]
dbg!(format!("{} - Page not found", url));
Err("Page not found")
}
}
}
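Note: the refactor above turns `val`, `nested_val` and `format_url` into plain synchronous helpers and introduces two new ones, `param` and `rewrite_url`. Below is a self-contained sketch of the two new helpers, copied from the diff and driven by a small `main`; it assumes only the `url` and `regex` crates added in Cargo.toml.

```rust
use regex::Regex;
use url::Url;

// Grab a query param from a path such as "r/rust/search.json?q=lifetimes&sort=new"
fn param(path: &str, value: &str) -> String {
    let url = Url::parse(format!("https://reddit.com/{}", path).as_str()).unwrap();
    let pairs: std::collections::HashMap<_, _> = url.query_pairs().into_owned().collect();
    pairs.get(value).unwrap_or(&String::new()).to_owned()
}

// Rewrite absolute Reddit links inside an HTML body to relative Libreddit links
fn rewrite_url(text: &str) -> String {
    let re = Regex::new(r#"href="(https://|http://|)(www.|)(reddit).(com)/"#).unwrap();
    re.replace_all(text, r#"href="/"#).to_string()
}

fn main() {
    assert_eq!(param("r/rust/search.json?q=lifetimes&sort=new", "sort"), "new");
    assert_eq!(param("r/rust/search.json?q=lifetimes", "after"), ""); // missing params fall back to ""
    assert_eq!(
        rewrite_url(r#"<a href="https://www.reddit.com/r/rust/">r/rust</a>"#),
        r#"<a href="/r/rust/">r/rust</a>"#
    );
    println!("ok");
}
```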
static/style.css (214 changes)
@@ -10,45 +10,59 @@
--black-contrast: 0 1px 3px rgba(0,0,0,0.5);
}

::selection {
color: var(--background);
background: var(--accent);
}

* {
transition: 0.2s all;
margin: 0;
color: white;
font-family: sans-serif;
outline: none;
}

body {
background: var(--background);
visibility: visible !important;
}

nav {
display: flex;
justify-content: space-between;
align-items: center;
color: var(--accent);
background: var(--outside);
padding: 15px;
padding: 5px 15px;
font-size: 20px;
min-height: 40px;
}

#lib, #github {
color: white;
}
nav #lib, nav #github, nav #version { color: white; }
nav #version { opacity: 25%; }

main {
display: flex;
justify-content: center;
max-width: 750px;
max-width: 1000px;
padding: 10px 20px;
margin: 20px auto;
}

#column_one {
max-width: 750px;
border-radius: 5px;
overflow: hidden;
}

footer {
display: flex;
justify-content: center;
}

footer > a {
margin-right: 5px;
}

button {
background: none;
border: none;
@@ -78,11 +92,6 @@ aside {
max-width: 350px;
}

#version {
color: white;
opacity: 25%;
}

/* User & Subreddit */

#user, #subreddit, #sidebar {
@@ -94,18 +103,22 @@ aside {
height: max-content;
background: var(--outside);
border-radius: 5px;
overflow: hidden;
}

#sidebar, #sidebar_contents {
margin-top: 20px;
}
#user *, #subreddit * { text-align: center; }

#subreddit { padding: 0; }
#sub_meta { padding: 20px; }

#sidebar, #sidebar_contents { margin-top: 20px; }

#sidebar_label {
border: 2px solid var(--highlighted);
padding: 10px;
}

#user_icon, #subreddit_icon {
#user_icon, #sub_icon {
width: 100px;
height: 100px;
border: 2px solid var(--accent);
@@ -114,70 +127,161 @@ aside {
margin: 10px;
}

#user_name, #subreddit_name {
margin-top: 10px;
#user_title, #sub_title {
margin: 0 20px;
font-size: 20px;
font-weight: bold;
}

#user_description, #subreddit_description {
margin: 10px 20px;
text-align: center;
#user_description, #sub_description {
margin: 0 20px;
font-size: 15px;
}

#user_details, #subreddit_details {
#user_name, #sub_name, #user_icon, #sub_icon, #user_description, #sub_description { margin-bottom: 20px; }

#user_details, #sub_details {
display: grid;
grid-template-columns: repeat(2, 1fr);
margin-top: 15px;
grid-column-gap: 20px;
}

#user_details > label, #subreddit_details > label {
#user_details > label, #sub_details > label {
color: var(--accent);
font-size: 15px;
}

/* Sorting */
/* Wiki Pages */

#sort {
#wiki {
background: var(--foreground);
padding: 35px;
}

#top {
background: var(--highlighted);
font-size: 18px;
width: 100%;
display: flex;
}

#top > * {
flex-grow: 1;
text-align: center;
height: 40px;
line-height: 40px;
}

#top > div {
border-bottom: 2px solid white;
}

/* Sorting and Search */

select {
background: var(--outside);
box-shadow: var(--black-contrast);
}

select, #search {
border: 0;
padding: 0 15px;
margin-bottom: 20px;
height: 40px;
font-size: 15px;
border-radius: 5px 0 0 5px;
appearance: none;
border-radius: 5px 0px 0px 5px;
}

#sort_submit {
#searchbox {
display: flex;
box-shadow: var(--black-contrast);
}

#searchbox > *, #sort_submit {
background: var(--highlighted);
border: 0;
font-size: 15px;
height: 40px;
border-radius: 0 5px 5px 0;
}

#sort:hover { background: var(--foreground); }
#sort_submit:hover { color: var(--accent); }
#search {
border-right: 2px var(--outside) solid;
min-width: 0;
flex-grow: 1;
}

#sort > div, footer > a {
#inside {
display: flex;
font-size: 15px;
align-items: center;
border-right: 2px var(--outside) solid;
height: 40px;
padding: 0 10px;
}

#restrict_sr { margin-right: 5px; }

input[type="submit"] {
border: 0;
border-radius: 0px 5px 5px 0px;
}

select:hover { background: var(--foreground); }
input[type="submit"]:hover { color: var(--accent); }

#timeframe {
border-radius: 5px 0px 0px 5px;
margin-left: 10px;
}

#search_sort {
background: var(--highlighted);
border-radius: 5px;
overflow: auto;
}

#search_sort > #search {
border: 0;
background: transparent;
}

#search_sort > :not(:first-child), #search_sort > #sort_options {
margin: 0;
border-radius: 0;
border-right: 0;
border-left: 2px solid var(--background);
box-shadow: none;
background: transparent;
}

#sort_options {
height: 40px;
}

#sort, #search_sort {
display: flex;
align-items: center;
margin-bottom: 20px;
}

#sort_options, footer > a {
border-radius: 5px;
box-shadow: var(--black-contrast);
background: var(--outside);
display: flex;
overflow: auto;
}

#sort_options > a, footer > a {
color: lightgrey;
border-radius: 5px;
margin-right: 5px;
padding: 10px 20px;
text-align: center;
cursor: pointer;
}

#sort > div.selected {
#sort_options > a.selected {
background: var(--accent);
color: black;
}

#sort > div:hover {
#sort_options > a:not(.selected):hover {
background: var(--foreground);
}

@@ -205,7 +309,7 @@ aside {
.post_left, .post_right {
display: flex;
flex-direction: column;
overflow-wrap: anywhere;
overflow-wrap: break-word;
}

.post_left {
@@ -232,12 +336,19 @@ aside {
font-weight: bold;
}

.stickied {
--accent: #5cff85;
border: 1px solid #5cff85;
padding: 5px;
}

.post_subreddit {
font-weight: bold;
}

.post_title {
font-size: 18px;
line-height: 1.5;
}

.post_right {
@@ -278,7 +389,6 @@ aside {
background: var(--accent);
color: black;
padding: 5px;
margin-right: 5px;
border-radius: 5px;
font-size: 12px;
font-weight: bold;
@@ -422,11 +532,11 @@ aside {
}

.md a {
text-decoration: underline;
color: var(--accent);
}

.md li { margin: 10px 0; }
.toc_child { list-style: none; }

.md pre {
background: var(--outside);
@@ -489,16 +599,16 @@ td, th {
}

@media screen and (max-width: 800px) {
main {
flex-direction: column-reverse;
}
main { flex-direction: column-reverse; }
nav { flex-direction: column; }

aside {
margin: 20px 0 0 0;
aside, #subreddit, #user {
margin: 0;
max-width: 100%;
}

#sidebar {
margin: 20px 0;
}
}
#user, #sidebar { margin: 20px 0; }
#logo { margin: 5px auto; }
#searchbox { width: 100%; }
#github { display: none; }
}
@@ -4,20 +4,20 @@
{% block head %}
    <title>{% block title %}Libreddit{% endblock %}</title>
    <meta http-equiv="Referrer-Policy" content="no-referrer">
    <meta http-equiv="Content-Security-Policy" content="default-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; form-action 'self';">
    <meta http-equiv="Content-Security-Policy" content="default-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; form-action 'self';
        {% if cfg!(not(feature = "proxy")) %}img-src https://*; media-src https://*{% endif %}">
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
    <meta name="description" content="View on Libreddit, an alternative private front-end to Reddit.">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <link rel="stylesheet" href="/style.css">
{% endblock %}
</head>
<body style="visibility: hidden;">
{% block navbar %}
</head>
<body>
    <nav>
        <a href="/"><span id="lib">lib</span>reddit. <span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span></a>
        <a id="logo" href="/"><span id="lib">lib</span>reddit. <span id="version">v{{ env!("CARGO_PKG_VERSION") }}</span></a>
        {% block search %}{% endblock %}
        <a id="github" href="https://github.com/spikecodes/libreddit">GITHUB</a>
    </nav>
{% endblock %}

{% block body %}
    <main>

@@ -1,48 +0,0 @@
{% extends "base.html" %}
{% block content %}
<div id="column_one">
    <form>
        <select id="sort" name="sort">
            <option value="confidence" {% if sort == "confidence" %}selected{% endif %}>Best</option>
            <option value="hot" {% if sort == "hot" %}selected{% endif %}>Hot</option>
            <option value="new" {% if sort == "new" %}selected{% endif %}>New</option>
            <option value="top" {% if sort == "top" %}selected{% endif %}>Top</option>
        </select><input id="sort_submit" type="submit" value="→">
    </form>
    {% for post in posts %}
    <div class="post">
        <div class="post_left">
            <p class="post_score">{{ post.score }}</p>
            {% if post.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
        </div>
        <div class="post_right">
            <p>
                <b><a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a></b>
                • <a class="post_author" href="/u/{{ post.author }}">u/{{ post.author }}</a>
                {% if post.author_flair.0 != "" %}
                <small class="author_flair">{{ post.author_flair.0 }}</small>
                {% endif %}
                <span class="datetime" style="float: right;">{{ post.time }}</span>
            </p>
            <p class="post_title">
                {% if post.flair.0 != "" %}
                <small class="post_flair" style="color:{{ post.flair.2 }}; background:{{ post.flair.1 }}">{{ post.flair.0 }}</small>
                {% endif %}
                <a href="{{ post.url }}">{{ post.title }}</a>
            </p>
        </div>
        <img class="post_thumbnail" src="{{ post.media }}">
    </div><br>
    {% endfor %}

    <footer>
        {% if ends.0 != "" %}
        <a href="?sort={{ sort }}&before={{ ends.0 }}">PREV</a>
        {% endif %}

        {% if ends.1 != "" %}
        <a href="?sort={{ sort }}&after={{ ends.1 }}">NEXT</a>
        {% endif %}
    </footer>
</div>
{% endblock %}

@@ -1,5 +1,13 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}

{% block title %}{{ post.title }} - r/{{ post.community }}{% endblock %}

{% block search %}
    {% call utils::search(["/r/", post.community.as_str()].concat(), "") %}
{% endblock %}

{% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %}
{% block head %}
    {% call super() %}
    <meta name="author" content="u/{{ post.author }}">

@@ -28,7 +36,7 @@
    <div class="post highlighted">
        <div class="post_left">
            <p class="post_score">{{ post.score }}</p>
            {% if post.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
            {% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
        </div>
        <div class="post_right">
            <p>

@@ -56,13 +64,9 @@
            <div class="post_body">{{ post.body }}</div>
        </div>
    </div>
    <form>
        <select id="sort" name="sort">
            <option value="confidence" {% if sort == "confidence" %}selected{% endif %}>Best</option>
            <option value="top" {% if sort == "top" %}selected{% endif %}>Top</option>
            <option value="new" {% if sort == "new" %}selected{% endif %}>New</option>
            <option value="controversial" {% if sort == "controversial" %}selected{% endif %}>Controversial</option>
            <option value="old" {% if sort == "old" %}selected{% endif %}>Old</option>
    <form id="sort">
        <select name="sort">
            {% call utils::options(sort, ["confidence", "top", "new", "controversial", "old"], "") %}
        </select><input id="sort_submit" type="submit" value="→">
    </form>

74  templates/search.html  Normal file
@@ -0,0 +1,74 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}

{% block title %}Libreddit: search results - {{ query }}{% endblock %}

{% block content %}
<div id="column_one">
    <form id="search_sort">
        <input id="search" type="text" name="q" placeholder="Search" value="{{ query }}">
        {% if sub != "" %}
        <div id="inside">
            <input type="checkbox" name="restrict_sr" id="restrict_sr" checked="checked" data-com.bitwarden.browser.user-edited="yes">
            <label for="restrict_sr">in r/{{ sub }}</label>
        </div>
        {% endif %}
        <select id="sort_options" name="sort">
            {% call utils::options(sort.0, ["relevance", "hot", "top", "new", "comments"], "") %}
        </select>{% if sort.0 != "new" %}<select id="timeframe" name="t">
            {% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
        </select>{% endif %}<input id="sort_submit" type="submit" value="→">
    </form>
    {% for post in posts %}
    {% if post.title != "Comment" %}
    <div class="post">
        <div class="post_left">
            <p class="post_score">{{ post.score }}</p>
            {% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
        </div>
        <div class="post_right">
            <p>
                <b><a class="post_subreddit" href="/r/{{ post.community }}">r/{{ post.community }}</a></b>
                • <a class="post_author" href="/u/{{ post.author }}">u/{{ post.author }}</a>
                {% if post.author_flair.0 != "" %}
                <small class="author_flair">{{ post.author_flair.0 }}</small>
                {% endif %}
                <span class="datetime" style="float: right;">{{ post.time }}</span>
            </p>
            <p class="post_title">
                {% if post.flair.0 != "" %}
                <small class="post_flair" style="color:{{ post.flair.2 }}; background:{{ post.flair.1 }}">{{ post.flair.0 }}</small>
                {% endif %}
                <a href="{{ post.url }}">{{ post.title }}</a>
            </p>
        </div>
        <img class="post_thumbnail" src="{{ post.media }}">
    </div><br>
    {% else %}
    <div class="comment">
        <div class="comment_left">
            <p class="comment_score">{{ post.score }}</p>
            <div class="line"></div>
        </div>
        <details class="comment_right" open>
            <summary class="comment_data">
                <a class="comment_link" href="{{ post.url }}">COMMENT</a>
                <span class="datetime">{{ post.time }}</span>
            </summary>
            <p class="comment_body">{{ post.body }}</p>
        </details>
    </div><br>
    {% endif %}
    {% endfor %}

    <footer>
        {% if ends.0 != "" %}
        <a href="?sort={{ sort.0 }}{% if sort.0 == "top" %}&t={{ sort.1 }}{% endif %}&before={{ ends.0 }}">PREV</a>
        {% endif %}

        {% if ends.1 != "" %}
        <a href="?sort={{ sort.0 }}{% if sort.0 == "top" %}&t={{ sort.1 }}{% endif %}&after={{ ends.1 }}">NEXT</a>
        {% endif %}
    </footer>
</div>
{% endblock %}

@@ -1,24 +1,37 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}

{% if sub.name != "" %}
{% block title %}r/{{ sub.name }}: {{ sub.description }}{% endblock %}
{% endif %}
{% block title %}
    {% if sub.title != "" %}{{ sub.title }}
    {% else if sub.name != "" %}{{ sub.name }}
    {% else %}Libreddit{% endif %}
{% endblock %}

{% block search %}
    {% call utils::search(["/r/", sub.name.as_str()].concat(), "") %}
{% endblock %}

{% block body %}
<main style="max-width: 1000px;">
<main>
    <div id="column_one">
        <form>
            <select id="sort" name="sort">
                <option value="hot" {% if sort == "hot" %}selected{% endif %}>Hot</option>
                <option value="new" {% if sort == "new" %}selected{% endif %}>New</option>
                <option value="top" {% if sort == "top" %}selected{% endif %}>Top</option>
            </select><input id="sort_submit" type="submit" value="→">
        <form id="sort">
            <div id="sort_options">
                {% if sub.name.is_empty() %}
                {% call utils::sort("", ["hot", "new", "top", "rising"], sort.0) %}
                {% else %}
                {% call utils::sort(["/r/", sub.name.as_str()].concat(), ["hot", "new", "top", "rising"], sort.0) %}
                {% endif %}
            </div>
            {% if sort.0 == "top" %}<select id="timeframe" name="t">
                {% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "day") %}
                <input id="sort_submit" type="submit" value="→">
            </select>{% endif %}
        </form>
        {% for post in posts %}
        <div class="post">
        <div class="post {% if post.flags.stickied %}stickied{% endif %}">
            <div class="post_left">
                <p class="post_score">{{ post.score }}</p>
                {% if post.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
                {% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
            </div>
            <div class="post_right">
                <p>

@@ -42,25 +55,34 @@

        <footer>
            {% if ends.0 != "" %}
            <a href="?sort={{ sort }}&before={{ ends.0 }}">PREV</a>
            <a href="?sort={{ sort.0 }}&before={{ ends.0 }}">PREV</a>
            {% endif %}

            {% if ends.1 != "" %}
            <a href="?sort={{ sort }}&after={{ ends.1 }}">NEXT</a>
            <a href="?sort={{ sort.0 }}&after={{ ends.1 }}">NEXT</a>
            {% endif %}
        </footer>
    </div>
    {% if sub.name != "" %}
    <aside>
        <div id="subreddit">
            <img id="subreddit_icon" src="{{ sub.icon }}">
            <p id="subreddit_name">r/{{ sub.name }}</p>
            <p id="subreddit_description">{{ sub.description }}</p>
            <div id="subreddit_details">
                <label>Members</label>
                <label>Active</label>
                <div>{{ sub.members }}</div>
                <div>{{ sub.active }}</div>
        {% if sub.wiki %}
        <div id="top">
            <div>Posts</div>
            <a href="/r/{{ sub.name }}/wiki/index">Wiki</a>
        </div>
        {% endif %}
        <div id="sub_meta">
            <img id="sub_icon" src="{{ sub.icon }}">
            <p id="sub_title">{{ sub.title }}</p>
            <p id="sub_name">r/{{ sub.name }}</p>
            <p id="sub_description">{{ sub.description }}</p>
            <div id="sub_details">
                <label>Members</label>
                <label>Active</label>
                <div>{{ sub.members }}</div>
                <div>{{ sub.active }}</div>
            </div>
        </div>
    </div>
    <details id="sidebar">

@@ -1,21 +1,27 @@
{% extends "base.html" %}
{% block title %}Libreddit: u/{{ user.name }}{% endblock %}
{% import "utils.html" as utils %}

{% block search %}
    {% call utils::search("".to_owned(), "", "") %}
{% endblock %}

{% block title %}{{ user.name.replace("u/", "") }} (u/{{ user.name }}) - Libreddit{% endblock %}
{% block body %}
<main style="max-width: 1000px;">
    <div id="column_one">
        <form>
            <select id="sort" name="sort">
                <option value="hot" {% if sort == "hot" %}selected{% endif %}>Hot</option>
                <option value="new" {% if sort == "new" %}selected{% endif %}>New</option>
                <option value="top" {% if sort == "top" %}selected{% endif %}>Top</option>
            </select><input id="sort_submit" type="submit" value="→">
        <form id="sort">
            <select name="sort">
                {% call utils::options(sort.0, ["hot", "new", "top"], "") %}
            </select>{% if sort.0 == "top" %}<select id="timeframe" name="t">
                {% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
            </select>{% endif %}<input id="sort_submit" type="submit" value="→">
        </form>
        {% for post in posts %}
        {% if post.title != "Comment" %}
        <div class='post'>
            <div class="post_left">
                <p class="post_score">{{ post.score }}</p>
                {% if post.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
                {% if post.flags.nsfw %}<div class="nsfw">NSFW</div>{% endif %}
            </div>
            <div class="post_right">
                <p>

@@ -55,17 +61,18 @@
        {% endfor %}
        <footer>
            {% if ends.0 != "" %}
            <a href="?sort={{ sort }}&before={{ ends.0 }}">PREV</a>
            <a href="?sort={{ sort.0 }}&before={{ ends.0 }}">PREV</a>
            {% endif %}

            {% if ends.1 != "" %}
            <a href="?sort={{ sort }}&after={{ ends.1 }}">NEXT</a>
            <a href="?sort={{ sort.0 }}&after={{ ends.1 }}">NEXT</a>
            {% endif %}
        </footer>
    </div>
    <aside>
        <div id="user">
            <img id="user_icon" src="{{ user.icon }}">
            <p id="user_title">{{ user.title }}</p>
            <p id="user_name">u/{{ user.name }}</p>
            <div id="user_description">{{ user.description }}</div>
            <div id="user_details">

28  templates/utils.html  Normal file
@@ -0,0 +1,28 @@
{% macro options(current, values, default) -%}
    {% for value in values %}
    <option value="{{ value }}" {% if current == value || (current == "" && value == default) %}selected{% endif %}>
        {{ format!("{}{}", value.get(0..1).unwrap().to_uppercase(), value.get(1..).unwrap()) }}
    </option>
    {% endfor %}
{%- endmacro %}

{% macro sort(root, methods, selected) -%}
    {% for method in methods %}
    <a {% if method == selected %}class="selected"{% endif %} href="{{ root }}/{{ method }}">
        {{ format!("{}{}", method.get(0..1).unwrap().to_uppercase(), method.get(1..).unwrap()) }}
    </a>
    {% endfor %}
{%- endmacro %}

{% macro search(root, search) -%}
    <form action="{% if root != "/r/" && !root.is_empty() %}{{ root }}{% endif %}/search/" id="searchbox">
        <input id="search" type="text" name="q" placeholder="Search" value="{{ search }}">
        {% if root != "/r/" && !root.is_empty() %}
        <div id="inside">
            <input type="checkbox" name="restrict_sr" id="restrict_sr" checked="checked" data-com.bitwarden.browser.user-edited="yes">
            <label for="restrict_sr">in {{ root }}</label>
        </div>
        {% endif %}
        <input type="submit" value="→">
    </form>
{%- endmacro %}

25  templates/wiki.html  Normal file
@@ -0,0 +1,25 @@
{% extends "base.html" %}
{% import "utils.html" as utils %}

{% block title %}
    {% if sub != "" %}{{ page }} - {{ sub }}
    {% else %}Libreddit{% endif %}
{% endblock %}

{% block search %}
    {% call utils::search(["/r/", sub.as_str()].concat(), "") %}
{% endblock %}

{% block body %}
<main>
    <div id="column_one">
        <div id="top">
            <a href="/r/{{ sub }}">Posts</a>
            <div>Wiki</div>
        </div>
        <div id="wiki">
            {{ wiki }}
        </div>
    </div>
</main>
{% endblock %}
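
A minimal usage sketch for the utils.html macros introduced in this changeset (not part of the diff itself): pages import the module and invoke the macros with Askama's call syntax. The calls below are copied from the templates above; sort.0, sort.1 and sub.name are assumed to be fields exposed by the page's template struct.

    {% import "utils.html" as utils %}

    <!-- options(): emits capitalized <option> tags, marking the current value (or the default when current is empty) as selected -->
    <select id="timeframe" name="t">
        {% call utils::options(sort.1, ["hour", "day", "week", "month", "year", "all"], "all") %}
    </select>

    <!-- sort(): emits one link per sort method under the given root, tagging the active method with class="selected" -->
    {% call utils::sort(["/r/", sub.name.as_str()].concat(), ["hot", "new", "top", "rising"], sort.0) %}

    <!-- search(): emits the #searchbox form, adding the "restrict to subreddit" checkbox when a subreddit root is passed -->
    {% call utils::search(["/r/", sub.name.as_str()].concat(), "") %}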