commit d7839899e6

Cargo.lock (generated — 89 changes)

@@ -31,7 +31,7 @@ dependencies = [
 "futures-util",
 "http",
 "log",
-"rustls",
+"rustls 0.18.1",
 "tokio-rustls",
 "trust-dns-proto",
 "trust-dns-resolver",

@@ -193,10 +193,10 @@ dependencies = [
 "actix-service",
 "actix-utils",
 "futures-util",
-"rustls",
+"rustls 0.18.1",
 "tokio-rustls",
 "webpki",
-"webpki-roots",
+"webpki-roots 0.20.0",
 ]

 [[package]]

@@ -249,7 +249,7 @@ dependencies = [
 "mime",
 "pin-project 1.0.4",
 "regex",
-"rustls",
+"rustls 0.18.1",
 "serde",
 "serde_json",
 "serde_urlencoded",

@@ -393,7 +393,7 @@ dependencies = [
 "mime",
 "percent-encoding",
 "rand",
-"rustls",
+"rustls 0.18.1",
 "serde",
 "serde_json",
 "serde_urlencoded",

@@ -529,6 +529,12 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

+[[package]]
+name = "chunked_transfer"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7477065d45a8fe57167bf3cf8bcd3729b54cfcb81cca49bda2d038ea89ae82ca"
+
 [[package]]
 name = "const_fn"
 version = "0.4.5"

@@ -672,9 +678,9 @@ checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"

 [[package]]
 name = "futures"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "309f13e3f4be6d5917178c84db67c0b9a09177ac16d4f9a7313a767a68adaa77"
+checksum = "da9052a1a50244d8d5aa9bf55cbc2fb6f357c86cc52e46c62ed390a7180cf150"
 dependencies = [
 "futures-channel",
 "futures-core",

@@ -686,9 +692,9 @@ dependencies = [

 [[package]]
 name = "futures-channel"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a3b03bd32f6ec7885edeb99acd1e47e20e34fd4dfd3c6deed6fcac8a9d28f6a"
+checksum = "f2d31b7ec7efab6eefc7c57233bb10b847986139d88cc2f5a02a1ae6871a1846"
 dependencies = [
 "futures-core",
 "futures-sink",

@@ -696,21 +702,21 @@ dependencies = [

 [[package]]
 name = "futures-core"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed8aeae2b6ab243ebabe6f54cd4cf53054d98883d5d326128af7d57a9ca5cd3d"
+checksum = "79e5145dde8da7d1b3892dad07a9c98fc04bc39892b1ecc9692cf53e2b780a65"

 [[package]]
 name = "futures-io"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d41234e71d5e8ca73d01563974ef6f50e516d71e18f1a2f1184742e31f5d469f"
+checksum = "28be053525281ad8259d47e4de5de657b25e7bac113458555bb4b70bc6870500"

 [[package]]
 name = "futures-macro"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3520e0eb4e704e88d771b92d51273ee212997f0d8282f17f5d8ff1cb39104e42"
+checksum = "c287d25add322d9f9abdcdc5927ca398917996600182178774032e9f8258fedd"
 dependencies = [
 "proc-macro-hack",
 "proc-macro2",

@@ -720,24 +726,24 @@ dependencies = [

 [[package]]
 name = "futures-sink"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c72d188479368953c6c8c7140e40d7a4401674ab3b98a41e60e515d6cbdbe5de"
+checksum = "caf5c69029bda2e743fddd0582d1083951d65cc9539aebf8812f36c3491342d6"

 [[package]]
 name = "futures-task"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08944cea9021170d383287169859c0ca8147d9ec285978393109954448f33cc7"
+checksum = "13de07eb8ea81ae445aca7b69f5f7bf15d7bf4912d8ca37d6645c77ae8a58d86"
 dependencies = [
 "once_cell",
 ]

 [[package]]
 name = "futures-util"
-version = "0.3.10"
+version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3dd206efbe2ca683b2ce138ccdf61e1b0a63f5816dcedc9d8654c500ba0cea6"
+checksum = "632a8cd0f2a4b3fdea1657f08bde063848c3bd00f9bbf6e256b8be78802e624b"
 dependencies = [
 "futures-channel",
 "futures-core",

@@ -986,6 +992,7 @@ dependencies = [
 "serde",
 "serde_json",
 "time",
+"ureq",
 "url",
 ]


@@ -1413,6 +1420,19 @@ dependencies = [
 "webpki",
 ]

+[[package]]
+name = "rustls"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "064fd21ff87c6e87ed4506e68beb42459caa4a0e2eb144932e6776768556980b"
+dependencies = [
+"base64 0.13.0",
+"log",
+"ring",
+"sct",
+"webpki",
+]
+
 [[package]]
 name = "ryu"
 version = "1.0.5"

@@ -1749,7 +1769,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e12831b255bcfa39dc0436b01e19fea231a37db570686c06ee72c423479f889a"
 dependencies = [
 "futures-core",
-"rustls",
+"rustls 0.18.1",
 "tokio",
 "webpki",
 ]

@@ -1890,6 +1910,22 @@ version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"

+[[package]]
+name = "ureq"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96014ded8c85822677daee4f909d18acccca744810fd4f8ffc492c284f2324bc"
+dependencies = [
+"base64 0.13.0",
+"chunked_transfer",
+"log",
+"once_cell",
+"rustls 0.19.0",
+"url",
+"webpki",
+"webpki-roots 0.21.0",
+]
+
 [[package]]
 name = "url"
 version = "2.2.0"

@@ -1997,6 +2033,15 @@ dependencies = [
 "webpki",
 ]

+[[package]]
+name = "webpki-roots"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82015b7e0b8bad8185994674a13a93306bea76cf5a16c5a181382fd3a5ec2376"
+dependencies = [
+"webpki",
+]
+
 [[package]]
 name = "widestring"
 version = "0.4.3"

Cargo.toml

@@ -11,6 +11,7 @@ edition = "2018"
 base64 = "0.13.0"
 actix-web = { version = "3.3.2", features = ["rustls"] }
 askama = "0.10.5"
+ureq = "2.0.1"
 serde = { version = "1.0.118", default_features = false, features = ["derive"] }
 serde_json = "1.0"
 async-recursion = "0.3.1"
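
The one new direct dependency above is ureq, a small blocking HTTP client built on rustls (the rustls 0.19 and webpki-roots 0.21 entries in Cargo.lock come in through it). A minimal sketch of the ureq 2.x call pattern the commit adopts; the URL and JSON keys are illustrative, not taken from the commit:

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Blocking GET; in ureq 2.x, call() returns Err both for transport
        // failures and for non-2xx HTTP statuses
        let body = ureq::get("https://www.reddit.com/r/rust/hot.json")
            .call()?
            .into_string()?;
        // Parse the body with serde_json, as src/utils.rs does below
        let json: serde_json::Value = serde_json::from_str(&body)?;
        println!("{} posts", json["data"]["children"].as_array().map_or(0, |a| a.len()));
        Ok(())
    }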

README.md

@@ -2,14 +2,14 @@

 > An alternative private front-end to Reddit

-Libre + Reddit = [Libreddit](https://libredd.it)
+Libre + Reddit = [Libreddit](https://libreddit.spike.codes)

 - 🚀 Fast: written in Rust for blazing fast speeds and safety
 - ☁️ Light: no JavaScript, no ads, no tracking
 - 🕵 Private: all requests are proxied through the server, including media
 - 🔒 Secure: strong [Content Security Policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) prevents browser requests to Reddit

-Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libredd.it/r/unpopularopinion) without being [tracked](#reddit).
+Like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit).

 ## Contents
 - [Screenshot](#screenshot)

@@ -123,11 +123,11 @@ Results from Google Lighthouse ([Libreddit Report](https://lighthouse-dot-webdot

 For transparency, I hope to describe all the ways Libreddit handles user privacy.

-**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs nothing. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid troubleshooting but nothing else.
+**Logging:** In production (when running the binary, hosting with docker, or using the official instances), Libreddit logs errors. When debugging (running from source without `--release`), Libreddit logs post IDs and URL paths fetched to aid with troubleshooting.

 **DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare meaning Cloudflare doesn't have access to user traffic.

-**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libredd.it/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.
+**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.

 **Hosting:** The official instances (`libredd.it` and `libreddit.spike.codes`) are hosted on [Repl.it](https://repl.it/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.


src/main.rs (10 changes)

@@ -58,7 +58,15 @@ async fn main() -> std::io::Result<()> {
 			// Proxy media through Libreddit
 			.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
 			// Browse user profile
-			.route("/{scope:u|user}/{username}/", web::get().to(user::profile))
+			.service(
+				web::scope("/{scope:user|u}").service(
+					web::scope("/{username}").route("/", web::get().to(user::profile)).service(
+						web::scope("/comments/{id}/{title}")
+							.route("/", web::get().to(post::item))
+							.route("/{comment_id}/", web::get().to(post::item)),
+					),
+				),
+			)
 			// Configure settings
 			.service(web::resource("/settings/").route(web::get().to(settings::get)).route(web::post().to(settings::set)))
 			// Subreddit services
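
For orientation, the nested scopes above resolve user URLs roughly as follows; the username is a hypothetical example value, and `{id}`, `{title}`, and `{comment_id}` stand for the path parameters:

    // /user/spez/                                    -> user::profile
    // /u/spez/                                       -> user::profile
    // /user/spez/comments/{id}/{title}/              -> post::item
    // /user/spez/comments/{id}/{title}/{comment_id}/ -> post::item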

src/search.rs

@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::{error, fetch_posts, param, prefs, Post, Preferences, request, val};
+use crate::utils::{error, fetch_posts, param, prefs, request, val, Post, Preferences};
 use actix_web::{HttpRequest, HttpResponse};
 use askama::Template;

@@ -34,42 +34,19 @@ struct SearchTemplate {
 // SERVICES
 pub async fn find(req: HttpRequest) -> HttpResponse {
 	let path = format!("{}.json?{}", req.path(), req.query_string());
+	let sub = req.match_info().get("sub").unwrap_or("").to_string();
+
 	let sort = if param(&path, "sort").is_empty() {
 		"relevance".to_string()
 	} else {
 		param(&path, "sort")
 	};
-	let sub = req.match_info().get("sub").unwrap_or("").to_string();
-	let mut subreddits: Vec<Subreddit> = Vec::new();

-	if param(&path, "restrict_sr") == "" {
-		let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", param(&path, "q"));
-		let res;
-		let subreddit_list;
-
-		// Send a request to the url
-		match request(&subreddit_search_path).await {
-			// If success, receive JSON in response
-			Ok(response) => {
-				res = response;
-				subreddit_list = res["data"]["children"].as_array();
-			}
-			// If the Reddit API returns an error, exit this function
-			Err(_msg) => {subreddit_list = None;}
-		}
-
-		// For each subreddit from subreddit list
-		if !subreddit_list.is_none() {
-			for subreddit in subreddit_list.unwrap() {
-				subreddits.push(Subreddit {
-					name: val(subreddit, "display_name_prefixed"),
-					url: val(subreddit, "url"),
-					description: val(subreddit, "public_description"),
-					subscribers: subreddit["data"]["subscribers"].as_u64().unwrap_or_default() as i64,
-				});
-			}
-		}
-	}
+	let subreddits = if param(&path, "restrict_sr").is_empty() {
+		search_subreddits(param(&path, "q")).await
+	} else {
+		Vec::new()
+	};

 	match fetch_posts(&path, String::new()).await {
 		Ok((posts, after)) => HttpResponse::Ok().content_type("text/html").body(

@@ -93,3 +70,29 @@ pub async fn find(req: HttpRequest) -> HttpResponse {
 		Err(msg) => error(msg).await,
 	}
 }
+
+async fn search_subreddits(q: String) -> Vec<Subreddit> {
+	let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
+
+	// Send a request to the url
+	match request(&subreddit_search_path).await {
+		// If success, receive JSON in response
+		Ok(response) => {
+			match response["data"]["children"].as_array() {
+				// For each subreddit from subreddit list
+				Some(list) => list
+					.iter()
+					.map(|subreddit| Subreddit {
+						name: val(subreddit, "display_name_prefixed"),
+						url: val(subreddit, "url"),
+						description: val(subreddit, "public_description"),
+						subscribers: subreddit["data"]["subscribers"].as_u64().unwrap_or_default() as i64,
+					})
+					.collect::<Vec<Subreddit>>(),
+				_ => Vec::new(),
+			}
+		}
+		// If the Reddit API returns an error, exit this function
+		_ => Vec::new(),
+	}
+}
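
A sketch of how the new helper is meant to be called from find() above; the query string is an illustrative value:

    // Communities are only searched when the query is not restricted to one
    // subreddit (restrict_sr is empty); spaces become '+' inside the helper.
    let subreddits: Vec<Subreddit> = search_subreddits("rust web framework".to_string()).await;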

src/subreddit.rs

@@ -34,19 +34,20 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
 		.to_string();
 	let sort = req.match_info().get("sort").unwrap_or("hot").to_string();

-	let sub = if !sub_name.contains('+') && sub_name != "popular" && sub_name != "all" {
-		subreddit(&sub_name).await.unwrap_or_default()
-	} else if sub_name.contains('+') {
-		Subreddit {
-			name: sub_name,
-			..Subreddit::default()
-		}
-	} else {
-		Subreddit::default()
-	};
-
 	match fetch_posts(&path, String::new()).await {
 		Ok((posts, after)) => {
+			// If you can get subreddit posts, also request subreddit metadata
+			let sub = if !sub_name.contains('+') && sub_name != "popular" && sub_name != "all" {
+				subreddit(&sub_name).await.unwrap_or_default()
+			} else if sub_name.contains('+') {
+				Subreddit {
+					name: sub_name,
+					..Subreddit::default()
+				}
+			} else {
+				Subreddit::default()
+			};
+
 			let s = SubredditTemplate {
 				sub,
 				posts,

src/user.rs

@@ -24,12 +24,14 @@ pub async fn profile(req: HttpRequest) -> HttpResponse {
 	let sort = param(&path, "sort");
 	let username = req.match_info().get("username").unwrap_or("").to_string();

-	// Request user profile data and user posts/comments from Reddit
-	let user = user(&username).await.unwrap_or_default();
+	// Request user posts/comments from Reddit
 	let posts = fetch_posts(&path, "Comment".to_string()).await;

 	match posts {
 		Ok((posts, after)) => {
+			// If you can get user posts, also request user data
+			let user = user(&username).await.unwrap_or_default();
+
 			let s = UserTemplate {
 				user,
 				posts,

src/utils.rs (135 changes)

@@ -20,6 +20,7 @@ pub struct Flair {
 	pub foreground_color: String,
 }

+// Part of flair, either emoji or text
 pub struct FlairPart {
 	pub flair_part_type: String,
 	pub value: String,

@@ -133,9 +134,10 @@ pub fn prefs(req: HttpRequest) -> Preferences {

 // Grab a query param from a url
 pub fn param(path: &str, value: &str) -> String {
-	let url = Url::parse(format!("https://libredd.it/{}", path).as_str()).unwrap();
-	let pairs: HashMap<_, _> = url.query_pairs().into_owned().collect();
-	pairs.get(value).unwrap_or(&String::new()).to_owned()
+	match Url::parse(format!("https://libredd.it/{}", path).as_str()) {
+		Ok(url) => url.query_pairs().into_owned().collect::<HashMap<_, _>>().get(value).unwrap_or(&String::new()).to_owned(),
+		_ => String::new(),
+	}
 }

 // Parse Cookie value from request
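
A quick check of the rewritten helper's behavior; the paths here are illustrative:

    // param() resolves the path against https://libredd.it/ and reads its query pairs
    assert_eq!(param("/r/rust/search.json?q=test&sort=new", "sort"), "new");
    // A missing key (or an unparseable URL) now yields an empty string instead of panicking
    assert_eq!(param("/r/rust/search.json?q=test", "sort"), "");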

@@ -171,19 +173,23 @@ pub fn format_num(num: i64) -> String {

 pub async fn media(data: &Value) -> (String, String) {
 	let post_type: &str;
-	let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
+	// If post is a video, return the video
+	let url = if data["preview"]["reddit_video_preview"]["fallback_url"].is_string() {
 		post_type = "video";
 		format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default())
-	} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
+	} else if data["secure_media"]["reddit_video"]["fallback_url"].is_string() {
 		post_type = "video";
 		format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default())
+	// Handle images, whether GIFs or pics
 	} else if data["post_hint"].as_str().unwrap_or("") == "image" {
 		let preview = data["preview"]["images"][0].clone();
 		match preview["variants"]["mp4"].as_object() {
+			// Return the mp4 if the media is a gif
 			Some(gif) => {
 				post_type = "gif";
 				format_url(gif["source"]["url"].as_str().unwrap_or_default())
 			}
+			// Return the picture if the media is an image
 			None => {
 				post_type = "image";
 				format_url(preview["source"]["url"].as_str().unwrap_or_default())

@@ -201,10 +207,13 @@ pub async fn media(data: &Value) -> (String, String) {
 }

 pub fn parse_rich_flair(flair_type: String, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<FlairPart> {
+	// Parse type of flair
 	match flair_type.as_str() {
+		// If flair contains emojis and text
 		"richtext" => match rich_flair {
 			Some(rich) => rich
 				.iter()
+				// For each part of the flair, extract text and emojis
 				.map(|part| {
 					let value = |name: &str| part[name].as_str().unwrap_or_default();
 					FlairPart {

@@ -219,6 +228,7 @@ pub fn parse_rich_flair(flair_type: String, rich_flair: Option<&Vec<Value>>, tex
 				.collect::<Vec<FlairPart>>(),
 			None => Vec::new(),
 		},
+		// If flair contains only text
 		"text" => match text_flair {
 			Some(text) => vec![FlairPart {
 				flair_part_type: "text".to_string(),

@@ -233,8 +243,10 @@ pub fn parse_rich_flair(flair_type: String, rich_flair: Option<&Vec<Value>>, tex
 pub fn time(unix_time: i64) -> String {
 	let time = OffsetDateTime::from_unix_timestamp(unix_time);
 	let time_delta = OffsetDateTime::now_utc() - time;
+	// If the time difference is more than a month, show full date
 	if time_delta > Duration::days(30) {
-		time.format("%b %d '%y") // %b %e '%y
+		time.format("%b %d '%y")
+	// Otherwise, show relative date/time
 	} else if time_delta.whole_days() > 0 {
 		format!("{}d ago", time_delta.whole_days())
 	} else if time_delta.whole_hours() > 0 {
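
Illustrative outputs of time() for a few deltas from the current instant; the full-date branch depends on the day the function runs:

    // delta > 30 days -> full date, e.g. "Jan 05 '21"
    // delta = 2 days  -> "2d ago"
    // delta = 3 hours -> "3h ago"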

@@ -351,46 +363,85 @@ pub async fn request(path: &str) -> Result<Value, String> {
 	let url = format!("https://www.reddit.com{}", path);

 	// Send request using awc
-	async fn send(url: &str) -> Result<String, (bool, String)> {
-		let client = actix_web::client::Client::default();
-		let response = client.get(url).send().await;
-
-		match response {
-			Ok(mut payload) => {
-				// Get first number of response HTTP status code
-				match payload.status().to_string().chars().next() {
-					// If success
-					Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap().to_vec()).unwrap()),
-					// If redirection
-					Some('3') => Err((true, payload.headers().get("location").unwrap().to_str().unwrap().to_string())),
-					// Otherwise
-					_ => Err((false, "Page not found".to_string())),
-				}
-			}
-			Err(_) => Err((false, "Couldn't send request to Reddit".to_string())),
-		}
-	}
-
-	fn err(u: String, m: String) -> Result<Value, String> {
-		#[cfg(debug_assertions)]
-		dbg!(format!("{} - {}", u, m));
-		Err(m)
-	};
-
-	fn json(url: String, body: String) -> Result<Value, String> {
-		match from_str(body.as_str()) {
-			Ok(json) => Ok(json),
-			Err(_) => err(url, "Failed to parse page JSON data".to_string()),
-		}
-	}
-
-	match send(&url).await {
-		Ok(body) => json(url, body),
-		Err((true, location)) => match send(location.as_str()).await {
-			Ok(body) => json(url, body),
-			Err((true, location)) => err(url, location),
-			Err((_, msg)) => err(url, msg),
-		},
-		Err((_, msg)) => err(url, msg),
-	}
+	// async fn send(url: &str) -> Result<String, (bool, String)> {
+	// 	let client = actix_web::client::Client::default();
+	// 	let response = client.get(url).header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"))).send().await;
+
+	// 	match response {
+	// 		Ok(mut payload) => {
+	// 			// Get first number of response HTTP status code
+	// 			match payload.status().to_string().chars().next() {
+	// 				// If success
+	// 				Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap_or_default().to_vec()).unwrap_or_default()),
+	// 				// If redirection
+	// 				Some('3') => match payload.headers().get("location") {
+	// 					Some(location) => Err((true, location.to_str().unwrap_or_default().to_string())),
+	// 					None => Err((false, "Page not found".to_string())),
+	// 				},
+	// 				// Otherwise
+	// 				_ => Err((false, "Page not found".to_string())),
+	// 			}
+	// 		}
+	// 		Err(e) => { dbg!(e); Err((false, "Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())) },
+	// 	}
+	// }
+
+	// // Print error if debugging then return error based on error message
+	// fn err(url: String, msg: String) -> Result<Value, String> {
+	// 	// #[cfg(debug_assertions)]
+	// 	dbg!(format!("{} - {}", url, msg));
+	// 	Err(msg)
+	// };
+
+	// // Parse JSON from body. If parsing fails, return error
+	// fn json(url: String, body: String) -> Result<Value, String> {
+	// 	match from_str(body.as_str()) {
+	// 		Ok(json) => Ok(json),
+	// 		Err(_) => err(url, "Failed to parse page JSON data".to_string()),
+	// 	}
+	// }
+
+	// // Make request to Reddit using send function
+	// match send(&url).await {
+	// 	// If success, parse and return body
+	// 	Ok(body) => json(url, body),
+	// 	// Follow any redirects
+	// 	Err((true, location)) => match send(location.as_str()).await {
+	// 		// If success, parse and return body
+	// 		Ok(body) => json(url, body),
+	// 		// Follow any redirects again
+	// 		Err((true, location)) => err(url, location),
+	// 		// Return errors if request fails
+	// 		Err((_, msg)) => err(url, msg),
+	// 	},
+	// 	// Return errors if request fails
+	// 	Err((_, msg)) => err(url, msg),
+	// }
+
+	// Send request using ureq
+	match ureq::get(&url).call() {
+		// If response is success
+		Ok(response) => {
+			// Parse the response from Reddit as JSON
+			match from_str(&response.into_string().unwrap()) {
+				Ok(json) => Ok(json),
+				Err(_) => {
+					#[cfg(debug_assertions)]
+					dbg!(format!("{} - Failed to parse page JSON data", url));
+					Err("Failed to parse page JSON data".to_string())
+				}
+			}
+		}
+		// If response is error
+		Err(ureq::Error::Status(_, _)) => {
+			#[cfg(debug_assertions)]
+			dbg!(format!("{} - Page not found", url));
+			Err("Page not found".to_string())
+		}
+		// If failed to send request
+		Err(e) => {
+			dbg!(format!("{} - {}", url, e));
+			Err("Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())
+		}
+	}
 }
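
Callers of request() stay async even though ureq itself blocks; a hypothetical call site, with an illustrative path:

    async fn front_page() -> Result<(), String> {
        // request() resolves the path against https://www.reddit.com and parses the JSON body
        let json = request("/r/rust/hot.json?limit=25").await?;
        println!("{} posts fetched", json["data"]["children"].as_array().map_or(0, |a| a.len()));
        Ok(())
    }

One design consequence worth noting: ureq::get(&url).call() runs synchronously on the calling actix worker thread, so a slow Reddit response occupies that worker until the call returns.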

static/style.css

@@ -543,7 +543,8 @@ a.search_subreddit:hover {
 .post_flair {
 	background: var(--accent);
 	color: var(--background);
-	padding: 5px;
+	padding: 4px;
+	margin-right: 5px;
 	border-radius: 5px;
 	font-size: 12px;
 	font-weight: bold;

templates/subreddit.html

@@ -76,11 +76,11 @@

 <footer>
 	{% if ends.0 != "" %}
-		<a href="?sort={{ sort.0 }}&before={{ ends.0 }}">PREV</a>
+		<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
 	{% endif %}

 	{% if ends.1 != "" %}
-		<a href="?sort={{ sort.0 }}&after={{ ends.1 }}">NEXT</a>
+		<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
 	{% endif %}
 </footer>
 </div>

templates/user.html

@@ -86,11 +86,11 @@

 <footer>
 	{% if ends.0 != "" %}
-		<a href="?sort={{ sort.0 }}&before={{ ends.0 }}">PREV</a>
+		<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&before={{ ends.0 }}">PREV</a>
 	{% endif %}

 	{% if ends.1 != "" %}
-		<a href="?sort={{ sort.0 }}&after={{ ends.1 }}">NEXT</a>
+		<a href="?sort={{ sort.0 }}&t={{ sort.1 }}&after={{ ends.1 }}">NEXT</a>
 	{% endif %}
 </footer>
 </div>

templates/utils.html

@@ -28,5 +28,8 @@
 {%- endmacro %}

 {% macro render_flair(flair) -%}
-{% for flair_part in flair %}{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}')"></span>{% else if flair_part.flair_part_type == "text" %}<span>{{ flair_part.value }}</span>{% endif %}{% endfor %}
+{% for flair_part in flair %}
+	{% if flair_part.flair_part_type == "emoji" %}<span class="emoji" style="background-image:url('{{ flair_part.value }}')"></span>
+	{% else if flair_part.flair_part_type == "text" %}<span>{{ flair_part.value }}</span>{% endif %}
+{% endfor %}
 {%- endmacro %}