Move from Actix Web to Tide (#99)

* Initial commit

* Port posts

* Pinpoint Tide Bug

* Revert testing

* Add basic sub support

* Unwrap nested routes

* Front page & sync templates

* Port remaining functions

* Log request errors

* Clean main and settings

* Handle /w/ requests

* Create template() util

* Reduce caching time to 30s

* Fix subscription redirects

* Handle frontpage sorting
Authored by Spike on 2021-02-09 09:38:52 -08:00; committed by GitHub
parent 402b3149e1
commit ebbdd7185f
15 changed files with 1283 additions and 1189 deletions
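The bulk of the port is mechanical: each Actix handler of the form async fn f(req: HttpRequest) -> HttpResponse becomes a Tide endpoint async fn f(req: Request<()>) -> tide::Result, with the response assembled through Response::builder. Below is a minimal sketch of the new shape, assuming Tide 0.16 and async-std with the "attributes" feature as declared in Cargo.toml further down; the greet handler and the /hello/:name/ route are illustrative and not part of this diff.

use tide::{Request, Response};

// Tide-style endpoint mirroring the pattern used throughout this commit:
// take a Request<()>, return a tide::Result built via Response::builder.
async fn greet(req: Request<()>) -> tide::Result {
	// Route parameters are read with req.param(), which returns a Result in Tide 0.16.
	let name = req.param("name").unwrap_or("world");
	Ok(
		Response::builder(200)
			.content_type("text/html")
			.body(format!("<p>Hello, {}!</p>", name))
			.build(),
	)
}

#[async_std::main]
async fn main() -> tide::Result<()> {
	let mut app = tide::new();
	app.at("/hello/:name/").get(greet);
	app.listen("127.0.0.1:8080").await?;
	Ok(())
}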

Cargo.lock (generated, 1744 lines changed): diff suppressed because it is too large.

Cargo.toml

@@ -8,15 +8,15 @@ authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2018"
 
 [dependencies]
+tide = "0.16"
+async-std = { version = "1", features = ["attributes"] }
+surf = "2"
 base64 = "0.13"
-actix-web = { version = "3.3", features = ["rustls"] }
+cached = "0.23"
 futures = "0.3"
 askama = "0.10"
-ureq = "2"
-serde = { version = "1.0", default_features = false, features = ["derive"] }
-serde_json = "1.0"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
 async-recursion = "0.3"
-url = "2.2"
-regex = "1.4"
+regex = "1"
 time = "0.2"
-cached = "0.23"

rustfmt.toml

@@ -1,4 +1,4 @@
 edition = "2018"
 tab_spaces = 2
 hard_tabs = true
-max_width = 175
+max_width = 150

src/main.rs

@@ -1,9 +1,7 @@
 // Import Crates
-use actix_web::{
-	dev::{Service, ServiceResponse},
-	middleware, web, App, HttpResponse, HttpServer,
-};
-use futures::future::FutureExt;
+// use askama::filters::format;
+use surf::utils::async_trait;
+use tide::{utils::After, Middleware, Next, Request, Response};
 
 // Reference local files
 mod post;
@@ -14,43 +12,103 @@ mod subreddit;
 mod user;
 mod utils;
 
+// Build middleware
+struct HttpsRedirect<HttpsOnly>(HttpsOnly);
+struct NormalizePath;
+
+#[async_trait]
+impl<State, HttpsOnly> Middleware<State> for HttpsRedirect<HttpsOnly>
+where
+	State: Clone + Send + Sync + 'static,
+	HttpsOnly: Into<bool> + Copy + Send + Sync + 'static,
+{
+	async fn handle(&self, request: Request<State>, next: Next<'_, State>) -> tide::Result {
+		let secure = request.url().scheme() == "https";
+		if self.0.into() && !secure {
+			let mut secured = request.url().to_owned();
+			secured.set_scheme("https").unwrap_or_default();
+			Ok(Response::builder(302).header("Location", secured.to_string()).build())
+		} else {
+			Ok(next.run(request).await)
+		}
+	}
+}
+
+#[async_trait]
+impl<State: Clone + Send + Sync + 'static> Middleware<State> for NormalizePath {
+	async fn handle(&self, request: Request<State>, next: Next<'_, State>) -> tide::Result {
+		if !request.url().path().ends_with('/') {
+			Ok(Response::builder(301).header("Location", format!("{}/", request.url().path())).build())
+		} else {
+			Ok(next.run(request).await)
		}
+	}
+}
+
 // Create Services
-async fn style() -> HttpResponse {
-	HttpResponse::Ok().content_type("text/css").body(include_str!("../static/style.css"))
+async fn style(_req: Request<()>) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("text/css")
+			.body(include_str!("../static/style.css"))
+			.build(),
+	)
 }
 
 // Required for creating a PWA
-async fn manifest() -> HttpResponse {
-	HttpResponse::Ok().content_type("application/json").body(include_str!("../static/manifest.json"))
+async fn manifest(_req: Request<()>) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("application/json")
+			.body(include_str!("../static/manifest.json"))
+			.build(),
+	)
 }
 
 // Required for the manifest to be valid
-async fn pwa_logo() -> HttpResponse {
-	HttpResponse::Ok().content_type("image/png").body(include_bytes!("../static/logo.png").as_ref())
+async fn pwa_logo(_req: Request<()>) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("image/png")
+			.body(include_bytes!("../static/logo.png").as_ref())
+			.build(),
+	)
 }
 
 // Required for iOS App Icons
-async fn iphone_logo() -> HttpResponse {
-	HttpResponse::Ok()
-		.content_type("image/png")
-		.body(include_bytes!("../static/touch-icon-iphone.png").as_ref())
+async fn iphone_logo(_req: Request<()>) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("image/png")
+			.body(include_bytes!("../static/touch-icon-iphone.png").as_ref())
+			.build(),
+	)
 }
 
-async fn robots() -> HttpResponse {
-	HttpResponse::Ok()
-		.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
-		.body("User-agent: *\nAllow: /")
+async fn robots(_req: Request<()>) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("text/plain")
+			.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+			.body("User-agent: *\nAllow: /")
+			.build(),
+	)
 }
 
-async fn favicon() -> HttpResponse {
-	HttpResponse::Ok()
-		.content_type("image/x-icon")
-		.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
-		.body(include_bytes!("../static/favicon.ico").as_ref())
+async fn favicon(_req: Request<()>) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("image/vnd.microsoft.icon")
+			.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+			.body(include_bytes!("../static/favicon.ico").as_ref())
+			.build(),
+	)
 }
 
-#[actix_web::main]
-async fn main() -> std::io::Result<()> {
+#[async_std::main]
+async fn main() -> tide::Result<()> {
 	let mut address = "0.0.0.0:8080".to_string();
 	let mut force_https = false;
@@ -62,101 +120,96 @@ async fn main() -> std::io::Result<()> {
 		}
 	}
 
-	// start http server
+	// Start HTTP server
 	println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), &address);
 
-	HttpServer::new(move || {
-		App::new()
-			// Redirect to HTTPS if "--redirect-https" enabled
-			.wrap_fn(move |req, srv| {
-				let secure = req.connection_info().scheme() == "https";
-				let https_url = format!("https://{}{}", req.connection_info().host(), req.uri().to_string());
-				srv.call(req).map(move |res: Result<ServiceResponse, _>| {
-					if force_https && !secure {
-						Ok(ServiceResponse::new(
-							res.unwrap().request().to_owned(),
-							HttpResponse::Found().header("Location", https_url).finish(),
-						))
-					} else {
-						res
-					}
-				})
-			})
-			// Append trailing slash and remove double slashes
-			.wrap(middleware::NormalizePath::default())
-			// Apply default headers for security
-			.wrap(
-				middleware::DefaultHeaders::new()
-					.header("Referrer-Policy", "no-referrer")
-					.header("X-Content-Type-Options", "nosniff")
-					.header("X-Frame-Options", "DENY")
-					.header(
-						"Content-Security-Policy",
-						"default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';",
-					),
-			)
-			// Default service in case no routes match
-			.default_service(web::get().to(|| utils::error("Nothing here".to_string())))
-			// Read static files
-			.route("/style.css/", web::get().to(style))
-			.route("/favicon.ico/", web::get().to(favicon))
-			.route("/robots.txt/", web::get().to(robots))
-			.route("/manifest.json/", web::get().to(manifest))
-			.route("/logo.png/", web::get().to(pwa_logo))
-			.route("/touch-icon-iphone.png/", web::get().to(iphone_logo))
-			// Proxy media through Libreddit
-			.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
-			// Browse user profile
-			.service(
-				web::scope("/{scope:user|u}").service(
-					web::scope("/{username}").route("/", web::get().to(user::profile)).service(
-						web::scope("/comments/{id}/{title}")
-							.route("/", web::get().to(post::item))
-							.route("/{comment_id}/", web::get().to(post::item)),
-					),
-				),
-			)
-			// Configure settings
-			.service(web::resource("/settings/").route(web::get().to(settings::get)).route(web::post().to(settings::set)))
-			// Subreddit services
-			.service(
-				web::scope("/r/{sub}")
-					// See posts and info about subreddit
-					.route("/", web::get().to(subreddit::page))
-					.route("/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
-					// Handle subscribe/unsubscribe
-					.route("/{action:subscribe|unsubscribe}/", web::post().to(subreddit::subscriptions))
-					// View post on subreddit
-					.service(
-						web::scope("/comments/{id}/{title}")
-							.route("/", web::get().to(post::item))
-							.route("/{comment_id}/", web::get().to(post::item)),
-					)
-					// Search inside subreddit
-					.route("/search/", web::get().to(search::find))
-					// View wiki of subreddit
-					.service(
-						web::scope("/{scope:wiki|w}")
-							.route("/", web::get().to(subreddit::wiki))
-							.route("/{page}/", web::get().to(subreddit::wiki)),
-					),
-			)
-			// Front page
-			.route("/", web::get().to(subreddit::page))
-			.route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
-			// View Reddit wiki
-			.service(
-				web::scope("/wiki")
-					.route("/", web::get().to(subreddit::wiki))
-					.route("/{page}/", web::get().to(subreddit::wiki)),
-			)
-			// Search all of Reddit
-			.route("/search/", web::get().to(search::find))
-			// Short link for post
-			.route("/{id:.{5,6}}/", web::get().to(post::item))
-	})
-	.bind(&address)
-	.unwrap_or_else(|e| panic!("Cannot bind to the address {}: {}", address, e))
-	.run()
-	.await
+	let mut app = tide::new();
+
+	// Redirect to HTTPS if "--redirect-https" enabled
+	app.with(HttpsRedirect(force_https));
+
+	// Append trailing slash and remove double slashes
+	app.with(NormalizePath);
+
+	// Apply default headers for security
+	app.with(After(|mut res: Response| async move {
+		res.insert_header("Referrer-Policy", "no-referrer");
+		res.insert_header("X-Content-Type-Options", "nosniff");
+		res.insert_header("X-Frame-Options", "DENY");
+		res.insert_header(
+			"Content-Security-Policy",
+			"default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';",
+		);
+		Ok(res)
+	}));
+
+	// Read static files
+	app.at("/style.css/").get(style);
+	app.at("/favicon.ico/").get(favicon);
+	app.at("/robots.txt/").get(robots);
+	app.at("/manifest.json/").get(manifest);
+	app.at("/logo.png/").get(pwa_logo);
+	app.at("/touch-icon-iphone.png/").get(iphone_logo);
+
+	// Proxy media through Libreddit
+	app.at("/proxy/*url/").get(proxy::handler);
+
+	// Browse user profile
+	app.at("/u/:name/").get(user::profile);
+	app.at("/u/:name/comments/:id/:title/").get(post::item);
+	app.at("/u/:name/comments/:id/:title/:comment/").get(post::item);
+
+	app.at("/user/:name/").get(user::profile);
+	app.at("/user/:name/comments/:id/:title/").get(post::item);
+	app.at("/user/:name/comments/:id/:title/:comment/").get(post::item);
+
+	// Configure settings
+	app.at("/settings/").get(settings::get).post(settings::set);
+
+	// Subreddit services
+	// See posts and info about subreddit
+	app.at("/r/:sub/").get(subreddit::page);
+	// Handle subscribe/unsubscribe
+	app.at("/r/:sub/subscribe/").post(subreddit::subscriptions);
+	app.at("/r/:sub/unsubscribe/").post(subreddit::subscriptions);
+	// View post on subreddit
+	app.at("/r/:sub/comments/:id/:title/").get(post::item);
+	app.at("/r/:sub/comments/:id/:title/:comment_id/").get(post::item);
+	// Search inside subreddit
+	app.at("/r/:sub/search/").get(search::find);
+	// View wiki of subreddit
+	app.at("/r/:sub/w/").get(subreddit::wiki);
+	app.at("/r/:sub/w/:page/").get(subreddit::wiki);
+	app.at("/r/:sub/wiki/").get(subreddit::wiki);
+	app.at("/r/:sub/wiki/:page/").get(subreddit::wiki);
+	// Sort subreddit posts
+	app.at("/r/:sub/:sort/").get(subreddit::page);
+
+	// Front page
+	app.at("/").get(subreddit::page);
+
+	// View Reddit wiki
+	app.at("/w/").get(subreddit::wiki);
+	app.at("/w/:page/").get(subreddit::wiki);
+	app.at("/wiki/").get(subreddit::wiki);
+	app.at("/wiki/:page/").get(subreddit::wiki);
+
+	// Search all of Reddit
+	app.at("/search/").get(search::find);
+
+	// Short link for post
+	// .route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
+	// .route("/{id:.{5,6}}/", web::get().to(post::item))
+	app.at("/:id/").get(|req: Request<()>| async {
+		match req.param("id").unwrap_or_default() {
+			"best" | "hot" | "new" | "top" | "rising" | "controversial" => subreddit::page(req).await,
+			_ => post::item(req).await,
+		}
+	});
+
+	// Default service in case no routes match
+	app.at("*").get(|_| utils::error("Nothing here".to_string()));
+
+	app.listen("127.0.0.1:8080").await?;
+	Ok(())
 }
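Route patterns change syntax along with the framework: Actix's {name} segments and regex guards (such as {url:.*} and {action:subscribe|unsubscribe}) become Tide's :name parameters and *wildcard matches, and handlers read them with req.param instead of req.match_info. A small sketch of the handler side, assuming Tide 0.16; the show_sub endpoint is illustrative and not part of this diff.

use tide::Request;

// Illustrative endpoint for a route registered as app.at("/r/:sub/").get(show_sub);
// it reads the :sub segment that replaces Actix's {sub} pattern.
async fn show_sub(req: Request<()>) -> tide::Result<String> {
	// In Tide 0.16, req.param returns Err if the segment is absent from the matched route.
	let sub = req.param("sub").unwrap_or_default();
	Ok(format!("Matched subreddit: {}", sub))
}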

src/post.rs

@@ -1,6 +1,6 @@
 // CRATES
 use crate::utils::*;
-use actix_web::{HttpRequest, HttpResponse};
+use tide::Request;
 use async_recursion::async_recursion;
@@ -16,9 +16,9 @@ struct PostTemplate {
 	prefs: Preferences,
 }
 
-pub async fn item(req: HttpRequest) -> HttpResponse {
+pub async fn item(req: Request<()>) -> tide::Result {
 	// Build Reddit API path
-	let mut path: String = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
+	let mut path: String = format!("{}.json?{}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default());
 
 	// Set sort to sort query parameter
 	let mut sort: String = param(&path, "sort");
@@ -29,12 +29,17 @@
 	// If there's no sort query but there's a default sort, set sort to default_sort
 	if sort.is_empty() && !default_sort.is_empty() {
 		sort = default_sort;
-		path = format!("{}.json?{}&sort={}&raw_json=1", req.path(), req.query_string(), sort);
+		path = format!(
+			"{}.json?{}&sort={}&raw_json=1",
+			req.url().path(),
+			req.url().query().unwrap_or_default(),
+			sort
+		);
 	}
 
 	// Log the post ID being fetched in debug mode
 	#[cfg(debug_assertions)]
-	dbg!(req.match_info().get("id").unwrap_or(""));
+	dbg!(req.param("id").unwrap_or(""));
 
 	// Send a request to the url, receive JSON in response
 	match request(path).await {
@@ -45,15 +50,12 @@
 			let comments = parse_comments(&res[1]).await;
 
 			// Use the Post and Comment structs to generate a website to show users
-			let s = PostTemplate {
+			template(PostTemplate {
 				comments,
 				post,
 				sort,
 				prefs: prefs(req),
-			}
-			.render()
-			.unwrap();
-			HttpResponse::Ok().content_type("text/html").body(s)
+			})
 		}
 		// If the Reddit API returns an error, exit and send error page to user
 		Err(msg) => error(msg).await,

src/proxy.rs

@@ -1,9 +1,8 @@
-use actix_web::{client::Client, error, web, Error, HttpResponse, Result};
-use url::Url;
 use base64::decode;
+use surf::{Body, Url};
+use tide::{Request, Response};
 
-pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> {
+pub async fn handler(req: Request<()>) -> tide::Result {
 	let domains = vec![
 		// THUMBNAILS
 		"a.thumbs.redditmedia.com",
@@ -21,27 +20,31 @@
 		"v.redd.it",
 	];
 
-	let decoded = decode(b64).map(|bytes| String::from_utf8(bytes).unwrap_or_default());
+	let decoded = decode(req.param("url").unwrap_or_default()).map(|bytes| String::from_utf8(bytes).unwrap_or_default());
 
 	match decoded {
 		Ok(media) => match Url::parse(media.as_str()) {
 			Ok(url) => {
-				let domain = url.domain().unwrap_or_default();
-				if domains.contains(&domain) {
-					Client::default().get(media.replace("&amp;", "&")).send().await.map_err(Error::from).map(|res| {
-						HttpResponse::build(res.status())
-							.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
-							.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
-							.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
-							.streaming(res)
-					})
+				if domains.contains(&url.domain().unwrap_or_default()) {
+					let http = surf::get(url).await.unwrap();
+					let content_length = http.header("Content-Length").map(|v| v.to_string()).unwrap_or_default();
+					let content_type = http.content_type().map(|m| m.to_string()).unwrap_or_default();
+
+					Ok(
+						Response::builder(http.status())
+							.body(Body::from_reader(http, None))
+							.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+							.header("Content-Length", content_length)
+							.header("Content-Type", content_type)
+							.build(),
+					)
 				} else {
-					Err(error::ErrorForbidden("Resource must be from Reddit"))
+					Err(tide::Error::from_str(403, "Resource must be from Reddit"))
 				}
 			}
-			_ => Err(error::ErrorBadRequest("Can't parse base64 into URL")),
+			Err(_) => Err(tide::Error::from_str(400, "Can't parse base64 into URL")),
 		},
-		_ => Err(error::ErrorBadRequest("Can't decode base64")),
+		Err(_) => Err(tide::Error::from_str(400, "Can't decode base64")),
 	}
 }
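The proxy route works on base64-encoded upstream URLs: format_url in utils.rs wraps media URLs as /proxy/<base64>/, and the handler above decodes that segment, checks the host against the Reddit-domain allowlist, and streams the upstream response. A small sketch of building such a path under those assumptions; the proxy_path helper name is illustrative and not part of the repo.

use base64::encode;

// Illustrative helper mirroring utils::format_url: wrap an upstream media URL
// into the /proxy/<base64>/ form that proxy::handler expects.
fn proxy_path(url: &str) -> String {
	format!("/proxy/{}/", encode(url))
}

fn main() {
	// Prints "/proxy/aHR0cHM6Ly92LnJlZGQuaXQvZXhhbXBsZQ==/"
	println!("{}", proxy_path("https://v.redd.it/example"));
}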

src/search.rs

@@ -1,7 +1,7 @@
 // CRATES
-use crate::utils::{cookie, error, fetch_posts, param, prefs, request, val, Post, Preferences};
-use actix_web::{HttpRequest, HttpResponse};
+use crate::utils::{cookie, error, fetch_posts, param, prefs, request, template, val, Post, Preferences};
 use askama::Template;
+use tide::Request;
 
 // STRUCTS
 struct SearchParams {
@@ -32,10 +32,10 @@
 }
 
 // SERVICES
-pub async fn find(req: HttpRequest) -> HttpResponse {
+pub async fn find(req: Request<()>) -> tide::Result {
 	let nsfw_results = if cookie(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
-	let path = format!("{}.json?{}{}", req.path(), req.query_string(), nsfw_results);
-	let sub = req.match_info().get("sub").unwrap_or("").to_string();
+	let path = format!("{}.json?{}{}", req.url().path(), req.url().query().unwrap_or_default(), nsfw_results);
+	let sub = req.param("sub").unwrap_or("").to_string();
 
 	let sort = if param(&path, "sort").is_empty() {
 		"relevance".to_string()
@@ -50,24 +50,20 @@
 	};
 
 	match fetch_posts(&path, String::new()).await {
-		Ok((posts, after)) => HttpResponse::Ok().content_type("text/html").body(
-			SearchTemplate {
-				posts,
-				subreddits,
-				sub,
-				params: SearchParams {
-					q: param(&path, "q"),
-					sort,
-					t: param(&path, "t"),
-					before: param(&path, "after"),
-					after,
-					restrict_sr: param(&path, "restrict_sr"),
-				},
-				prefs: prefs(req),
-			}
-			.render()
-			.unwrap(),
-		),
+		Ok((posts, after)) => template(SearchTemplate {
+			posts,
+			subreddits,
+			sub,
+			params: SearchParams {
+				q: param(&path, "q"),
+				sort,
+				t: param(&path, "t"),
+				before: param(&path, "after"),
+				after,
+				restrict_sr: param(&path, "restrict_sr"),
+			},
+			prefs: prefs(req),
+		}),
 		Err(msg) => error(msg).await,
 	}
 }

src/settings.rs

@@ -1,7 +1,7 @@
 // CRATES
-use crate::utils::{prefs, Preferences};
-use actix_web::{cookie::Cookie, web::Form, HttpRequest, HttpResponse};
+use crate::utils::{prefs, template, Preferences};
 use askama::Template;
+use tide::{http::Cookie, Request, Response};
 use time::{Duration, OffsetDateTime};
 
 // STRUCTS
@@ -11,7 +11,7 @@ struct SettingsTemplate {
 	prefs: Preferences,
 }
 
-#[derive(serde::Deserialize)]
+#[derive(serde::Deserialize, Default)]
 pub struct SettingsForm {
 	theme: Option<String>,
 	front_page: Option<String>,
@@ -24,33 +24,35 @@ pub struct SettingsForm {
 // FUNCTIONS
 
 // Retrieve cookies from request "Cookie" header
-pub async fn get(req: HttpRequest) -> HttpResponse {
-	let s = SettingsTemplate { prefs: prefs(req) }.render().unwrap();
-	HttpResponse::Ok().content_type("text/html").body(s)
+pub async fn get(req: Request<()>) -> tide::Result {
+	template(SettingsTemplate { prefs: prefs(req) })
 }
 
 // Set cookies using response "Set-Cookie" header
-pub async fn set(_req: HttpRequest, form: Form<SettingsForm>) -> HttpResponse {
-	let mut res = HttpResponse::Found();
+pub async fn set(mut req: Request<()>) -> tide::Result {
+	let form: SettingsForm = req.body_form().await.unwrap_or_default();
+
+	let mut res = Response::builder(302)
+		.content_type("text/html")
+		.header("Location", "/settings")
+		.body(r#"Redirecting to <a href="/settings">settings</a>..."#)
+		.build();
 
 	let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "show_nsfw"];
-	let values = vec![&form.theme, &form.front_page, &form.layout, &form.wide, &form.comment_sort, &form.show_nsfw];
+	let values = vec![form.theme, form.front_page, form.layout, form.wide, form.comment_sort, form.show_nsfw];
 
 	for (i, name) in names.iter().enumerate() {
-		match values[i] {
-			Some(value) => res.cookie(
-				Cookie::build(name.to_owned(), value)
+		match values.get(i) {
+			Some(value) => res.insert_cookie(
+				Cookie::build(name.to_owned(), value.to_owned().unwrap_or_default())
					.path("/")
 					.http_only(true)
 					.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
 					.finish(),
 			),
-			None => res.del_cookie(&Cookie::named(name.to_owned())),
+			None => res.remove_cookie(Cookie::named(name.to_owned())),
 		};
 	}
 
-	res
-		.content_type("text/html")
-		.set_header("Location", "/settings")
-		.body(r#"Redirecting to <a href="/settings">settings</a>..."#)
+	Ok(res)
 }

src/subreddit.rs

@@ -1,7 +1,7 @@
 // CRATES
 use crate::utils::*;
-use actix_web::{cookie::Cookie, HttpRequest, HttpResponse, Result};
 use askama::Template;
+use tide::{http::Cookie, Request, Response};
 use time::{Duration, OffsetDateTime};
@@ -25,14 +25,14 @@ struct WikiTemplate {
 }
 
 // SERVICES
-pub async fn page(req: HttpRequest) -> HttpResponse {
-	// Build Reddit API path
+pub async fn page(req: Request<()>) -> tide::Result {
 	let subscribed = cookie(&req, "subscriptions");
 	let front_page = cookie(&req, "front_page");
-	let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
+	let sort = req.param("sort").unwrap_or_else(|_| req.param("id").unwrap_or("hot")).to_string();
 
 	let sub = req
-		.match_info()
-		.get("sub")
+		.param("sub")
 		.map(String::from)
 		.unwrap_or(if front_page == "default" || front_page.is_empty() {
 			if subscribed.is_empty() {
@@ -44,7 +44,7 @@
 			front_page.to_owned()
 		});
 
-	let path = format!("/r/{}/{}.json?{}", sub, sort, req.query_string());
+	let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.url().query().unwrap_or_default());
 
 	match fetch_posts(&path, String::new()).await {
 		Ok((posts, after)) => {
@@ -54,7 +54,7 @@
 				subreddit(&sub).await.unwrap_or_default()
 			} else if sub == subscribed {
 				// Subscription feed
-				if req.path().starts_with("/r/") {
+				if req.url().path().starts_with("/r/") {
 					subreddit(&sub).await.unwrap_or_default()
 				} else {
 					Subreddit::default()
@@ -69,42 +69,55 @@
 				Subreddit::default()
 			};
 
-			let s = SubredditTemplate {
+			template(SubredditTemplate {
 				sub,
 				posts,
 				sort: (sort, param(&path, "t")),
 				ends: (param(&path, "after"), after),
 				prefs: prefs(req),
-			}
-			.render()
-			.unwrap();
-			HttpResponse::Ok().content_type("text/html").body(s)
+			})
 		}
 		Err(msg) => error(msg).await,
 	}
 }
 
 // Sub or unsub by setting subscription cookie using response "Set-Cookie" header
-pub async fn subscriptions(req: HttpRequest) -> HttpResponse {
-	let mut res = HttpResponse::Found();
-
-	let sub = req.match_info().get("sub").unwrap_or_default().to_string();
-	let action = req.match_info().get("action").unwrap_or_default().to_string();
-	let mut sub_list = prefs(req.to_owned()).subs;
+pub async fn subscriptions(req: Request<()>) -> tide::Result {
+	let sub = req.param("sub").unwrap_or_default().to_string();
+	let query = req.url().query().unwrap_or_default().to_string();
+	let action: Vec<String> = req.url().path().split('/').map(String::from).collect();
+
+	let mut sub_list = prefs(req).subs;
 
 	// Modify sub list based on action
-	if action == "subscribe" && !sub_list.contains(&sub) {
+	if action.contains(&"subscribe".to_string()) && !sub_list.contains(&sub) {
 		sub_list.push(sub.to_owned());
-		sub_list.sort_by_key(|a| a.to_lowercase());
-	} else if action == "unsubscribe" {
+		sub_list.sort_by_key(|a| a.to_lowercase())
+	} else if action.contains(&"unsubscribe".to_string()) {
 		sub_list.retain(|s| s != &sub);
 	}
 
+	// Redirect back to subreddit
+	// check for redirect parameter if unsubscribing from outside sidebar
+	let redirect_path = param(format!("/?{}", query).as_str(), "redirect");
+	let path = if !redirect_path.is_empty() {
+		format!("/{}/", redirect_path)
+	} else {
+		format!("/r/{}", sub)
+	};
+
+	let mut res = Response::builder(302)
+		.content_type("text/html")
+		.header("Location", path.to_owned())
+		.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path))
+		.build();
+
 	// Delete cookie if empty, else set
 	if sub_list.is_empty() {
-		res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
+		// res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
+		res.remove_cookie(Cookie::build("subscriptions", "").path("/").finish());
 	} else {
-		res.cookie(
+		res.insert_cookie(
 			Cookie::build("subscriptions", sub_list.join("+"))
 				.path("/")
 				.http_only(true)
@@ -113,38 +126,21 @@
 		);
 	}
 
-	// Redirect back to subreddit
-	// check for redirect parameter if unsubscribing from outside sidebar
-	let redirect_path = param(&req.uri().to_string(), "redirect");
-	let path = if !redirect_path.is_empty() && redirect_path.starts_with('/') {
-		redirect_path
-	} else {
-		format!("/r/{}", sub)
-	};
-
-	res
-		.content_type("text/html")
-		.set_header("Location", path.to_owned())
-		.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path))
+	Ok(res)
 }
 
-pub async fn wiki(req: HttpRequest) -> HttpResponse {
-	let sub = req.match_info().get("sub").unwrap_or("reddit.com").to_string();
-	let page = req.match_info().get("page").unwrap_or("index").to_string();
+pub async fn wiki(req: Request<()>) -> tide::Result {
+	let sub = req.param("sub").unwrap_or("reddit.com").to_string();
+	let page = req.param("page").unwrap_or("index").to_string();
 	let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
 
 	match request(path).await {
-		Ok(res) => {
-			let s = WikiTemplate {
-				sub,
-				wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
-				page,
-				prefs: prefs(req),
-			}
-			.render()
-			.unwrap();
-			HttpResponse::Ok().content_type("text/html").body(s)
-		}
+		Ok(res) => template(WikiTemplate {
+			sub,
+			wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
+			page,
+			prefs: prefs(req),
+		}),
 		Err(msg) => error(msg).await,
 	}
 }
@@ -163,8 +159,14 @@ async fn subreddit(sub: &str) -> Result<Subreddit, String> {
 	let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
 
 	// Fetch subreddit icon either from the community_icon or icon_img value
-	let community_icon: &str = res["data"]["community_icon"].as_str().map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
-	let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
+	let community_icon: &str = res["data"]["community_icon"]
+		.as_str()
+		.map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
+	let icon = if community_icon.is_empty() {
+		val(&res, "icon_img")
+	} else {
+		community_icon.to_string()
+	};
 
 	let sub = Subreddit {
 		name: val(&res, "display_name"),

src/user.rs

@@ -1,7 +1,7 @@
 // CRATES
-use crate::utils::{error, fetch_posts, format_url, param, prefs, request, Post, Preferences, User};
-use actix_web::{HttpRequest, HttpResponse, Result};
+use crate::utils::*;
 use askama::Template;
+use tide::Request;
 use time::OffsetDateTime;
@@ -16,13 +16,13 @@ struct UserTemplate {
 }
 
 // FUNCTIONS
-pub async fn profile(req: HttpRequest) -> HttpResponse {
+pub async fn profile(req: Request<()>) -> tide::Result {
 	// Build the Reddit JSON API path
-	let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
+	let path = format!("{}.json?{}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default());
 
 	// Retrieve other variables from Libreddit request
 	let sort = param(&path, "sort");
-	let username = req.match_info().get("username").unwrap_or("").to_string();
+	let username = req.param("name").unwrap_or("").to_string();
 
 	// Request user posts/comments from Reddit
 	let posts = fetch_posts(&path, "Comment".to_string()).await;
@@ -32,16 +32,13 @@
 			// If you can get user posts, also request user data
 			let user = user(&username).await.unwrap_or_default();
 
-			let s = UserTemplate {
+			template(UserTemplate {
 				user,
 				posts,
 				sort: (sort, param(&path, "t")),
 				ends: (param(&path, "after"), after),
 				prefs: prefs(req),
-			}
-			.render()
-			.unwrap();
-			HttpResponse::Ok().content_type("text/html").body(s)
+			})
 		}
 		// If there is an error show error page
 		Err(msg) => error(msg).await,
@@ -51,7 +48,7 @@
 // USER
 async fn user(name: &str) -> Result<User, String> {
 	// Build the Reddit JSON API path
-	let path: String = format!("/user/{}/about.json", name);
+	let path: String = format!("/user/{}/about.json?raw_json=1", name);
 
 	// Send a request to the url
 	match request(path).await {

src/utils.rs

@@ -1,15 +1,14 @@
 //
 // CRATES
 //
-use actix_web::{cookie::Cookie, HttpRequest, HttpResponse, Result};
 use askama::Template;
 use base64::encode;
 use cached::proc_macro::cached;
 use regex::Regex;
 use serde_json::{from_str, Value};
 use std::collections::HashMap;
+use tide::{http::url::Url, http::Cookie, Request, Response};
 use time::{Duration, OffsetDateTime};
-use url::Url;
 
 //
 // STRUCTS
@@ -147,7 +146,7 @@ pub struct Preferences {
 //
 
 // Build preferences from cookies
-pub fn prefs(req: HttpRequest) -> Preferences {
+pub fn prefs(req: Request<()>) -> Preferences {
 	Preferences {
 		theme: cookie(&req, "theme"),
 		front_page: cookie(&req, "front_page"),
@@ -155,21 +154,32 @@ pub fn prefs(req: Request<()>) -> Preferences {
 		wide: cookie(&req, "wide"),
 		show_nsfw: cookie(&req, "show_nsfw"),
 		comment_sort: cookie(&req, "comment_sort"),
-		subs: cookie(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
+		subs: cookie(&req, "subscriptions")
+			.split('+')
+			.map(String::from)
+			.filter(|s| !s.is_empty())
+			.collect(),
 	}
 }
 
 // Grab a query param from a url
 pub fn param(path: &str, value: &str) -> String {
 	match Url::parse(format!("https://libredd.it/{}", path).as_str()) {
-		Ok(url) => url.query_pairs().into_owned().collect::<HashMap<_, _>>().get(value).unwrap_or(&String::new()).to_owned(),
+		Ok(url) => url
+			.query_pairs()
+			.into_owned()
+			.collect::<HashMap<_, _>>()
+			.get(value)
+			.unwrap_or(&String::new())
+			.to_owned(),
 		_ => String::new(),
 	}
 }
 
 // Parse Cookie value from request
-pub fn cookie(req: &HttpRequest, name: &str) -> String {
-	actix_web::HttpMessage::cookie(req, name).unwrap_or_else(|| Cookie::new(name, "")).value().to_string()
+pub fn cookie(req: &Request<()>, name: &str) -> String {
+	let cookie = req.cookie(name).unwrap_or_else(|| Cookie::named(name));
+	cookie.value().to_string()
 }
@@ -177,7 +187,7 @@ pub fn format_url(url: &str) -> String {
 	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
 		String::new()
 	} else {
-		format!("/proxy/{}", encode(url).as_str())
+		format!("/proxy/{}/", encode(url).as_str())
 	}
 }
@@ -420,102 +430,57 @@
 // NETWORKING
 //
 
-pub async fn error(msg: String) -> HttpResponse {
+pub fn template(f: impl Template) -> tide::Result {
+	Ok(
+		Response::builder(200)
+			.content_type("text/html")
+			.body(f.render().unwrap_or_default())
+			.build(),
+	)
+}
+
+pub async fn error(msg: String) -> tide::Result {
 	let body = ErrorTemplate {
 		msg,
 		prefs: Preferences::default(),
 	}
 	.render()
 	.unwrap_or_default();
-	HttpResponse::NotFound().content_type("text/html").body(body)
+	Ok(Response::builder(404).content_type("text/html").body(body).build())
 }
 
 // Make a request to a Reddit API and parse the JSON response
 #[cached(size = 100, time = 30, result = true)]
 pub async fn request(path: String) -> Result<Value, String> {
 	let url = format!("https://www.reddit.com{}", path);
-	// Build reddit-compliant user agent for Libreddit
 	let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));
 
-	// Send request using awc
-	// async fn send(url: &str) -> Result<String, (bool, String)> {
-	// 	let client = actix_web::client::Client::default();
-	// 	let response = client.get(url).header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"))).send().await;
-	// 	match response {
-	// 		Ok(mut payload) => {
-	// 			// Get first number of response HTTP status code
-	// 			match payload.status().to_string().chars().next() {
-	// 				// If success
-	// 				Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap_or_default().to_vec()).unwrap_or_default()),
-	// 				// If redirection
-	// 				Some('3') => match payload.headers().get("location") {
-	// 					Some(location) => Err((true, location.to_str().unwrap_or_default().to_string())),
-	// 					None => Err((false, "Page not found".to_string())),
-	// 				},
-	// 				// Otherwise
-	// 				_ => Err((false, "Page not found".to_string())),
-	// 			}
-	// 		}
-	// 		Err(e) => { dbg!(e); Err((false, "Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())) },
-	// 	}
-	// }
-
-	// // Print error if debugging then return error based on error message
-	// fn err(url: String, msg: String) -> Result<Value, String> {
-	// 	// #[cfg(debug_assertions)]
-	// 	dbg!(format!("{} - {}", url, msg));
-	// 	Err(msg)
-	// };
-
-	// // Parse JSON from body. If parsing fails, return error
-	// fn json(url: String, body: String) -> Result<Value, String> {
-	// 	match from_str(body.as_str()) {
-	// 		Ok(json) => Ok(json),
-	// 		Err(_) => err(url, "Failed to parse page JSON data".to_string()),
-	// 	}
-	// }
-
-	// // Make request to Reddit using send function
-	// match send(&url).await {
-	// 	// If success, parse and return body
-	// 	Ok(body) => json(url, body),
-	// 	// Follow any redirects
-	// 	Err((true, location)) => match send(location.as_str()).await {
-	// 		// If success, parse and return body
-	// 		Ok(body) => json(url, body),
-	// 		// Follow any redirects again
-	// 		Err((true, location)) => err(url, location),
-	// 		// Return errors if request fails
-	// 		Err((_, msg)) => err(url, msg),
-	// 	},
-	// 	// Return errors if request fails
-	// 	Err((_, msg)) => err(url, msg),
-	// }
-
-	// Send request using ureq
-	match ureq::get(&url).set("User-Agent", user_agent.as_str()).call() {
+	// Send request using surf
+	let req = surf::get(&url).header("User-Agent", user_agent.as_str());
+	let client = surf::client().with(surf::middleware::Redirect::new(5));
+
+	let res = client.send(req).await;
+
+	let body = res.unwrap().take_body().into_string().await;
+
+	match body {
 		// If response is success
 		Ok(response) => {
 			// Parse the response from Reddit as JSON
-			let json_string = &response.into_string().unwrap_or_default();
-			match from_str(json_string) {
+			match from_str(&response) {
 				Ok(json) => Ok(json),
 				Err(e) => {
-					println!("{} - Failed to parse page JSON data: {} - {}", url, e, json_string);
+					println!("{} - Failed to parse page JSON data: {}", url, e);
 					Err("Failed to parse page JSON data".to_string())
 				}
 			}
 		}
-		// If response is error
-		Err(ureq::Error::Status(_, _)) => {
-			#[cfg(debug_assertions)]
-			dbg!(format!("{} - Page not found", url));
-			Err("Page not found".to_string())
-		}
 		// If failed to send request
 		Err(e) => {
 			println!("{} - Couldn't send request to Reddit: {}", url, e);
-			Err("Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())
+			Err("Couldn't send request to Reddit".to_string())
 		}
 	}
 }
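The new template() utility renders any askama Template into a 200 text/html Tide response, which is what lets the page handlers above collapse their .render().unwrap() boilerplate into a single call. A hedged usage sketch follows; the HelloTemplate struct, its inline source, and the hello handler are illustrative and assume this code lives inside the libreddit crate so that crate::utils::template is in scope.

use askama::Template;

// Illustrative template: the real pages use .html files under templates/.
#[derive(Template)]
#[template(source = "<h1>Hello, {{ name }}!</h1>", ext = "html")]
struct HelloTemplate {
	name: String,
}

// Hypothetical handler showing the pattern used by post.rs, search.rs, settings.rs and subreddit.rs.
async fn hello(_req: tide::Request<()>) -> tide::Result {
	crate::utils::template(HelloTemplate { name: "world".to_string() })
}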

static/manifest.json

@@ -1,15 +1,15 @@
 {
 	"name": "Libreddit",
 	"short_name": "Libreddit",
 	"display": "standalone",
 	"background_color": "#1F1F1F",
 	"description": "An alternative private front-end to Reddit",
 	"theme_color": "#1F1F1F",
 	"icons": [
 		{
 			"src": "./logo.png",
 			"sizes": "512x512",
 			"type": "image/png"
 		}
 	]
 }

templates/base.html

@@ -15,11 +15,11 @@
 		<!-- Android -->
 		<meta name="mobile-web-app-capable" content="yes">
 		<!-- iOS Logo -->
-		<link href="/touch-icon-iphone.png" rel="apple-touch-icon">
+		<link href="/touch-icon-iphone.png/" rel="apple-touch-icon">
 		<!-- PWA Manifest -->
-		<link rel="manifest" type="application/json" href="/manifest.json">
-		<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
-		<link rel="stylesheet" type="text/css" href="/style.css">
+		<link rel="manifest" type="application/json" href="/manifest.json/">
+		<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico/">
+		<link rel="stylesheet" type="text/css" href="/style.css/">
 	{% endblock %}
 	</head>
 	<body class="

templates/settings.html

@@ -9,7 +9,7 @@
 {% block content %}
 	<div id="settings">
-		<form action="/settings" method="POST">
+		<form action="/settings/" method="POST">
 			<div class="prefs">
 				<p>Appearance</p>
 				<div id="theme">
@@ -57,7 +57,7 @@
 				{% for sub in prefs.subs %}
 					<li>
 						<span>{{ sub }}</span>
-						<form action="/r/{{ sub }}/unsubscribe/?redirect=/settings" method="POST">
+						<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
 							<button class="unsubscribe">Unsubscribe</button>
 						</form>
 					</li>

templates/subreddit.html

@@ -80,11 +80,11 @@
 			</div>
 			<div id="sub_subscription">
 				{% if prefs.subs.contains(sub.name) %}
-					<form action="/r/{{ sub.name }}/unsubscribe" method="POST">
+					<form action="/r/{{ sub.name }}/unsubscribe/" method="POST">
 						<button class="unsubscribe">Unsubscribe</button>
 					</form>
 				{% else %}
-					<form action="/r/{{ sub.name }}/subscribe" method="POST">
+					<form action="/r/{{ sub.name }}/subscribe/" method="POST">
 						<button class="subscribe">Subscribe</button>
 					</form>
 				{% endif %}