Move from Actix Web to Tide (#99)
* Initial commit
* Port posts
* Pinpoint Tide Bug
* Revert testing
* Add basic sub support
* Unwrap nested routes
* Front page & sync templates
* Port remaining functions
* Log request errors
* Clean main and settings
* Handle /w/ requests
* Create template() util
* Reduce caching time to 30s
* Fix subscription redirects
* Handle frontpage sorting
This commit is contained in:
parent 402b3149e1
commit ebbdd7185f
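At a glance, the port changes every handler from actix-web's HttpRequest-to-HttpResponse shape into Tide's Request-to-Result shape, as the per-file diffs below show. A schematic before/after sketch (handler name hypothetical, not taken from the diff):

// Before (actix-web 3):
// async fn page(req: HttpRequest) -> HttpResponse {
//     HttpResponse::Ok().content_type("text/html").body("...")
// }

// After (tide 0.16): handlers are fallible and build a Response explicitly.
async fn page(_req: tide::Request<()>) -> tide::Result {
    Ok(tide::Response::builder(200).content_type("text/html").body("...").build())
}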
1742 Cargo.lock (generated)
File diff suppressed because it is too large
14 Cargo.toml
@@ -8,15 +8,15 @@ authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"

[dependencies]
tide = "0.16"
async-std = { version = "1", features = ["attributes"] }
surf = "2"
base64 = "0.13"
actix-web = { version = "3.3", features = ["rustls"] }
cached = "0.23"
futures = "0.3"
askama = "0.10"
ureq = "2"
serde = { version = "1.0", default_features = false, features = ["derive"] }
serde_json = "1.0"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
async-recursion = "0.3"
url = "2.2"
regex = "1.4"
regex = "1"
time = "0.2"
cached = "0.23"
@@ -1,4 +1,4 @@
edition = "2018"
tab_spaces = 2
hard_tabs = true
max_width = 175
max_width = 150
295 src/main.rs
@@ -1,9 +1,7 @@
// Import Crates
use actix_web::{
dev::{Service, ServiceResponse},
middleware, web, App, HttpResponse, HttpServer,
};
use futures::future::FutureExt;
// use askama::filters::format;
use surf::utils::async_trait;
use tide::{utils::After, Middleware, Next, Request, Response};

// Reference local files
mod post;
@@ -14,43 +12,103 @@ mod subreddit;
mod user;
mod utils;

// Build middleware
struct HttpsRedirect<HttpsOnly>(HttpsOnly);
struct NormalizePath;

#[async_trait]
impl<State, HttpsOnly> Middleware<State> for HttpsRedirect<HttpsOnly>
where
State: Clone + Send + Sync + 'static,
HttpsOnly: Into<bool> + Copy + Send + Sync + 'static,
{
async fn handle(&self, request: Request<State>, next: Next<'_, State>) -> tide::Result {
let secure = request.url().scheme() == "https";

if self.0.into() && !secure {
let mut secured = request.url().to_owned();
secured.set_scheme("https").unwrap_or_default();

Ok(Response::builder(302).header("Location", secured.to_string()).build())
} else {
Ok(next.run(request).await)
}
}
}

#[async_trait]
impl<State: Clone + Send + Sync + 'static> Middleware<State> for NormalizePath {
async fn handle(&self, request: Request<State>, next: Next<'_, State>) -> tide::Result {
if !request.url().path().ends_with('/') {
Ok(Response::builder(301).header("Location", format!("{}/", request.url().path())).build())
} else {
Ok(next.run(request).await)
}
}
}

// Create Services
async fn style() -> HttpResponse {
HttpResponse::Ok().content_type("text/css").body(include_str!("../static/style.css"))
async fn style(_req: Request<()>) -> tide::Result {
Ok(
Response::builder(200)
.content_type("text/css")
.body(include_str!("../static/style.css"))
.build(),
)
}

// Required for creating a PWA
async fn manifest() -> HttpResponse {
HttpResponse::Ok().content_type("application/json").body(include_str!("../static/manifest.json"))
async fn manifest(_req: Request<()>) -> tide::Result {
Ok(
Response::builder(200)
.content_type("application/json")
.body(include_str!("../static/manifest.json"))
.build(),
)
}

// Required for the manifest to be valid
async fn pwa_logo() -> HttpResponse {
HttpResponse::Ok().content_type("image/png").body(include_bytes!("../static/logo.png").as_ref())
async fn pwa_logo(_req: Request<()>) -> tide::Result {
Ok(
Response::builder(200)
.content_type("image/png")
.body(include_bytes!("../static/logo.png").as_ref())
.build(),
)
}

// Required for iOS App Icons
async fn iphone_logo() -> HttpResponse {
HttpResponse::Ok()
.content_type("image/png")
.body(include_bytes!("../static/touch-icon-iphone.png").as_ref())
async fn iphone_logo(_req: Request<()>) -> tide::Result {
Ok(
Response::builder(200)
.content_type("image/png")
.body(include_bytes!("../static/touch-icon-iphone.png").as_ref())
.build(),
)
}

async fn robots() -> HttpResponse {
HttpResponse::Ok()
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
.body("User-agent: *\nAllow: /")
async fn robots(_req: Request<()>) -> tide::Result {
Ok(
Response::builder(200)
.content_type("text/plain")
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
.body("User-agent: *\nAllow: /")
.build(),
)
}

async fn favicon() -> HttpResponse {
HttpResponse::Ok()
.content_type("image/x-icon")
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
.body(include_bytes!("../static/favicon.ico").as_ref())
async fn favicon(_req: Request<()>) -> tide::Result {
Ok(
Response::builder(200)
.content_type("image/vnd.microsoft.icon")
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
.body(include_bytes!("../static/favicon.ico").as_ref())
.build(),
)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
#[async_std::main]
async fn main() -> tide::Result<()> {
let mut address = "0.0.0.0:8080".to_string();
let mut force_https = false;

@@ -62,101 +120,96 @@ async fn main() -> std::io::Result<()> {
}
}

// start http server
// Start HTTP server
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), &address);

HttpServer::new(move || {
App::new()
// Redirect to HTTPS if "--redirect-https" enabled
.wrap_fn(move |req, srv| {
let secure = req.connection_info().scheme() == "https";
let https_url = format!("https://{}{}", req.connection_info().host(), req.uri().to_string());
srv.call(req).map(move |res: Result<ServiceResponse, _>| {
if force_https && !secure {
Ok(ServiceResponse::new(
res.unwrap().request().to_owned(),
HttpResponse::Found().header("Location", https_url).finish(),
))
} else {
res
}
})
})
// Append trailing slash and remove double slashes
.wrap(middleware::NormalizePath::default())
// Apply default headers for security
.wrap(
middleware::DefaultHeaders::new()
.header("Referrer-Policy", "no-referrer")
.header("X-Content-Type-Options", "nosniff")
.header("X-Frame-Options", "DENY")
.header(
"Content-Security-Policy",
"default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';",
),
)
// Default service in case no routes match
.default_service(web::get().to(|| utils::error("Nothing here".to_string())))
// Read static files
.route("/style.css/", web::get().to(style))
.route("/favicon.ico/", web::get().to(favicon))
.route("/robots.txt/", web::get().to(robots))
.route("/manifest.json/", web::get().to(manifest))
.route("/logo.png/", web::get().to(pwa_logo))
.route("/touch-icon-iphone.png/", web::get().to(iphone_logo))
// Proxy media through Libreddit
.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
// Browse user profile
.service(
web::scope("/{scope:user|u}").service(
web::scope("/{username}").route("/", web::get().to(user::profile)).service(
web::scope("/comments/{id}/{title}")
.route("/", web::get().to(post::item))
.route("/{comment_id}/", web::get().to(post::item)),
),
),
)
// Configure settings
.service(web::resource("/settings/").route(web::get().to(settings::get)).route(web::post().to(settings::set)))
// Subreddit services
.service(
web::scope("/r/{sub}")
// See posts and info about subreddit
.route("/", web::get().to(subreddit::page))
.route("/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
// Handle subscribe/unsubscribe
.route("/{action:subscribe|unsubscribe}/", web::post().to(subreddit::subscriptions))
// View post on subreddit
.service(
web::scope("/comments/{id}/{title}")
.route("/", web::get().to(post::item))
.route("/{comment_id}/", web::get().to(post::item)),
)
// Search inside subreddit
.route("/search/", web::get().to(search::find))
// View wiki of subreddit
.service(
web::scope("/{scope:wiki|w}")
.route("/", web::get().to(subreddit::wiki))
.route("/{page}/", web::get().to(subreddit::wiki)),
),
)
// Front page
.route("/", web::get().to(subreddit::page))
.route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
// View Reddit wiki
.service(
web::scope("/wiki")
.route("/", web::get().to(subreddit::wiki))
.route("/{page}/", web::get().to(subreddit::wiki)),
)
// Search all of Reddit
.route("/search/", web::get().to(search::find))
// Short link for post
.route("/{id:.{5,6}}/", web::get().to(post::item))
})
.bind(&address)
.unwrap_or_else(|e| panic!("Cannot bind to the address {}: {}", address, e))
.run()
.await
let mut app = tide::new();

// Redirect to HTTPS if "--redirect-https" enabled
app.with(HttpsRedirect(force_https));

// Append trailing slash and remove double slashes
app.with(NormalizePath);

// Apply default headers for security
app.with(After(|mut res: Response| async move {
res.insert_header("Referrer-Policy", "no-referrer");
res.insert_header("X-Content-Type-Options", "nosniff");
res.insert_header("X-Frame-Options", "DENY");
res.insert_header(
"Content-Security-Policy",
"default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';",
);
Ok(res)
}));

// Read static files
app.at("/style.css/").get(style);
app.at("/favicon.ico/").get(favicon);
app.at("/robots.txt/").get(robots);
app.at("/manifest.json/").get(manifest);
app.at("/logo.png/").get(pwa_logo);
app.at("/touch-icon-iphone.png/").get(iphone_logo);

// Proxy media through Libreddit
app.at("/proxy/*url/").get(proxy::handler);

// Browse user profile
app.at("/u/:name/").get(user::profile);
app.at("/u/:name/comments/:id/:title/").get(post::item);
app.at("/u/:name/comments/:id/:title/:comment/").get(post::item);

app.at("/user/:name/").get(user::profile);
app.at("/user/:name/comments/:id/:title/").get(post::item);
app.at("/user/:name/comments/:id/:title/:comment/").get(post::item);

// Configure settings
app.at("/settings/").get(settings::get).post(settings::set);

// Subreddit services
// See posts and info about subreddit
app.at("/r/:sub/").get(subreddit::page);
// Handle subscribe/unsubscribe
app.at("/r/:sub/subscribe/").post(subreddit::subscriptions);
app.at("/r/:sub/unsubscribe/").post(subreddit::subscriptions);
// View post on subreddit
app.at("/r/:sub/comments/:id/:title/").get(post::item);
app.at("/r/:sub/comments/:id/:title/:comment_id/").get(post::item);
// Search inside subreddit
app.at("/r/:sub/search/").get(search::find);
// View wiki of subreddit
app.at("/r/:sub/w/").get(subreddit::wiki);
app.at("/r/:sub/w/:page/").get(subreddit::wiki);
app.at("/r/:sub/wiki/").get(subreddit::wiki);
app.at("/r/:sub/wiki/:page/").get(subreddit::wiki);
// Sort subreddit posts
app.at("/r/:sub/:sort/").get(subreddit::page);

// Front page
app.at("/").get(subreddit::page);

// View Reddit wiki
app.at("/w/").get(subreddit::wiki);
app.at("/w/:page/").get(subreddit::wiki);
app.at("/wiki/").get(subreddit::wiki);
app.at("/wiki/:page/").get(subreddit::wiki);

// Search all of Reddit
app.at("/search/").get(search::find);

// Short link for post
// .route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
// .route("/{id:.{5,6}}/", web::get().to(post::item))
app.at("/:id/").get(|req: Request<()>| async {
match req.param("id").unwrap_or_default() {
"best" | "hot" | "new" | "top" | "rising" | "controversial" => subreddit::page(req).await,
_ => post::item(req).await,
}
});

// Default service in case no routes match
app.at("*").get(|_| utils::error("Nothing here".to_string()));

app.listen("127.0.0.1:8080").await?;
Ok(())
}
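One pattern repeated across the ported handlers below: Tide's Request::param returns a Result, so the old req.match_info().get(...) lookups become req.param(...).unwrap_or(...) with an explicit fallback. A standalone sketch of that pattern (route and handler are hypothetical, not from this commit):

use tide::{Request, Response};

// Hypothetical handler for a route like "/greet/:name/".
async fn greet(req: Request<()>) -> tide::Result {
    let name = req.param("name").unwrap_or("world").to_string();
    Ok(Response::builder(200).content_type("text/plain").body(format!("Hello, {}!", name)).build())
}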
22 src/post.rs
@@ -1,6 +1,6 @@
// CRATES
use crate::utils::*;
use actix_web::{HttpRequest, HttpResponse};
use tide::Request;

use async_recursion::async_recursion;

@@ -16,9 +16,9 @@ struct PostTemplate {
prefs: Preferences,
}

pub async fn item(req: HttpRequest) -> HttpResponse {
pub async fn item(req: Request<()>) -> tide::Result {
// Build Reddit API path
let mut path: String = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let mut path: String = format!("{}.json?{}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default());

// Set sort to sort query parameter
let mut sort: String = param(&path, "sort");
@@ -29,12 +29,17 @@ pub async fn item(req: HttpRequest) -> HttpResponse {
// If there's no sort query but there's a default sort, set sort to default_sort
if sort.is_empty() && !default_sort.is_empty() {
sort = default_sort;
path = format!("{}.json?{}&sort={}&raw_json=1", req.path(), req.query_string(), sort);
path = format!(
"{}.json?{}&sort={}&raw_json=1",
req.url().path(),
req.url().query().unwrap_or_default(),
sort
);
}

// Log the post ID being fetched in debug mode
#[cfg(debug_assertions)]
dbg!(req.match_info().get("id").unwrap_or(""));
dbg!(req.param("id").unwrap_or(""));

// Send a request to the url, receive JSON in response
match request(path).await {
@@ -45,15 +50,12 @@ pub async fn item(req: HttpRequest) -> HttpResponse {
let comments = parse_comments(&res[1]).await;

// Use the Post and Comment structs to generate a website to show users
let s = PostTemplate {
template(PostTemplate {
comments,
post,
sort,
prefs: prefs(req),
}
.render()
.unwrap();
HttpResponse::Ok().content_type("text/html").body(s)
})
}
// If the Reddit API returns an error, exit and send error page to user
Err(msg) => error(msg).await,
35 src/proxy.rs
@@ -1,9 +1,8 @@
use actix_web::{client::Client, error, web, Error, HttpResponse, Result};
use url::Url;

use base64::decode;
use surf::{Body, Url};
use tide::{Request, Response};

pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> {
pub async fn handler(req: Request<()>) -> tide::Result {
let domains = vec![
// THUMBNAILS
"a.thumbs.redditmedia.com",
@@ -21,27 +20,31 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse>
"v.redd.it",
];

let decoded = decode(b64).map(|bytes| String::from_utf8(bytes).unwrap_or_default());
let decoded = decode(req.param("url").unwrap_or_default()).map(|bytes| String::from_utf8(bytes).unwrap_or_default());

match decoded {
Ok(media) => match Url::parse(media.as_str()) {
Ok(url) => {
let domain = url.domain().unwrap_or_default();
if domains.contains(&url.domain().unwrap_or_default()) {
let http = surf::get(url).await.unwrap();

if domains.contains(&domain) {
Client::default().get(media.replace("&amp;", "&")).send().await.map_err(Error::from).map(|res| {
HttpResponse::build(res.status())
let content_length = http.header("Content-Length").map(|v| v.to_string()).unwrap_or_default();
let content_type = http.content_type().map(|m| m.to_string()).unwrap_or_default();

Ok(
Response::builder(http.status())
.body(Body::from_reader(http, None))
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
.streaming(res)
})
.header("Content-Length", content_length)
.header("Content-Type", content_type)
.build(),
)
} else {
Err(error::ErrorForbidden("Resource must be from Reddit"))
Err(tide::Error::from_str(403, "Resource must be from Reddit"))
}
}
_ => Err(error::ErrorBadRequest("Can't parse base64 into URL")),
Err(_) => Err(tide::Error::from_str(400, "Can't parse base64 into URL")),
},
_ => Err(error::ErrorBadRequest("Can't decode base64")),
Err(_) => Err(tide::Error::from_str(400, "Can't decode base64")),
}
}
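For context on the handler above: media URLs reach /proxy/*url/ base64-encoded (see format_url in src/utils.rs later in this diff), and the handler decodes them before fetching. A rough round-trip sketch using the base64 0.13 API, with a made-up upstream URL:

fn main() {
    // Hypothetical upstream media URL, purely for illustration.
    let upstream = "https://i.redd.it/example.png";
    let encoded = base64::encode(upstream);
    println!("/proxy/{}/", encoded); // the form templates embed via format_url()

    // proxy::handler receives the encoded segment as the `url` route param and reverses it:
    let decoded = String::from_utf8(base64::decode(&encoded).unwrap_or_default()).unwrap_or_default();
    assert_eq!(decoded, upstream);
}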
src/search.rs
@@ -1,7 +1,7 @@
// CRATES
use crate::utils::{cookie, error, fetch_posts, param, prefs, request, val, Post, Preferences};
use actix_web::{HttpRequest, HttpResponse};
use crate::utils::{cookie, error, fetch_posts, param, prefs, request, template, val, Post, Preferences};
use askama::Template;
use tide::Request;

// STRUCTS
struct SearchParams {
@@ -32,10 +32,10 @@ struct SearchTemplate {
}

// SERVICES
pub async fn find(req: HttpRequest) -> HttpResponse {
pub async fn find(req: Request<()>) -> tide::Result {
let nsfw_results = if cookie(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
let path = format!("{}.json?{}{}", req.path(), req.query_string(), nsfw_results);
let sub = req.match_info().get("sub").unwrap_or("").to_string();
let path = format!("{}.json?{}{}", req.url().path(), req.url().query().unwrap_or_default(), nsfw_results);
let sub = req.param("sub").unwrap_or("").to_string();

let sort = if param(&path, "sort").is_empty() {
"relevance".to_string()
@@ -50,24 +50,20 @@ pub async fn find(req: HttpRequest) -> HttpResponse {
};

match fetch_posts(&path, String::new()).await {
Ok((posts, after)) => HttpResponse::Ok().content_type("text/html").body(
SearchTemplate {
posts,
subreddits,
sub,
params: SearchParams {
q: param(&path, "q"),
sort,
t: param(&path, "t"),
before: param(&path, "after"),
after,
restrict_sr: param(&path, "restrict_sr"),
},
prefs: prefs(req),
}
.render()
.unwrap(),
),
Ok((posts, after)) => template(SearchTemplate {
posts,
subreddits,
sub,
params: SearchParams {
q: param(&path, "q"),
sort,
t: param(&path, "t"),
before: param(&path, "after"),
after,
restrict_sr: param(&path, "restrict_sr"),
},
prefs: prefs(req),
}),
Err(msg) => error(msg).await,
}
}
src/settings.rs
@@ -1,7 +1,7 @@
// CRATES
use crate::utils::{prefs, Preferences};
use actix_web::{cookie::Cookie, web::Form, HttpRequest, HttpResponse};
use crate::utils::{prefs, template, Preferences};
use askama::Template;
use tide::{http::Cookie, Request, Response};
use time::{Duration, OffsetDateTime};

// STRUCTS
@@ -11,7 +11,7 @@ struct SettingsTemplate {
prefs: Preferences,
}

#[derive(serde::Deserialize)]
#[derive(serde::Deserialize, Default)]
pub struct SettingsForm {
theme: Option<String>,
front_page: Option<String>,
@@ -24,33 +24,35 @@ pub struct SettingsForm {
// FUNCTIONS

// Retrieve cookies from request "Cookie" header
pub async fn get(req: HttpRequest) -> HttpResponse {
let s = SettingsTemplate { prefs: prefs(req) }.render().unwrap();
HttpResponse::Ok().content_type("text/html").body(s)
pub async fn get(req: Request<()>) -> tide::Result {
template(SettingsTemplate { prefs: prefs(req) })
}

// Set cookies using response "Set-Cookie" header
pub async fn set(_req: HttpRequest, form: Form<SettingsForm>) -> HttpResponse {
let mut res = HttpResponse::Found();
pub async fn set(mut req: Request<()>) -> tide::Result {
let form: SettingsForm = req.body_form().await.unwrap_or_default();

let mut res = Response::builder(302)
.content_type("text/html")
.header("Location", "/settings")
.body(r#"Redirecting to <a href="/settings">settings</a>..."#)
.build();

let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "show_nsfw"];
let values = vec![&form.theme, &form.front_page, &form.layout, &form.wide, &form.comment_sort, &form.show_nsfw];
let values = vec![form.theme, form.front_page, form.layout, form.wide, form.comment_sort, form.show_nsfw];

for (i, name) in names.iter().enumerate() {
match values[i] {
Some(value) => res.cookie(
Cookie::build(name.to_owned(), value)
match values.get(i) {
Some(value) => res.insert_cookie(
Cookie::build(name.to_owned(), value.to_owned().unwrap_or_default())
.path("/")
.http_only(true)
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
.finish(),
),
None => res.del_cookie(&Cookie::named(name.to_owned())),
None => res.remove_cookie(Cookie::named(name.to_owned())),
};
}

res
.content_type("text/html")
.set_header("Location", "/settings")
.body(r#"Redirecting to <a href="/settings">settings</a>..."#)
Ok(res)
}
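The new set handler relies on Request::body_form, which deserializes a URL-encoded body into any serde::Deserialize type; the added Default derive is what lets a missing or malformed body fall back to all-None fields. A standalone sketch of that pattern (struct and route are hypothetical):

use tide::{Request, Response};

// Hypothetical form type mirroring the Deserialize + Default pattern of SettingsForm.
#[derive(serde::Deserialize, Default)]
struct DemoForm {
    theme: Option<String>,
}

async fn save(mut req: Request<()>) -> tide::Result {
    // A malformed or empty body falls back to Default (all fields None).
    let form: DemoForm = req.body_form().await.unwrap_or_default();
    Ok(Response::builder(302).header("Location", "/").body(form.theme.unwrap_or_default()).build())
}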
104 src/subreddit.rs
@@ -1,7 +1,7 @@
// CRATES
use crate::utils::*;
use actix_web::{cookie::Cookie, HttpRequest, HttpResponse, Result};
use askama::Template;
use tide::{http::Cookie, Request, Response};
use time::{Duration, OffsetDateTime};

// STRUCTS
@@ -25,14 +25,14 @@ struct WikiTemplate {
}

// SERVICES
pub async fn page(req: HttpRequest) -> HttpResponse {
pub async fn page(req: Request<()>) -> tide::Result {
// Build Reddit API path
let subscribed = cookie(&req, "subscriptions");
let front_page = cookie(&req, "front_page");
let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
let sort = req.param("sort").unwrap_or_else(|_| req.param("id").unwrap_or("hot")).to_string();

let sub = req
.match_info()
.get("sub")
.param("sub")
.map(String::from)
.unwrap_or(if front_page == "default" || front_page.is_empty() {
if subscribed.is_empty() {
@@ -44,7 +44,7 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
front_page.to_owned()
});

let path = format!("/r/{}/{}.json?{}", sub, sort, req.query_string());
let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.url().query().unwrap_or_default());

match fetch_posts(&path, String::new()).await {
Ok((posts, after)) => {
@@ -54,7 +54,7 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
subreddit(&sub).await.unwrap_or_default()
} else if sub == subscribed {
// Subscription feed
if req.path().starts_with("/r/") {
if req.url().path().starts_with("/r/") {
subreddit(&sub).await.unwrap_or_default()
} else {
Subreddit::default()
@@ -69,42 +69,55 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
Subreddit::default()
};

let s = SubredditTemplate {
template(SubredditTemplate {
sub,
posts,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), after),
prefs: prefs(req),
}
.render()
.unwrap();
HttpResponse::Ok().content_type("text/html").body(s)
})
}
Err(msg) => error(msg).await,
}
}

// Sub or unsub by setting subscription cookie using response "Set-Cookie" header
pub async fn subscriptions(req: HttpRequest) -> HttpResponse {
let mut res = HttpResponse::Found();
pub async fn subscriptions(req: Request<()>) -> tide::Result {
let sub = req.param("sub").unwrap_or_default().to_string();
let query = req.url().query().unwrap_or_default().to_string();
let action: Vec<String> = req.url().path().split('/').map(String::from).collect();

let sub = req.match_info().get("sub").unwrap_or_default().to_string();
let action = req.match_info().get("action").unwrap_or_default().to_string();
let mut sub_list = prefs(req.to_owned()).subs;
let mut sub_list = prefs(req).subs;

// Modify sub list based on action
if action == "subscribe" && !sub_list.contains(&sub) {
if action.contains(&"subscribe".to_string()) && !sub_list.contains(&sub) {
sub_list.push(sub.to_owned());
sub_list.sort_by_key(|a| a.to_lowercase());
} else if action == "unsubscribe" {
sub_list.sort_by_key(|a| a.to_lowercase())
} else if action.contains(&"unsubscribe".to_string()) {
sub_list.retain(|s| s != &sub);
}

// Redirect back to subreddit
// check for redirect parameter if unsubscribing from outside sidebar
let redirect_path = param(format!("/?{}", query).as_str(), "redirect");
let path = if !redirect_path.is_empty() {
format!("/{}/", redirect_path)
} else {
format!("/r/{}", sub)
};

let mut res = Response::builder(302)
.content_type("text/html")
.header("Location", path.to_owned())
.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path))
.build();

// Delete cookie if empty, else set
if sub_list.is_empty() {
res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
// res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
res.remove_cookie(Cookie::build("subscriptions", "").path("/").finish());
} else {
res.cookie(
res.insert_cookie(
Cookie::build("subscriptions", sub_list.join("+"))
.path("/")
.http_only(true)
@@ -113,38 +126,21 @@ pub async fn subscriptions(req: HttpRequest) -> HttpResponse {
);
}

// Redirect back to subreddit
// check for redirect parameter if unsubscribing from outside sidebar
let redirect_path = param(&req.uri().to_string(), "redirect");
let path = if !redirect_path.is_empty() && redirect_path.starts_with('/') {
redirect_path
} else {
format!("/r/{}", sub)
};

res
.content_type("text/html")
.set_header("Location", path.to_owned())
.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path))
Ok(res)
}

pub async fn wiki(req: HttpRequest) -> HttpResponse {
let sub = req.match_info().get("sub").unwrap_or("reddit.com").to_string();
let page = req.match_info().get("page").unwrap_or("index").to_string();
pub async fn wiki(req: Request<()>) -> tide::Result {
let sub = req.param("sub").unwrap_or("reddit.com").to_string();
let page = req.param("page").unwrap_or("index").to_string();
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);

match request(path).await {
Ok(res) => {
let s = WikiTemplate {
sub,
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
page,
prefs: prefs(req),
}
.render()
.unwrap();
HttpResponse::Ok().content_type("text/html").body(s)
}
Ok(res) => template(WikiTemplate {
sub,
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
page,
prefs: prefs(req),
}),
Err(msg) => error(msg).await,
}
}
@@ -163,8 +159,14 @@ async fn subreddit(sub: &str) -> Result<Subreddit, String> {
let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;

// Fetch subreddit icon either from the community_icon or icon_img value
let community_icon: &str = res["data"]["community_icon"].as_str().map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
let community_icon: &str = res["data"]["community_icon"]
.as_str()
.map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
let icon = if community_icon.is_empty() {
val(&res, "icon_img")
} else {
community_icon.to_string()
};

let sub = Subreddit {
name: val(&res, "display_name"),
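The redirect handling above goes through the param helper (defined in src/utils.rs later in this diff), which treats its first argument as a path plus query string and returns one value, or an empty string when absent. A small usage sketch with made-up values:

use crate::utils::param; // helper defined in src/utils.rs

fn demo() {
    assert_eq!(param("/?redirect=settings", "redirect"), "settings");
    assert_eq!(param("/r/rust/top.json?t=week", "t"), "week");
    assert_eq!(param("/r/rust/hot.json", "t"), ""); // absent params come back empty
}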
19 src/user.rs
@@ -1,7 +1,7 @@
// CRATES
use crate::utils::{error, fetch_posts, format_url, param, prefs, request, Post, Preferences, User};
use actix_web::{HttpRequest, HttpResponse, Result};
use crate::utils::*;
use askama::Template;
use tide::Request;
use time::OffsetDateTime;

// STRUCTS
@@ -16,13 +16,13 @@ struct UserTemplate {
}

// FUNCTIONS
pub async fn profile(req: HttpRequest) -> HttpResponse {
pub async fn profile(req: Request<()>) -> tide::Result {
// Build the Reddit JSON API path
let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
let path = format!("{}.json?{}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default());

// Retrieve other variables from Libreddit request
let sort = param(&path, "sort");
let username = req.match_info().get("username").unwrap_or("").to_string();
let username = req.param("name").unwrap_or("").to_string();

// Request user posts/comments from Reddit
let posts = fetch_posts(&path, "Comment".to_string()).await;
@@ -32,16 +32,13 @@ pub async fn profile(req: HttpRequest) -> HttpResponse {
// If you can get user posts, also request user data
let user = user(&username).await.unwrap_or_default();

let s = UserTemplate {
template(UserTemplate {
user,
posts,
sort: (sort, param(&path, "t")),
ends: (param(&path, "after"), after),
prefs: prefs(req),
}
.render()
.unwrap();
HttpResponse::Ok().content_type("text/html").body(s)
})
}
// If there is an error show error page
Err(msg) => error(msg).await,
@@ -51,7 +48,7 @@ pub async fn profile(req: HttpRequest) -> HttpResponse {
// USER
async fn user(name: &str) -> Result<User, String> {
// Build the Reddit JSON API path
let path: String = format!("/user/{}/about.json", name);
let path: String = format!("/user/{}/about.json?raw_json=1", name);

// Send a request to the url
match request(path).await {
115 src/utils.rs
@@ -1,15 +1,14 @@
//
// CRATES
//
use actix_web::{cookie::Cookie, HttpRequest, HttpResponse, Result};
use askama::Template;
use base64::encode;
use cached::proc_macro::cached;
use regex::Regex;
use serde_json::{from_str, Value};
use std::collections::HashMap;
use tide::{http::url::Url, http::Cookie, Request, Response};
use time::{Duration, OffsetDateTime};
use url::Url;

//
// STRUCTS
@@ -147,7 +146,7 @@ pub struct Preferences {
//

// Build preferences from cookies
pub fn prefs(req: HttpRequest) -> Preferences {
pub fn prefs(req: Request<()>) -> Preferences {
Preferences {
theme: cookie(&req, "theme"),
front_page: cookie(&req, "front_page"),
@@ -155,21 +154,32 @@ pub fn prefs(req: HttpRequest) -> Preferences {
wide: cookie(&req, "wide"),
show_nsfw: cookie(&req, "show_nsfw"),
comment_sort: cookie(&req, "comment_sort"),
subs: cookie(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
subs: cookie(&req, "subscriptions")
.split('+')
.map(String::from)
.filter(|s| !s.is_empty())
.collect(),
}
}

// Grab a query param from a url
pub fn param(path: &str, value: &str) -> String {
match Url::parse(format!("https://libredd.it/{}", path).as_str()) {
Ok(url) => url.query_pairs().into_owned().collect::<HashMap<_, _>>().get(value).unwrap_or(&String::new()).to_owned(),
Ok(url) => url
.query_pairs()
.into_owned()
.collect::<HashMap<_, _>>()
.get(value)
.unwrap_or(&String::new())
.to_owned(),
_ => String::new(),
}
}

// Parse Cookie value from request
pub fn cookie(req: &HttpRequest, name: &str) -> String {
actix_web::HttpMessage::cookie(req, name).unwrap_or_else(|| Cookie::new(name, "")).value().to_string()
pub fn cookie(req: &Request<()>, name: &str) -> String {
let cookie = req.cookie(name).unwrap_or_else(|| Cookie::named(name));
cookie.value().to_string()
}

// Direct urls to proxy if proxy is enabled
@@ -177,7 +187,7 @@ pub fn format_url(url: &str) -> String {
if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
String::new()
} else {
format!("/proxy/{}", encode(url).as_str())
format!("/proxy/{}/", encode(url).as_str())
}
}

@@ -420,102 +430,57 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
// NETWORKING
//

pub async fn error(msg: String) -> HttpResponse {
pub fn template(f: impl Template) -> tide::Result {
Ok(
Response::builder(200)
.content_type("text/html")
.body(f.render().unwrap_or_default())
.build(),
)
}

pub async fn error(msg: String) -> tide::Result {
let body = ErrorTemplate {
msg,
prefs: Preferences::default(),
}
.render()
.unwrap_or_default();
HttpResponse::NotFound().content_type("text/html").body(body)

Ok(Response::builder(404).content_type("text/html").body(body).build())
}

// Make a request to a Reddit API and parse the JSON response
#[cached(size = 100, time = 30, result = true)]
pub async fn request(path: String) -> Result<Value, String> {
let url = format!("https://www.reddit.com{}", path);
// Build reddit-compliant user agent for Libreddit
let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));

// Send request using awc
// async fn send(url: &str) -> Result<String, (bool, String)> {
// let client = actix_web::client::Client::default();
// let response = client.get(url).header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"))).send().await;
// Send request using surf
let req = surf::get(&url).header("User-Agent", user_agent.as_str());
let client = surf::client().with(surf::middleware::Redirect::new(5));

// match response {
// Ok(mut payload) => {
// // Get first number of response HTTP status code
// match payload.status().to_string().chars().next() {
// // If success
// Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap_or_default().to_vec()).unwrap_or_default()),
// // If redirection
// Some('3') => match payload.headers().get("location") {
// Some(location) => Err((true, location.to_str().unwrap_or_default().to_string())),
// None => Err((false, "Page not found".to_string())),
// },
// // Otherwise
// _ => Err((false, "Page not found".to_string())),
// }
// }
// Err(e) => { dbg!(e); Err((false, "Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())) },
// }
// }
let res = client.send(req).await;

// // Print error if debugging then return error based on error message
// fn err(url: String, msg: String) -> Result<Value, String> {
// // #[cfg(debug_assertions)]
// dbg!(format!("{} - {}", url, msg));
// Err(msg)
// };
let body = res.unwrap().take_body().into_string().await;

// // Parse JSON from body. If parsing fails, return error
// fn json(url: String, body: String) -> Result<Value, String> {
// match from_str(body.as_str()) {
// Ok(json) => Ok(json),
// Err(_) => err(url, "Failed to parse page JSON data".to_string()),
// }
// }

// // Make request to Reddit using send function
// match send(&url).await {
// // If success, parse and return body
// Ok(body) => json(url, body),
// // Follow any redirects
// Err((true, location)) => match send(location.as_str()).await {
// // If success, parse and return body
// Ok(body) => json(url, body),
// // Follow any redirects again
// Err((true, location)) => err(url, location),
// // Return errors if request fails
// Err((_, msg)) => err(url, msg),
// },
// // Return errors if request fails
// Err((_, msg)) => err(url, msg),
// }

// Send request using ureq
match ureq::get(&url).set("User-Agent", user_agent.as_str()).call() {
match body {
// If response is success
Ok(response) => {
// Parse the response from Reddit as JSON
let json_string = &response.into_string().unwrap_or_default();
match from_str(json_string) {
match from_str(&response) {
Ok(json) => Ok(json),
Err(e) => {
println!("{} - Failed to parse page JSON data: {} - {}", url, e, json_string);
println!("{} - Failed to parse page JSON data: {}", url, e);
Err("Failed to parse page JSON data".to_string())
}
}
}
// If response is error
Err(ureq::Error::Status(_, _)) => {
#[cfg(debug_assertions)]
dbg!(format!("{} - Page not found", url));
Err("Page not found".to_string())
}
// If failed to send request
Err(e) => {
println!("{} - Couldn't send request to Reddit: {}", url, e);
Err("Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())
Err("Couldn't send request to Reddit".to_string())
}
}
}
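The new template() helper above is what replaces the repeated .render().unwrap() plus HttpResponse::Ok() blocks in each handler: any askama Template becomes a 200 text/html response. A usage sketch with a hypothetical inline template, not taken from this commit:

use askama::Template;
use crate::utils::template; // the helper defined above

// Hypothetical template built from an inline source, purely for illustration.
#[derive(Template)]
#[template(source = "<h1>{{ msg }}</h1>", ext = "html")]
struct HelloTemplate {
    msg: String,
}

async fn hello(_req: tide::Request<()>) -> tide::Result {
    template(HelloTemplate { msg: "Hello".to_string() })
}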
static/manifest.json
@@ -1,15 +1,15 @@
{
"name": "Libreddit",
"short_name": "Libreddit",
"display": "standalone",
"background_color": "#1F1F1F",
"description": "An alternative private front-end to Reddit",
"theme_color": "#1F1F1F",
"icons": [
{
"src": "./logo.png",
"sizes": "512x512",
"type": "image/png"
}
]
}
"name": "Libreddit",
"short_name": "Libreddit",
"display": "standalone",
"background_color": "#1F1F1F",
"description": "An alternative private front-end to Reddit",
"theme_color": "#1F1F1F",
"icons": [
{
"src": "./logo.png",
"sizes": "512x512",
"type": "image/png"
}
]
}
@@ -15,11 +15,11 @@
<!-- Android -->
<meta name="mobile-web-app-capable" content="yes">
<!-- iOS Logo -->
<link href="/touch-icon-iphone.png" rel="apple-touch-icon">
<link href="/touch-icon-iphone.png/" rel="apple-touch-icon">
<!-- PWA Manifest -->
<link rel="manifest" type="application/json" href="/manifest.json">
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
<link rel="stylesheet" type="text/css" href="/style.css">
<link rel="manifest" type="application/json" href="/manifest.json/">
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico/">
<link rel="stylesheet" type="text/css" href="/style.css/">
{% endblock %}
</head>
<body class="
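Note: the trailing slashes added to these asset links line up with the new NormalizePath middleware in src/main.rs, which 301-redirects any request whose path does not end in a slash; linking the canonical form presumably avoids that extra redirect on every page load.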
@@ -9,7 +9,7 @@

{% block content %}
<div id="settings">
<form action="/settings" method="POST">
<form action="/settings/" method="POST">
<div class="prefs">
<p>Appearance</p>
<div id="theme">
@@ -57,7 +57,7 @@
{% for sub in prefs.subs %}
<li>
<span>{{ sub }}</span>
<form action="/r/{{ sub }}/unsubscribe/?redirect=/settings" method="POST">
<form action="/r/{{ sub }}/unsubscribe/?redirect=settings" method="POST">
<button class="unsubscribe">Unsubscribe</button>
</form>
</li>
@@ -80,11 +80,11 @@
</div>
<div id="sub_subscription">
{% if prefs.subs.contains(sub.name) %}
<form action="/r/{{ sub.name }}/unsubscribe" method="POST">
<form action="/r/{{ sub.name }}/unsubscribe/" method="POST">
<button class="unsubscribe">Unsubscribe</button>
</form>
{% else %}
<form action="/r/{{ sub.name }}/subscribe" method="POST">
<form action="/r/{{ sub.name }}/subscribe/" method="POST">
<button class="subscribe">Subscribe</button>
</form>
{% endif %}