diff --git a/Cargo.toml b/Cargo.toml
index 51e2d14..26dc2c0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,19 +3,23 @@ name = "libreddit"
description = " Alternative private front-end to Reddit"
license = "AGPL-3.0"
repository = "https://github.com/spikecodes/libreddit"
-version = "0.4.2"
+version = "0.5.0"
authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
edition = "2018"
[dependencies]
askama = { version = "0.10.5", default-features = false }
async-recursion = "0.3.2"
-async-std = { version = "1.9.0", features = ["attributes"] }
-async-tls = { version = "0.11.0", default-features = false, features = ["client"] }
cached = "0.23.0"
clap = { version = "2.33.3", default-features = false }
-regex = "1.4.4"
+regex = "1.4.5"
serde = { version = "1.0.124", features = ["derive"] }
+cookie = "0.15.0"
+futures-lite = "1.11.3"
+hyper = { version = "0.14.4", features = ["full"] }
+hyper-rustls = "0.22.1"
+route-recognizer = "0.3.0"
serde_json = "1.0.64"
-tide = { version = "0.16.0", default-features = false, features = ["h1-server", "cookies"] }
-time = "0.2.25"
+tokio = { version = "1.3.0", features = ["full"] }
+time = "0.2.26"
+url = "2.2.1"
diff --git a/src/client.rs b/src/client.rs
new file mode 100644
index 0000000..cd5fe15
--- /dev/null
+++ b/src/client.rs
@@ -0,0 +1,132 @@
+use cached::proc_macro::cached;
+use futures_lite::{future::Boxed, FutureExt};
+use hyper::{body::Buf, client, Body, Request, Response, Uri};
+use serde_json::Value;
+use std::{result::Result, str::FromStr};
+// use async_recursion::async_recursion;
+
+use crate::server::RequestExt;
+
+pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, String> {
+ let mut url = format.to_string();
+
+ for (name, value) in req.params().iter() {
+ url = url.replace(&format!("{{{}}}", name), value);
+ }
+
+ stream(&url).await
+}
+
+async fn stream(url: &str) -> Result<Response<Body>, String> {
+ // First parameter is target URL (mandatory).
+ let url = Uri::from_str(url).unwrap();
+
+ // Prepare the HTTPS connector.
+ let https = hyper_rustls::HttpsConnector::with_native_roots();
+
+ // Build the hyper client from the HTTPS connector.
+ let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
+
+ client
+ .get(url)
+ .await
+ .map(|mut res| {
+ let mut rm = |key: &str| res.headers_mut().remove(key);
+
+ rm("access-control-expose-headers");
+ rm("server");
+ rm("vary");
+ rm("etag");
+ rm("x-cdn");
+ rm("x-cdn-client-region");
+ rm("x-cdn-name");
+ rm("x-cdn-server-region");
+
+ res
+ })
+ .map_err(|e| e.to_string())
+}
+
+fn request(url: String) -> Boxed<Result<Response<Body>, String>> {
+ // Prepare the HTTPS connector.
+ let https = hyper_rustls::HttpsConnector::with_native_roots();
+
+ // Build the hyper client from the HTTPS connector.
+ let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
+
+ let req = |uri: String| {
+ Request::builder()
+ .method("GET")
+ .uri(&uri)
+ .header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION")))
+ .header("Host", "www.reddit.com")
+ .header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
+ .header("Accept-Language", "en-US,en;q=0.5")
+ .header("Connection", "keep-alive")
+ .body(Body::empty())
+ .expect("request builder")
+ };
+
+ async move {
+ match client.request(req(url)).await {
+ Ok(response) => {
+ if response.status().to_string().starts_with('3') {
+ request(response.headers().get("Location").unwrap().to_str().unwrap_or_default().to_string()).await
+ } else {
+ Ok(response)
+ }
+ }
+ Err(e) => Err(e.to_string()),
+ }
+ }
+ .boxed()
+}
+
+// Make a request to a Reddit API and parse the JSON response
+#[cached(size = 100, time = 30, result = true)]
+pub async fn json(path: String) -> Result<Value, String> {
+ // Build Reddit url from path
+ let url = format!("https://www.reddit.com{}", path);
+
+ // Closure to quickly build errors
+ let err = |msg: &str, e: String| -> Result<Value, String> {
+ eprintln!("{} - {}: {}", url, msg, e);
+ Err(msg.to_string())
+ };
+
+ // Fetch the url...
+ match request(url.clone()).await {
+ Ok(response) => {
+ // asynchronously aggregate the chunks of the body
+ match hyper::body::aggregate(response).await {
+ Ok(body) => {
+ // Parse the response from Reddit as JSON
+ match serde_json::from_reader(body.reader()) {
+ Ok(value) => {
+ let json: Value = value;
+ // If Reddit returned an error
+ if json["error"].is_i64() {
+ Err(
+ json["reason"]
+ .as_str()
+ .unwrap_or_else(|| {
+ json["message"].as_str().unwrap_or_else(|| {
+ eprintln!("{} - Error parsing reddit error", url);
+ "Error parsing reddit error"
+ })
+ })
+ .to_string(),
+ )
+ } else {
+ Ok(json)
+ }
+ }
+ Err(e) => err("Failed to parse page JSON data", e.to_string()),
+ }
+ }
+ Err(e) => err("Failed receiving JSON body from Reddit", e.to_string()),
+ }
+ }
+ Err(e) => err("Couldn't send request to Reddit", e.to_string()),
+ }
+}
diff --git a/src/main.rs b/src/main.rs
index 6526840..b516bb4 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -11,7 +11,6 @@
// Reference local files
mod post;
-mod proxy;
mod search;
mod settings;
mod subreddit;
@@ -19,99 +18,70 @@ mod user;
mod utils;
// Import Crates
-use clap::{App, Arg};
-use proxy::handler;
-use tide::{
- utils::{async_trait, After},
- Middleware, Next, Request, Response,
-};
+use clap::{App as cli, Arg};
+
+use futures_lite::FutureExt;
+use hyper::{Body, Request, Response};
+
+mod client;
+use client::proxy;
+use server::RequestExt;
use utils::{error, redirect};
-// Build middleware
-struct HttpsRedirect(HttpsOnly);
-struct NormalizePath;
-
-#[async_trait]
-impl<State, HttpsOnly> Middleware<State> for HttpsRedirect<HttpsOnly>
-where
- State: Clone + Send + Sync + 'static,
- HttpsOnly: Into<bool> + Copy + Send + Sync + 'static,
-{
- async fn handle(&self, request: Request<State>, next: Next<'_, State>) -> tide::Result {
- let secure = request.url().scheme() == "https";
-
- if self.0.into() && !secure {
- let mut secured = request.url().to_owned();
- secured.set_scheme("https").unwrap_or_default();
-
- Ok(redirect(secured.to_string()))
- } else {
- Ok(next.run(request).await)
- }
- }
-}
-
-#[async_trait]
-impl<State: Clone + Send + Sync + 'static> Middleware<State> for NormalizePath {
- async fn handle(&self, request: Request<State>, next: Next<'_, State>) -> tide::Result {
- let path = request.url().path();
- let query = request.url().query().unwrap_or_default();
- if path.ends_with('/') {
- Ok(next.run(request).await)
- } else {
- let normalized = if query.is_empty() {
- format!("{}/", path.replace("//", "/"))
- } else {
- format!("{}/?{}", path.replace("//", "/"), query)
- };
- Ok(redirect(normalized))
- }
- }
-}
+mod server;
// Create Services
// Required for the manifest to be valid
-async fn pwa_logo(_req: Request<()>) -> tide::Result {
- Ok(Response::builder(200).content_type("image/png").body(include_bytes!("../static/logo.png").as_ref()).build())
+async fn pwa_logo() -> Result<Response<Body>, String> {
+ Ok(
+ Response::builder()
+ .status(200)
+ .header("content-type", "image/png")
+ .body(include_bytes!("../static/logo.png").as_ref().into())
+ .unwrap_or_default(),
+ )
}
// Required for iOS App Icons
-async fn iphone_logo(_req: Request<()>) -> tide::Result {
+async fn iphone_logo() -> Result<Response<Body>, String> {
Ok(
- Response::builder(200)
- .content_type("image/png")
- .body(include_bytes!("../static/apple-touch-icon.png").as_ref())
- .build(),
+ Response::builder()
+ .status(200)
+ .header("content-type", "image/png")
+ .body(include_bytes!("../static/apple-touch-icon.png").as_ref().into())
+ .unwrap_or_default(),
)
}
-async fn favicon(_req: Request<()>) -> tide::Result {
+async fn favicon() -> Result<Response<Body>, String> {
Ok(
- Response::builder(200)
- .content_type("image/vnd.microsoft.icon")
+ Response::builder()
+ .status(200)
+ .header("content-type", "image/vnd.microsoft.icon")
.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
- .body(include_bytes!("../static/favicon.ico").as_ref())
- .build(),
+ .body(include_bytes!("../static/favicon.ico").as_ref().into())
+ .unwrap_or_default(),
)
}
-async fn resource(body: &str, content_type: &str, cache: bool) -> tide::Result {
- let mut res = Response::new(200);
+async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Response<Body>, String> {
+ let mut res = Response::builder()
+ .status(200)
+ .header("content-type", content_type)
+ .body(body.to_string().into())
+ .unwrap_or_default();
if cache {
- res.insert_header("Cache-Control", "public, max-age=1209600, s-maxage=86400");
+ res.headers_mut().insert("Cache-Control", "public, max-age=1209600, s-maxage=86400".parse().unwrap());
}
- res.set_content_type(content_type);
- res.set_body(body);
-
Ok(res)
}
-#[async_std::main]
-async fn main() -> tide::Result<()> {
- let matches = App::new("Libreddit")
+#[tokio::main]
+async fn main() {
+ let matches = cli::new("Libreddit")
.version(env!("CARGO_PKG_VERSION"))
.about("Private front-end for Reddit written in Rust ")
.arg(
@@ -136,142 +106,126 @@ async fn main() -> tide::Result<()> {
Arg::with_name("redirect-https")
.short("r")
.long("redirect-https")
- .help("Redirect all HTTP requests to HTTPS")
+ .help("Redirect all HTTP requests to HTTPS (no longer functional)")
.takes_value(false),
)
.get_matches();
let address = matches.value_of("address").unwrap_or("0.0.0.0");
let port = matches.value_of("port").unwrap_or("8080");
- let force_https = matches.is_present("redirect-https");
+ let _force_https = matches.is_present("redirect-https");
let listener = format!("{}:{}", address, port);
println!("Starting Libreddit...");
- // Start HTTP server
- let mut app = tide::new();
+ // Begin constructing a server
+ let mut app = server::Server::new();
- // Redirect to HTTPS if "--redirect-https" enabled
- app.with(HttpsRedirect(force_https));
-
- // Append trailing slash and remove double slashes
- app.with(NormalizePath);
-
- // Apply default headers for security
- app.with(After(|mut res: Response| async move {
- res.insert_header("Referrer-Policy", "no-referrer");
- res.insert_header("X-Content-Type-Options", "nosniff");
- res.insert_header("X-Frame-Options", "DENY");
- res.insert_header(
- "Content-Security-Policy",
- "default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';",
- );
- Ok(res)
- }));
+ // Define default headers (added to all responses)
+ app.default_headers = headers! {
+ "Referrer-Policy" => "no-referrer",
+ "X-Content-Type-Options" => "nosniff",
+ "X-Frame-Options" => "DENY",
+ "Content-Security-Policy" => "default-src 'none'; manifest-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none';"
+ };
// Read static files
- app.at("/style.css/").get(|_| resource(include_str!("../static/style.css"), "text/css", false));
+ app.at("/style.css").get(|_| resource(include_str!("../static/style.css"), "text/css", false).boxed());
app
- .at("/manifest.json/")
- .get(|_| resource(include_str!("../static/manifest.json"), "application/json", false));
- app.at("/robots.txt/").get(|_| resource("User-agent: *\nAllow: /", "text/plain", true));
- app.at("/favicon.ico/").get(favicon);
- app.at("/logo.png/").get(pwa_logo);
- app.at("/touch-icon-iphone.png/").get(iphone_logo);
- app.at("/apple-touch-icon.png/").get(iphone_logo);
+ .at("/manifest.json")
+ .get(|_| resource(include_str!("../static/manifest.json"), "application/json", false).boxed());
+ app.at("/robots.txt").get(|_| resource("User-agent: *\nAllow: /", "text/plain", true).boxed());
+ app.at("/favicon.ico").get(|_| favicon().boxed());
+ app.at("/logo.png").get(|_| pwa_logo().boxed());
+ app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed());
+ app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed());
// Proxy media through Libreddit
- app
- .at("/vid/:id/:size/") /* */
- .get(|req| handler(req, "https://v.redd.it/{}/DASH_{}", vec!["id", "size"]));
- app
- .at("/img/:id/") /* */
- .get(|req| handler(req, "https://i.redd.it/{}", vec!["id"]));
- app
- .at("/thumb/:point/:id/") /* */
- .get(|req| handler(req, "https://{}.thumbs.redditmedia.com/{}", vec!["point", "id"]));
- app
- .at("/emoji/:id/:name/") /* */
- .get(|req| handler(req, "https://emoji.redditmedia.com/{}/{}", vec!["id", "name"]));
- app
- .at("/preview/:loc/:id/:query/")
- .get(|req| handler(req, "https://{}view.redd.it/{}?{}", vec!["loc", "id", "query"]));
- app
- .at("/style/*path/") /* */
- .get(|req| handler(req, "https://styles.redditmedia.com/{}", vec!["path"]));
- app
- .at("/static/*path/") /* */
- .get(|req| handler(req, "https://www.redditstatic.com/{}", vec!["path"]));
+ app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
+ app.at("/img/:id").get(|r| proxy(r, "https://i.redd.it/{id}").boxed());
+ app.at("/thumb/:point/:id").get(|r| proxy(r, "https://{point}.thumbs.redditmedia.com/{id}").boxed());
+ app.at("/emoji/:id/:name").get(|r| proxy(r, "https://emoji.redditmedia.com/{id}/{name}").boxed());
+ app.at("/preview/:loc/:id/:query").get(|r| proxy(r, "https://{loc}view.redd.it/{id}?{query}").boxed());
+ app.at("/style/*path").get(|r| proxy(r, "https://styles.redditmedia.com/{path}").boxed());
+ app.at("/static/*path").get(|r| proxy(r, "https://www.redditstatic.com/{path}").boxed());
// Browse user profile
- app.at("/u/:name/").get(user::profile);
- app.at("/u/:name/comments/:id/:title/").get(post::item);
- app.at("/u/:name/comments/:id/:title/:comment_id/").get(post::item);
+ app
+ .at("/u/:name")
+ .get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
+ app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed());
+ app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
- app.at("/user/:name/").get(user::profile);
- app.at("/user/:name/comments/:id/").get(post::item);
- app.at("/user/:name/comments/:id/:title/").get(post::item);
- app.at("/user/:name/comments/:id/:title/:comment_id/").get(post::item);
+ app.at("/user/:name").get(|r| user::profile(r).boxed());
+ app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed());
+ app.at("/user/:name/comments/:id/:title").get(|r| post::item(r).boxed());
+ app.at("/user/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
// Configure settings
- app.at("/settings/").get(settings::get).post(settings::set);
- app.at("/settings/restore/").get(settings::restore);
+ app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed());
+ app.at("/settings/restore").get(|r| settings::restore(r).boxed());
// Subreddit services
- app.at("/r/:sub/").get(subreddit::community);
+ app.at("/r/:sub").get(|r| subreddit::community(r).boxed());
- app.at("/r/:sub/subscribe/").post(subreddit::subscriptions);
- app.at("/r/:sub/unsubscribe/").post(subreddit::subscriptions);
+ app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions(r).boxed());
+ app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions(r).boxed());
- app.at("/r/:sub/comments/:id/").get(post::item);
- app.at("/r/:sub/comments/:id/:title/").get(post::item);
- app.at("/r/:sub/comments/:id/:title/:comment_id/").get(post::item);
+ app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
+ app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
+ app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed());
- app.at("/r/:sub/search/").get(search::find);
+ app.at("/r/:sub/search").get(|r| search::find(r).boxed());
- app.at("/r/:sub/wiki/").get(subreddit::wiki);
- app.at("/r/:sub/wiki/:page/").get(subreddit::wiki);
- app.at("/r/:sub/w/").get(subreddit::wiki);
- app.at("/r/:sub/w/:page/").get(subreddit::wiki);
+ app.at("/r/:sub/wiki/").get(|r| subreddit::wiki(r).boxed());
+ app.at("/r/:sub/wiki/:page").get(|r| subreddit::wiki(r).boxed());
+ app.at("/r/:sub/w").get(|r| subreddit::wiki(r).boxed());
+ app.at("/r/:sub/w/:page").get(|r| subreddit::wiki(r).boxed());
- app.at("/r/:sub/:sort/").get(subreddit::community);
+ app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed());
// Comments handler
- app.at("/comments/:id/").get(post::item);
+ app.at("/comments/:id/").get(|r| post::item(r).boxed());
// Front page
- app.at("/").get(subreddit::community);
+ app.at("/").get(|r| subreddit::community(r).boxed());
// View Reddit wiki
- app.at("/w/").get(subreddit::wiki);
- app.at("/w/:page/").get(subreddit::wiki);
- app.at("/wiki/").get(subreddit::wiki);
- app.at("/wiki/:page/").get(subreddit::wiki);
+ app.at("/w").get(|r| subreddit::wiki(r).boxed());
+ app.at("/w/:page").get(|r| subreddit::wiki(r).boxed());
+ app.at("/wiki").get(|r| subreddit::wiki(r).boxed());
+ app.at("/wiki/:page").get(|r| subreddit::wiki(r).boxed());
// Search all of Reddit
- app.at("/search/").get(search::find);
+ app.at("/search").get(|r| search::find(r).boxed());
// Handle about pages
- app.at("/about/").get(|req| error(req, "About pages aren't here yet".to_string()));
+ app.at("/about").get(|req| error(req, "About pages aren't added yet".to_string()).boxed());
- app.at("/:id/").get(|req: Request<()>| async {
- match req.param("id") {
- // Sort front page
- Ok("best") | Ok("hot") | Ok("new") | Ok("top") | Ok("rising") | Ok("controversial") => subreddit::community(req).await,
- // Short link for post
- Ok(id) if id.len() > 4 && id.len() < 7 => post::item(req).await,
- // Error message for unknown pages
- _ => error(req, "Nothing here".to_string()).await,
+ app.at("/:id").get(|req: Request<Body>| {
+ async {
+ match req.param("id") {
+ // Sort front page
+ // Some("best") | Some("hot") | Some("new") | Some("top") | Some("rising") | Some("controversial") => subreddit::community(req).await,
+ // Short link for post
+ Some(id) if id.len() > 4 && id.len() < 7 => post::item(req).await,
+ // Error message for unknown pages
+ _ => error(req, "Nothing here".to_string()).await,
+ }
}
+ .boxed()
});
// Default service in case no routes match
- app.at("*").get(|req| error(req, "Nothing here".to_string()));
+ app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed());
println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), listener);
- app.listen(&listener).await?;
+ let server = app.listen(listener);
- Ok(())
+ // Run this server for... forever!
+ if let Err(e) = server.await {
+ eprintln!("Server error: {}", e);
+ }
}
diff --git a/src/post.rs b/src/post.rs
index 87998d7..5de9b47 100644
--- a/src/post.rs
+++ b/src/post.rs
@@ -1,9 +1,9 @@
// CRATES
+use crate::client::json;
use crate::esc;
-use crate::utils::{
- cookie, error, format_num, format_url, param, request, rewrite_urls, template, time, val, Author, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
-};
-use tide::Request;
+use crate::server::RequestExt;
+use crate::utils::{cookie, error, format_num, format_url, param, rewrite_urls, template, time, val, Author, Comment, Flags, Flair, FlairPart, Media, Post, Preferences};
+use hyper::{Body, Request, Response};
use async_recursion::async_recursion;
@@ -20,9 +20,9 @@ struct PostTemplate {
single_thread: bool,
}
-pub async fn item(req: Request<()>) -> tide::Result {
+pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
// Build Reddit API path
- let mut path: String = format!("{}.json?{}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default());
+ let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
// Set sort to sort query parameter
let mut sort: String = param(&path, "sort");
@@ -33,23 +33,23 @@ pub async fn item(req: Request<()>) -> tide::Result {
// If there's no sort query but there's a default sort, set sort to default_sort
if sort.is_empty() && !default_sort.is_empty() {
sort = default_sort;
- path = format!("{}.json?{}&sort={}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default(), sort);
+ path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), sort);
}
// Log the post ID being fetched in debug mode
#[cfg(debug_assertions)]
- dbg!(req.param("id").unwrap_or(""));
+ dbg!(req.param("id").unwrap_or_default());
- let single_thread = &req.param("comment_id").is_ok();
+ let single_thread = &req.param("comment_id").is_some();
let highlighted_comment = &req.param("comment_id").unwrap_or_default();
// Send a request to the url, receive JSON in response
- match request(path).await {
+ match json(path).await {
// Otherwise, grab the JSON output from the request
Ok(res) => {
// Parse the JSON into Post and Comment structs
let post = parse_post(&res[0]).await;
- let comments = parse_comments(&res[1], &post.permalink, &post.author.name, *highlighted_comment).await;
+ let comments = parse_comments(&res[1], &post.permalink, &post.author.name, highlighted_comment).await;
// Use the Post and Comment structs to generate a website to show users
template(PostTemplate {
diff --git a/src/proxy.rs b/src/proxy.rs
deleted file mode 100644
index 433f763..0000000
--- a/src/proxy.rs
+++ /dev/null
@@ -1,88 +0,0 @@
-use async_std::{io, net::TcpStream, prelude::*};
-use async_tls::TlsConnector;
-use tide::{http::url::Url, Request, Response};
-
-/// Handle tide routes to proxy by parsing `params` from `req`uest.
-pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide::Result {
- let mut url = format.to_string();
-
- for name in params {
- let param = req.param(name).unwrap_or_default();
- url = url.replacen("{}", param, 1);
- }
-
- request(url).await
-}
-
-/// Sends a request to a Reddit media domain and proxy the response.
-///
-/// Relays the `Content-Length` and `Content-Type` header.
-async fn request(url: String) -> tide::Result {
- // Parse url into parts
- let parts = Url::parse(&url)?;
- let host = parts.host().map(|host| host.to_string()).unwrap_or_default();
- let domain = parts.domain().unwrap_or_default();
- let path = format!("{}?{}", parts.path(), parts.query().unwrap_or_default());
- // Build reddit-compliant user agent for Libreddit
- let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));
-
- // Construct a request body
- let req = format!(
- "GET {} HTTP/1.1\r\nHost: {}\r\nAccept: */*\r\nConnection: close\r\nUser-Agent: {}\r\n\r\n",
- path, host, user_agent
- );
-
- // Initialize TLS connector for requests
- let connector = TlsConnector::default();
-
- // Open a TCP connection
- let tcp_stream = TcpStream::connect(format!("{}:443", domain)).await?;
-
- // Use the connector to start the handshake process
- let mut tls_stream = connector.connect(domain, tcp_stream).await?;
-
- // Write the aforementioned HTTP request to the stream
- tls_stream.write_all(req.as_bytes()).await?;
-
- // And read the response
- let mut writer = Vec::new();
- io::copy(&mut tls_stream, &mut writer).await?;
-
- // Find the delimiter which separates the body and headers
- match (0..writer.len()).find(|i| writer[i.to_owned()] == 10_u8 && writer[i - 2] == 10_u8) {
- Some(delim) => {
- // Split the response into the body and headers
- let split = writer.split_at(delim);
- let headers_str = String::from_utf8_lossy(split.0);
- let headers = headers_str.split("\r\n").collect::<Vec<&str>>();
- let body = split.1[1..split.1.len()].to_vec();
-
- // Parse the status code from the first header line
- let status: u16 = headers[0].split(' ').collect::<Vec<&str>>()[1].parse().unwrap_or_default();
-
- // Define a closure for easier header fetching
- let header = |name: &str| {
- headers
- .iter()
- .find(|x| x.starts_with(name))
- .map(|f| f.split(": ").collect::<Vec<&str>>()[1])
- .unwrap_or_default()
- };
-
- // Parse Content-Length and Content-Type from headers
- let content_length = header("Content-Length");
- let content_type = header("Content-Type");
-
- // Build response
- Ok(
- Response::builder(status)
- .body(tide::http::Body::from_bytes(body))
- .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
- .header("Content-Length", content_length)
- .header("Content-Type", content_type)
- .build(),
- )
- }
- None => Ok(Response::builder(503).body("Couldn't parse media".to_string()).build()),
- }
-}
diff --git a/src/search.rs b/src/search.rs
index 3970482..3fc9c66 100644
--- a/src/search.rs
+++ b/src/search.rs
@@ -1,7 +1,8 @@
// CRATES
-use crate::utils::{cookie, error, param, request, template, val, Post, Preferences};
+use crate::utils::{cookie, error, param, template, val, Post, Preferences};
+use crate::{client::json, RequestExt};
use askama::Template;
-use tide::Request;
+use hyper::{Body, Request, Response};
// STRUCTS
struct SearchParams {
@@ -32,10 +33,10 @@ struct SearchTemplate {
}
// SERVICES
-pub async fn find(req: Request<()>) -> tide::Result {
+pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
let nsfw_results = if cookie(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
- let path = format!("{}.json?{}{}", req.url().path(), req.url().query().unwrap_or_default(), nsfw_results);
- let sub = req.param("sub").unwrap_or("").to_string();
+ let path = format!("{}.json?{}{}", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
+ let sub = req.param("sub").unwrap_or_default();
let query = param(&path, "q");
let sort = if param(&path, "sort").is_empty() {
@@ -73,7 +74,7 @@ async fn search_subreddits(q: &str) -> Vec<Subreddit> {
let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
// Send a request to the url
- match request(subreddit_search_path).await {
+ match json(subreddit_search_path).await {
// If success, receive JSON in response
Ok(response) => {
match response["data"]["children"].as_array() {
diff --git a/src/server.rs b/src/server.rs
new file mode 100644
index 0000000..6d7ce8e
--- /dev/null
+++ b/src/server.rs
@@ -0,0 +1,207 @@
+use cookie::Cookie;
+use futures_lite::{future::Boxed, Future, FutureExt};
+use hyper::{
+ service::{make_service_fn, service_fn},
+ HeaderMap,
+};
+use hyper::{Body, Method, Request, Response, Server as HyperServer};
+use route_recognizer::{Params, Router};
+use std::{pin::Pin, result::Result};
+use time::Duration;
+
+type BoxResponse = Pin<Box<dyn Future<Output = Result<Response<Body>, String>> + Send>>;
+
+pub struct Route<'a> {
+ router: &'a mut Router<fn(Request<Body>) -> BoxResponse>,
+ path: String,
+}
+
+pub struct Server {
+ pub default_headers: HeaderMap,
+ router: Router<fn(Request<Body>) -> BoxResponse>,
+}
+
+#[macro_export]
+macro_rules! headers(
+ { $($key:expr => $value:expr),+ } => {
+ {
+ let mut m = hyper::HeaderMap::new();
+ $(
+ m.insert($key, hyper::header::HeaderValue::from_str($value).unwrap());
+ )+
+ m
+ }
+ };
+);
+
+pub trait RequestExt {
+ fn params(&self) -> Params;
+ fn param(&self, name: &str) -> Option<String>;
+ fn set_params(&mut self, params: Params) -> Option<Params>;
+ fn cookies(&self) -> Vec<Cookie>;
+ fn cookie(&self, name: &str) -> Option<Cookie>;
+}
+
+pub trait ResponseExt {
+ fn cookies(&self) -> Vec<Cookie>;
+ fn insert_cookie(&mut self, cookie: Cookie);
+ fn remove_cookie(&mut self, name: String);
+}
+
+impl RequestExt for Request<Body> {
+ fn params(&self) -> Params {
+ self.extensions().get::<Params>().unwrap_or(&Params::new()).to_owned()
+ // self.extensions()
+ // .get::<RouteParams>()
+ // .and_then(|meta| meta.route_params())
+ // .expect("Routerify: No RouteParams added while processing request")
+ }
+
+ fn param(&self, name: &str) -> Option<String> {
+ self.params().find(name).map(|s| s.to_owned())
+ }
+
+ fn set_params(&mut self, params: Params) -> Option<Params> {
+ self.extensions_mut().insert(params)
+ }
+
+ fn cookies(&self) -> Vec<Cookie> {
+ let mut cookies = Vec::new();
+ if let Some(header) = self.headers().get("Cookie") {
+ for cookie in header.to_str().unwrap_or_default().split("; ") {
+ cookies.push(Cookie::parse(cookie).unwrap_or(Cookie::named("")));
+ }
+ }
+ cookies
+ }
+
+ fn cookie(&self, name: &str) -> Option<Cookie> {
+ self.cookies().iter().find(|c| c.name() == name).map(|c| c.to_owned())
+ }
+}
+
+impl ResponseExt for Response<Body> {
+ fn cookies(&self) -> Vec<Cookie> {
+ let mut cookies = Vec::new();
+ for header in self.headers().get_all("Cookie") {
+ if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
+ cookies.push(cookie);
+ }
+ }
+ cookies
+ }
+
+ fn insert_cookie(&mut self, cookie: Cookie) {
+ self.headers_mut().append("Set-Cookie", cookie.to_string().parse().unwrap());
+ }
+
+ fn remove_cookie(&mut self, name: String) {
+ let mut cookie = Cookie::named(name);
+ cookie.set_path("/");
+ cookie.set_max_age(Duration::second());
+ self.headers_mut().append("Set-Cookie", cookie.to_string().parse().unwrap());
+ }
+}
+
+impl Route<'_> {
+ fn method(&mut self, method: Method, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
+ self.router.add(&format!("/{}{}", method.as_str(), self.path), dest);
+ self
+ }
+
+ /// Add an endpoint for `GET` requests
+ pub fn get(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
+ self.method(Method::GET, dest)
+ }
+
+ /// Add an endpoint for `POST` requests
+ pub fn post(&mut self, dest: fn(Request<Body>) -> BoxResponse) -> &mut Self {
+ self.method(Method::POST, dest)
+ }
+}
+
+impl Server {
+ pub fn new() -> Self {
+ Server {
+ default_headers: HeaderMap::new(),
+ router: Router::new(),
+ }
+ }
+
+ pub fn at(&mut self, path: &str) -> Route {
+ Route {
+ path: path.to_owned(),
+ router: &mut self.router,
+ }
+ }
+
+ pub fn listen(self, addr: String) -> Boxed<Result<(), hyper::Error>> {
+ let make_svc = make_service_fn(move |_conn| {
+ let router = self.router.clone();
+ let default_headers = self.default_headers.clone();
+
+ // This is the `Service` that will handle the connection.
+ // `service_fn` is a helper to convert a function that
+ // returns a Response into a `Service`.
+ // let shared_router = router.clone();
+ async move {
+ Ok::<_, String>(service_fn(move |req: Request<Body>| {
+ let headers = default_headers.clone();
+
+ // Remove double slashes
+ let mut path = req.uri().path().replace("//", "/");
+
+ // Remove trailing slashes
+ if path.ends_with('/') && path != "/" {
+ path.pop();
+ }
+
+ // Match the visited path with an added route
+ match router.recognize(&format!("/{}{}", req.method().as_str(), path)) {
+ // If a route was configured for this path
+ Ok(found) => {
+ let mut parammed = req;
+ parammed.set_params(found.params().to_owned());
+
+ // Run the route's function
+ let yeet = (found.handler().to_owned().to_owned())(parammed);
+ async move {
+ let res: Result<Response<Body>, String> = yeet.await;
+ // Add default headers to response
+ res.map(|mut response| {
+ response.headers_mut().extend(headers);
+ response
+ })
+ }
+ .boxed()
+ }
+ // If there was a routing error
+ Err(e) => async move {
+ // Return a 404 error
+ let res: Result<Response<Body>, String> = Ok(Response::builder().status(404).body(e.into()).unwrap_or_default());
+ // Add default headers to response
+ res.map(|mut response| {
+ response.headers_mut().extend(headers);
+ response
+ })
+ }
+ .boxed(),
+ }
+ }))
+ }
+ });
+
+ let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr));
+
+ let server = HyperServer::bind(address).serve(make_svc);
+
+ let graceful = server.with_graceful_shutdown(shutdown_signal());
+
+ graceful.boxed()
+ }
+}
+
+async fn shutdown_signal() {
+ // Wait for the CTRL+C signal
+ tokio::signal::ctrl_c().await.expect("Failed to install CTRL+C signal handler");
+}
diff --git a/src/settings.rs b/src/settings.rs
index 03056e2..5318870 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -1,7 +1,12 @@
+use std::collections::HashMap;
+
// CRATES
+use crate::server::ResponseExt;
use crate::utils::{redirect, template, Preferences};
use askama::Template;
-use tide::{http::Cookie, Request};
+use cookie::Cookie;
+use futures_lite::StreamExt;
+use hyper::{Body, Request, Response};
use time::{Duration, OffsetDateTime};
// STRUCTS
@@ -11,7 +16,7 @@ struct SettingsTemplate {
prefs: Preferences,
}
-#[derive(serde::Deserialize, Default)]
+#[derive(serde::Deserialize, Default, Debug)]
#[serde(default)]
pub struct Form {
 theme: Option<String>,
@@ -27,29 +32,49 @@ pub struct Form {
// FUNCTIONS
// Retrieve cookies from request "Cookie" header
-pub async fn get(req: Request<()>) -> tide::Result {
+pub async fn get(req: Request<Body>) -> Result<Response<Body>, String> {
template(SettingsTemplate { prefs: Preferences::new(req) })
}
// Set cookies using response "Set-Cookie" header
-pub async fn set(mut req: Request<()>) -> tide::Result {
- let form: Form = req.body_form().await.unwrap_or_default();
+pub async fn set(req: Request<Body>) -> Result<Response<Body>, String> {
+ // Split the body into parts
+ let (parts, mut body) = req.into_parts();
+
+ // Grab existing cookies
+ let mut cookies = Vec::new();
+ for header in parts.headers.get_all("Cookie") {
+ if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
+ cookies.push(cookie);
+ }
+ }
+
+ // Aggregate the body...
+ // let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?;
+ let body_bytes = body
+ .try_fold(Vec::new(), |mut data, chunk| {
+ data.extend_from_slice(&chunk);
+ Ok(data)
+ })
+ .await
+ .map_err(|e| e.to_string())?;
+
+ let form = url::form_urlencoded::parse(&body_bytes).collect::<HashMap<_, _>>();
let mut res = redirect("/settings".to_string());
- let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "show_nsfw"];
- let values = vec![form.theme, form.front_page, form.layout, form.wide, form.comment_sort, form.show_nsfw];
+ let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "show_nsfw", "subscriptions"];
- for (i, name) in names.iter().enumerate() {
- match values.get(i) {
+ for name in names {
+ match form.get(name) {
Some(value) => res.insert_cookie(
- Cookie::build(name.to_owned(), value.to_owned().unwrap_or_default())
+ Cookie::build(name.to_owned(), value.to_owned())
.path("/")
.http_only(true)
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
.finish(),
),
- None => res.remove_cookie(Cookie::named(name.to_owned())),
+ None => res.remove_cookie(name.to_string()),
};
}
@@ -57,29 +82,41 @@ pub async fn set(mut req: Request<()>) -> tide::Result {
}
// Set cookies using response "Set-Cookie" header
-pub async fn restore(req: Request<()>) -> tide::Result {
- let form: Form = req.query()?;
+pub async fn restore(req: Request<Body>) -> Result<Response<Body>, String> {
+ // Split the body into parts
+ let (parts, _) = req.into_parts();
- let path = match form.redirect {
+ // Grab existing cookies
+ let mut cookies = Vec::new();
+ for header in parts.headers.get_all("Cookie") {
+ if let Ok(cookie) = Cookie::parse(header.to_str().unwrap_or_default()) {
+ cookies.push(cookie);
+ }
+ }
+
+ let query = parts.uri.query().unwrap_or_default().as_bytes();
+
+ let form = url::form_urlencoded::parse(query).collect::<HashMap<_, _>>();
+
+ let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "show_nsfw", "subscriptions"];
+
+ let path = match form.get("redirect") {
Some(value) => format!("/{}/", value),
None => "/".to_string(),
};
let mut res = redirect(path);
- let names = vec!["theme", "front_page", "layout", "wide", "comment_sort", "show_nsfw", "subscriptions"];
- let values = vec![form.theme, form.front_page, form.layout, form.wide, form.comment_sort, form.show_nsfw, form.subscriptions];
-
- for (i, name) in names.iter().enumerate() {
- match values.get(i) {
+ for name in names {
+ match form.get(name) {
Some(value) => res.insert_cookie(
- Cookie::build(name.to_owned(), value.to_owned().unwrap_or_default())
+ Cookie::build(name.to_owned(), value.to_owned())
.path("/")
.http_only(true)
.expires(OffsetDateTime::now_utc() + Duration::weeks(52))
.finish(),
),
- None => res.remove_cookie(Cookie::named(name.to_owned())),
+ None => res.remove_cookie(name.to_string()),
};
}
diff --git a/src/subreddit.rs b/src/subreddit.rs
index fe464bd..79d3716 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -1,8 +1,10 @@
// CRATES
use crate::esc;
-use crate::utils::{cookie, error, format_num, format_url, param, redirect, request, rewrite_urls, template, val, Post, Preferences, Subreddit};
+use crate::utils::{cookie, error, format_num, format_url, param, redirect, rewrite_urls, template, val, Post, Preferences, Subreddit};
+use crate::{client::json, server::ResponseExt, RequestExt};
use askama::Template;
-use tide::{http::Cookie, Request};
+use cookie::Cookie;
+use hyper::{Body, Request, Response};
use time::{Duration, OffsetDateTime};
// STRUCTS
@@ -26,11 +28,11 @@ struct WikiTemplate {
}
// SERVICES
-pub async fn community(req: Request<()>) -> tide::Result {
+pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
// Build Reddit API path
let subscribed = cookie(&req, "subscriptions");
let front_page = cookie(&req, "front_page");
- let sort = req.param("sort").unwrap_or_else(|_| req.param("id").unwrap_or("hot")).to_string();
+ let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or("hot".to_string()));
let sub = req.param("sub").map(String::from).unwrap_or(if front_page == "default" || front_page.is_empty() {
if subscribed.is_empty() {
@@ -42,7 +44,7 @@ pub async fn community(req: Request<()>) -> tide::Result {
front_page.to_owned()
});
- let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.url().query().unwrap_or_default());
+ let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.uri().query().unwrap_or_default());
match Post::fetch(&path, String::new()).await {
Ok((posts, after)) => {
@@ -52,7 +54,7 @@ pub async fn community(req: Request<()>) -> tide::Result {
subreddit(&sub).await.unwrap_or_default()
} else if sub == subscribed {
// Subscription feed
- if req.url().path().starts_with("/r/") {
+ if req.uri().path().starts_with("/r/") {
subreddit(&sub).await.unwrap_or_default()
} else {
Subreddit::default()
@@ -85,10 +87,10 @@ pub async fn community(req: Request<()>) -> tide::Result {
}
// Sub or unsub by setting subscription cookie using response "Set-Cookie" header
-pub async fn subscriptions(req: Request<()>) -> tide::Result {
+pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String> {
let sub = req.param("sub").unwrap_or_default().to_string();
- let query = req.url().query().unwrap_or_default().to_string();
- let action: Vec<String> = req.url().path().split('/').map(String::from).collect();
+ let query = req.uri().query().unwrap_or_default().to_string();
+ let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
let mut sub_list = Preferences::new(req).subscriptions;
@@ -119,8 +121,7 @@ pub async fn subscriptions(req: Request<()>) -> tide::Result {
// Delete cookie if empty, else set
if sub_list.is_empty() {
- // res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
- res.remove_cookie(Cookie::build("subscriptions", "").path("/").finish());
+ res.remove_cookie("subscriptions".to_string());
} else {
res.insert_cookie(
Cookie::build("subscriptions", sub_list.join("+"))
@@ -134,12 +135,12 @@ pub async fn subscriptions(req: Request<()>) -> tide::Result {
Ok(res)
}
-pub async fn wiki(req: Request<()>) -> tide::Result {
- let sub = req.param("sub").unwrap_or("reddit.com").to_string();
- let page = req.param("page").unwrap_or("index").to_string();
+pub async fn wiki(req: Request<Body>) -> Result<Response<Body>, String> {
+ let sub = req.param("sub").unwrap_or("reddit.com".to_string());
+ let page = req.param("page").unwrap_or("index".to_string());
let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
- match request(path).await {
+ match json(path).await {
Ok(response) => template(WikiTemplate {
sub,
wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or_default()),
@@ -156,7 +157,7 @@ async fn subreddit(sub: &str) -> Result<Subreddit, String> {
let path: String = format!("/r/{}/about.json?raw_json=1", sub);
// Send a request to the url
- match request(path).await {
+ match json(path).await {
// If success, receive JSON in response
Ok(res) => {
// Metadata regarding the subreddit
diff --git a/src/user.rs b/src/user.rs
index 47fa0c3..4b2633a 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -1,8 +1,10 @@
// CRATES
+use crate::client::json;
use crate::esc;
-use crate::utils::{error, format_url, param, request, template, Post, Preferences, User};
+use crate::server::RequestExt;
+use crate::utils::{error, format_url, param, template, Post, Preferences, User};
use askama::Template;
-use tide::Request;
+use hyper::{Body, Request, Response};
use time::OffsetDateTime;
// STRUCTS
@@ -17,13 +19,13 @@ struct UserTemplate {
}
// FUNCTIONS
-pub async fn profile(req: Request<()>) -> tide::Result {
+pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
// Build the Reddit JSON API path
- let path = format!("{}.json?{}&raw_json=1", req.url().path(), req.url().query().unwrap_or_default());
+ let path = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
// Retrieve other variables from Libreddit request
let sort = param(&path, "sort");
- let username = req.param("name").unwrap_or("").to_string();
+ let username = req.param("name").unwrap_or_default();
// Request user posts/comments from Reddit
let posts = Post::fetch(&path, "Comment".to_string()).await;
@@ -52,7 +54,7 @@ async fn user(name: &str) -> Result<User, String> {
let path: String = format!("/user/{}/about.json?raw_json=1", name);
// Send a request to the url
- match request(path).await {
+ match json(path).await {
// If success, receive JSON in response
Ok(res) => {
// Grab creation date as unix timestamp
diff --git a/src/utils.rs b/src/utils.rs
index 42ef14b..76250c4 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,17 +1,15 @@
//
// CRATES
//
-use crate::esc;
+use crate::{client::json, esc, server::RequestExt};
use askama::Template;
-use async_recursion::async_recursion;
-use async_std::{io, net::TcpStream, prelude::*};
-use async_tls::TlsConnector;
-use cached::proc_macro::cached;
+use cookie::Cookie;
+use hyper::{Body, Request, Response};
use regex::Regex;
-use serde_json::{from_str, Error, Value};
+use serde_json::Value;
use std::collections::HashMap;
-use tide::{http::url::Url, http::Cookie, Request, Response};
use time::{Duration, OffsetDateTime};
+use url::Url;
// Post flair with content, background color and foreground color
pub struct Flair {
@@ -204,7 +202,7 @@ impl Post {
let post_list;
// Send a request to the url
- match request(path.to_string()).await {
+ match json(path.to_string()).await {
// If success, receive JSON in response
Ok(response) => {
res = response;
@@ -372,7 +370,7 @@ pub struct Preferences {
impl Preferences {
// Build preferences from cookies
- pub fn new(req: Request<()>) -> Self {
+ pub fn new(req: Request<Body>) -> Self {
Self {
theme: cookie(&req, "theme"),
front_page: cookie(&req, "front_page"),
@@ -398,7 +396,7 @@ pub fn param(path: &str, value: &str) -> String {
}
// Parse a cookie value from request
-pub fn cookie(req: &Request<()>, name: &str) -> String {
+pub fn cookie(req: &Request<Body>, name: &str) -> String {
let cookie = req.cookie(name).unwrap_or_else(|| Cookie::named(name));
cookie.value().to_string()
}
@@ -512,19 +510,26 @@ macro_rules! esc {
// NETWORKING
//
-pub fn template(t: impl Template) -> tide::Result {
- Ok(Response::builder(200).content_type("text/html").body(t.render().unwrap_or_default()).build())
+pub fn template(t: impl Template) -> Result<Response<Body>, String> {
+ Ok(
+ Response::builder()
+ .status(200)
+ .header("content-type", "text/html")
+ .body(t.render().unwrap_or_default().into())
+ .unwrap_or_default(),
+ )
}
-pub fn redirect(path: String) -> Response {
- Response::builder(302)
- .content_type("text/html")
+pub fn redirect(path: String) -> Response<Body> {
+ Response::builder()
+ .status(302)
+ .header("content-type", "text/html")
.header("Location", &path)
- .body(format!("Redirecting to {0}...", path))
- .build()
+ .body(format!("Redirecting to {0}...", path).into())
+ .unwrap_or_default()
}
-pub async fn error(req: Request<()>, msg: String) -> tide::Result {
+pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
let body = ErrorTemplate {
msg,
prefs: Preferences::new(req),
@@ -532,93 +537,51 @@ pub async fn error(req: Request<()>, msg: String) -> tide::Result {
.render()
.unwrap_or_default();
- Ok(Response::builder(404).content_type("text/html").body(body).build())
+ Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}
-#[async_recursion]
-async fn connect(path: String) -> io::Result<String> {
- // Build reddit-compliant user agent for Libreddit
- let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));
+// #[async_recursion]
+// async fn connect(path: String) -> io::Result<String> {
- // Construct an HTTP request body
- let req = format!(
- "GET {} HTTP/1.1\r\nHost: www.reddit.com\r\nAccept: */*\r\nConnection: close\r\nUser-Agent: {}\r\n\r\n",
- path, user_agent
- );
+// // Construct an HTTP request body
+// let req = format!(
+// "GET {} HTTP/1.1\r\nHost: www.reddit.com\r\nAccept: */*\r\nConnection: close\r\nUser-Agent: {}\r\n\r\n",
+// path, user_agent
+// );
- // Open a TCP connection
- let tcp_stream = TcpStream::connect("www.reddit.com:443").await?;
+// // Open a TCP connection
+// let tcp_stream = TcpStream::connect("www.reddit.com:443").await?;
- // Initialize TLS connector for requests
- let connector = TlsConnector::default();
+// // Initialize TLS connector for requests
+// let connector = TlsConnector::default();
- // Use the connector to start the handshake process
- let mut tls_stream = connector.connect("www.reddit.com", tcp_stream).await?;
+// // Use the connector to start the handshake process
+// let mut tls_stream = connector.connect("www.reddit.com", tcp_stream).await?;
- // Write the crafted HTTP request to the stream
- tls_stream.write_all(req.as_bytes()).await?;
+// // Write the crafted HTTP request to the stream
+// tls_stream.write_all(req.as_bytes()).await?;
- // And read the response
- let mut writer = Vec::new();
- io::copy(&mut tls_stream, &mut writer).await?;
- let response = String::from_utf8_lossy(&writer).to_string();
+// // And read the response
+// let mut writer = Vec::new();
+// io::copy(&mut tls_stream, &mut writer).await?;
+// let response = String::from_utf8_lossy(&writer).to_string();
- let split = response.split("\r\n\r\n").collect::<Vec<&str>>();
+// let split = response.split("\r\n\r\n").collect::<Vec<&str>>();
- let headers = split[0].split("\r\n").collect::<Vec<&str>>();
- let status: i16 = headers[0].split(' ').collect::<Vec<&str>>()[1].parse().unwrap_or(200);
- let body = split[1].to_string();
+// let headers = split[0].split("\r\n").collect::<Vec<&str>>();
+// let status: i16 = headers[0].split(' ').collect::<Vec<&str>>()[1].parse().unwrap_or(200);
+// let body = split[1].to_string();
- if (300..400).contains(&status) {
- let location = headers
- .iter()
- .find(|header| header.starts_with("location:"))
- .map(|f| f.to_owned())
- .unwrap_or_default()
- .split(": ")
- .collect::<Vec<&str>>()[1];
- connect(location.replace("https://www.reddit.com", "")).await
- } else {
- Ok(body)
- }
-}
-
-// Make a request to a Reddit API and parse the JSON response
-#[cached(size = 100, time = 30, result = true)]
-pub async fn request(path: String) -> Result<Value, String> {
- let url = format!("https://www.reddit.com{}", path);
-
- let err = |msg: &str, e: String| -> Result<Value, String> {
- eprintln!("{} - {}: {}", url, msg, e);
- Err(msg.to_string())
- };
-
- match connect(path).await {
- Ok(body) => {
- // Parse the response from Reddit as JSON
- let parsed: Result<Value, Error> = from_str(&body);
- match parsed {
- Ok(json) => {
- // If Reddit returned an error
- if json["error"].is_i64() {
- Err(
- json["reason"]
- .as_str()
- .unwrap_or_else(|| {
- json["message"].as_str().unwrap_or_else(|| {
- eprintln!("{} - Error parsing reddit error", url);
- "Error parsing reddit error"
- })
- })
- .to_string(),
- )
- } else {
- Ok(json)
- }
- }
- Err(e) => err("Failed to parse page JSON data", e.to_string()),
- }
- }
- Err(e) => err("Couldn't send request to Reddit", e.to_string()),
- }
-}
+// if (300..400).contains(&status) {
+// let location = headers
+// .iter()
+// .find(|header| header.starts_with("location:"))
+// .map(|f| f.to_owned())
+// .unwrap_or_default()
+// .split(": ")
+// .collect::<Vec<&str>>()[1];
+// connect(location.replace("https://www.reddit.com", "")).await
+// } else {
+// Ok(body)
+// }
+// }