Compare commits

18 Commits

| Author | SHA1 | Date |
|---|---|---|
| | f0b69f8a4a | |
| | 118ff9485c | |
| | 4a51b7cfb0 | |
| | f877face80 | |
| | f0e8deb000 | |
| | e70dfe2c0b | |
| | 2e89a85858 | |
| | e59b2b1346 | |
| | 1c36549134 | |
| | 5fb88d4744 | |
| | 6c7188a1b9 | |
| | 84009fbb8e | |
| | bf783c2f3a | |
| | 213babb057 | |
| | 7dbc02d930 | |
| | 10873dd0c6 | |
| | c0d1519341 | |
| | 8709c49f39 | |
.gitignore (vendored, 1 changed line)

```diff
@@ -1 +1,2 @@
 /target
+Cargo.lock
```

Cargo.lock (generated, 2,184 changed lines)

File diff suppressed because it is too large.
Cargo.toml (16 changed lines)

```diff
@@ -3,19 +3,19 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.3.1"
+version = "0.4.0"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2018"
 
 [dependencies]
-tide = { version = "0.16.0", default-features = false, features = ["h1-server", "cookies"] }
-async-std = { version = "1.9.0", features = ["attributes"] }
-surf = { version = "2.2.0", default-features = false, features = ["curl-client", "encoding"] }
-cached = "0.23.0"
 askama = { version = "0.10.5", default-features = false }
-serde = { version = "1.0.123", features = ["derive"] }
-serde_json = "1.0.64"
 async-recursion = "0.3.2"
-regex = "1.4.3"
+async-std = { version = "1.9.0", features = ["attributes"] }
+async-tls = { version = "0.11.0", default-features = false, features = ["client"] }
+cached = "0.23.0"
 clap = { version = "2.33.3", default-features = false }
+regex = "1.4.3"
+serde = { version = "1.0.124", features = ["derive"] }
+serde_json = "1.0.64"
+tide = { version = "0.16.0", default-features = false, features = ["h1-server", "cookies"] }
 time = "0.2.25"
```
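In sum: the `surf` HTTP client is removed in favor of direct `async-std` plus `async-tls` connections (implemented in `src/proxy.rs` and `src/utils.rs` below), `serde` is bumped from 1.0.123 to 1.0.124, the crate version moves from 0.3.1 to 0.4.0, and the `[dependencies]` section is alphabetized.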
README.md (16 changed lines)

````diff
@@ -2,7 +2,7 @@
 
 > An alternative private front-end to Reddit
 
 
 
 ---
 
@@ -43,13 +43,13 @@ Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new)
 |-|-|-|
 | [libredd.it](https://libredd.it) (official) | 🇺🇸 US | |
 | [libreddit.spike.codes](https://libreddit.spike.codes) (official) | 🇺🇸 US | |
-| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | ✅ |
+| [libreddit.dothq.co](https://libreddit.dothq.co) | 🇺🇸 US | |
 | [libreddit.kavin.rocks](https://libreddit.kavin.rocks) | 🇮🇳 IN | ✅ |
-| [libreddit.himiko.cloud](https://libreddit.himiko.cloud) | 🇧🇬 BG | |
+| [libreddit.himiko.cloud](https://libreddit.himiko.cloud) | 🇫🇮 FI | |
 | [libreddit.bcow.xyz](https://libreddit.bcow.xyz) | 🇺🇸 US | |
 | [spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion](http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion) | 🇮🇳 IN | |
 | [fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion](http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion) | 🇩🇪 DE | |
-| [libreddit.himiko7xl2skojc6odi7hykl626gt4qki3vxdbv33u2u3af76d6k32ad.onion](http://libreddit.himiko7xl2skojc6odi7hykl626gt4qki3vxdbv33u2u3af76d6k32ad.onion) | 🇧🇬 BG | |
+| [libreddit.himiko7xl2skojc6odi7hykl626gt4qki3vxdbv33u2u3af76d6k32ad.onion](http://libreddit.himiko7xl2skojc6odi7hykl626gt4qki3vxdbv33u2u3af76d6k32ad.onion) | 🇫🇮 FI | |
 | [dflv6yjt7il3n3tggf4qhcmkzbti2ppytqx3o7pjrzwgntutpewscyid.onion](http://dflv6yjt7il3n3tggf4qhcmkzbti2ppytqx3o7pjrzwgntutpewscyid.onion/) | 🇺🇸 US | |
 
 A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare). The checkmark will not be listed for a site which uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.
@@ -197,6 +197,14 @@ Once installed, deploy Libreddit to `0.0.0.0:8080` by running:
 libreddit
 ```
 
+## Proxying using NGINX
+
+**NOTE** If you're [proxying Libreddit through a NGINX Reverse Proxy](https://github.com/spikecodes/libreddit/issues/122#issuecomment-782226853), add
+```nginx
+proxy_http_version 1.1;
+```
+to your NGINX configuration file above your `proxy_pass` line.
+
 ## Building
 
 ```
````
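For orientation, a minimal sketch of where the added directive sits in an NGINX config. The server name is hypothetical; the upstream port is the `0.0.0.0:8080` default the README deploys to:

```nginx
server {
    listen 80;
    server_name libreddit.example.com;  # hypothetical domain

    location / {
        # Required when proxying Libreddit through NGINX (see the note above)
        proxy_http_version 1.1;
        proxy_pass http://127.0.0.1:8080;
    }
}
```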
src/main.rs (20 changed lines)

```diff
@@ -1,3 +1,14 @@
+// Global specifiers
+#![forbid(unsafe_code)]
+#![warn(clippy::pedantic, clippy::all)]
+#![allow(
+    clippy::needless_pass_by_value,
+    clippy::match_wildcard_for_single_variants,
+    clippy::cast_possible_truncation,
+    clippy::similar_names,
+    clippy::cast_possible_wrap
+)]
+
 // Reference local files
 mod post;
 mod proxy;
@@ -48,10 +59,10 @@ impl<State: Clone + Send + Sync + 'static> Middleware<State> for NormalizePath {
         if path.ends_with('/') {
             Ok(next.run(request).await)
         } else {
-            let normalized = if query != "" {
-                format!("{}/?{}", path.replace("//", "/"), query)
-            } else {
+            let normalized = if query.is_empty() {
                 format!("{}/", path.replace("//", "/"))
+            } else {
+                format!("{}/?{}", path.replace("//", "/"), query)
             };
             Ok(redirect(normalized))
         }
@@ -226,6 +237,9 @@ async fn main() -> tide::Result<()> {
 
     app.at("/r/:sub/:sort/").get(subreddit::page);
 
+    // Comments handler
+    app.at("/comments/:id/").get(post::item);
+
 // Front page
     app.at("/").get(subreddit::page);
 
```
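Many of the mechanical edits in this compare (`query != ""` becoming `query.is_empty()`, glob imports expanded into explicit lists, `"•"` becoming `"\u{2022}"`, inverted `if !…` branches) line up with the `clippy::pedantic` lints enabled above. The `NormalizePath` rewrite swaps the branches but leaves behavior unchanged. A standalone sketch of the resulting normalization, extracted into a plain function for illustration (the function name and inputs are assumptions):

```rust
// Collapse doubled slashes, append a trailing slash, keep the query string.
fn normalize(path: &str, query: &str) -> String {
    if query.is_empty() {
        format!("{}/", path.replace("//", "/"))
    } else {
        format!("{}/?{}", path.replace("//", "/"), query)
    }
}

fn main() {
    assert_eq!(normalize("/r/rust", ""), "/r/rust/");
    assert_eq!(normalize("/r/rust//top", "t=all"), "/r/rust/top/?t=all");
}
```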
src/post.rs

```diff
@@ -1,5 +1,7 @@
 // CRATES
-use crate::utils::*;
+use crate::utils::{
+    cookie, error, format_num, format_url, param, request, rewrite_urls, template, time, val, Author, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
+};
 use tide::Request;
 
 use async_recursion::async_recursion;
@@ -196,7 +198,7 @@ async fn parse_comments(json: &serde_json::Value, post_link: &str, post_author:
             distinguished: val(&comment, "distinguished"),
         },
         score: if data["score_hidden"].as_bool().unwrap_or_default() {
-            "•".to_string()
+            "\u{2022}".to_string()
         } else {
             format_num(score)
         },
```
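Escaping the bullet as `\u{2022}` keeps the rendered output identical; it likely satisfies the pedantic `clippy::non_ascii_literal` lint now enabled in `src/main.rs`. The same substitution appears in `src/utils.rs` below.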
src/proxy.rs (74 changed lines)

```diff
@@ -1,6 +1,8 @@
-use surf::Body;
-use tide::{Request, Response};
+use async_std::{io, net::TcpStream, prelude::*};
+use async_tls::TlsConnector;
+use tide::{http::url::Url, Request, Response};
 
+/// Handle tide routes to proxy by parsing `params` from `req`uest.
 pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide::Result {
     let mut url = format.to_string();
 
@@ -12,21 +14,75 @@ pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide:
     request(url).await
 }
 
+/// Sends a request to a Reddit media domain and proxy the response.
+///
+/// Relays the `Content-Length` and `Content-Type` header.
 async fn request(url: String) -> tide::Result {
-    match surf::get(url).await {
-        Ok(res) => {
-            let content_length = res.header("Content-Length").map(|v| v.to_string()).unwrap_or_default();
-            let content_type = res.content_type().map(|m| m.to_string()).unwrap_or_default();
+    // Parse url into parts
+    let parts = Url::parse(&url).unwrap();
+    let host = parts.host().unwrap().to_string();
+    let domain = parts.domain().unwrap_or_default();
+    let path = format!("{}?{}", parts.path(), parts.query().unwrap_or_default());
+    // Build reddit-compliant user agent for Libreddit
+    let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));
+
+    // Construct a request body
+    let req = format!(
+        "GET {} HTTP/1.1\r\nHost: {}\r\nAccept: */*\r\nConnection: close\r\nUser-Agent: {}\r\n\r\n",
+        path, host, user_agent
+    );
+
+    // Initialize TLS connector for requests
+    let connector = TlsConnector::default();
+
+    // Open a TCP connection
+    let tcp_stream = TcpStream::connect(format!("{}:443", domain)).await.unwrap();
+
+    // Use the connector to start the handshake process
+    let mut tls_stream = connector.connect(domain, tcp_stream).await.unwrap();
+
+    // Write the aforementioned HTTP request to the stream
+    tls_stream.write_all(req.as_bytes()).await.unwrap();
+
+    // And read the response
+    let mut writer = Vec::new();
+    io::copy(&mut tls_stream, &mut writer).await.unwrap();
+
+    // Find the delimiter which separates the body and headers
+    match (0..writer.len()).find(|i| writer[i.to_owned()] == 10_u8 && writer[i - 2] == 10_u8) {
+        Some(delim) => {
+            // Split the response into the body and headers
+            let split = writer.split_at(delim);
+            let headers_str = String::from_utf8_lossy(split.0);
+            let headers = headers_str.split("\r\n").collect::<Vec<&str>>();
+            let body = split.1[1..split.1.len()].to_vec();
+
+            // Parse the status code from the first header line
+            let status: u16 = headers[0].split(' ').collect::<Vec<&str>>()[1].parse().unwrap_or_default();
+
+            // Define a closure for easier header fetching
+            let header = |name: &str| {
+                headers
+                    .iter()
+                    .find(|x| x.starts_with(name))
+                    .map(|f| f.split(": ").collect::<Vec<&str>>()[1])
+                    .unwrap_or_default()
+            };
+
+            // Parse Content-Length and Content-Type from headers
+            let content_length = header("Content-Length");
+            let content_type = header("Content-Type");
 
+            // Build response
             Ok(
-                Response::builder(res.status())
-                    .body(Body::from_reader(res, None))
+                Response::builder(status)
+                    .body(tide::http::Body::from_bytes(body))
                     .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
                     .header("Content-Length", content_length)
                     .header("Content-Type", content_type)
                    .build(),
             )
         }
-        Err(e) => Ok(Response::builder(503).body(e.to_string()).build()),
+        None => Ok(Response::builder(503).body("Couldn't parse media".to_string()).build()),
     }
 }
```
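The subtlest line in the new `request` is the delimiter scan: it walks the raw response for an index `i` where both byte `i` and byte `i - 2` are LF (`10_u8`), which matches the tail of the `\r\n\r\n` separator between headers and body. A self-contained sketch of that scan, with a hypothetical `find_delim` helper that starts at index 2 to avoid underflow:

```rust
// Locate the end of the "\r\n\r\n" header/body separator: bytes i-2 and i are both LF.
fn find_delim(raw: &[u8]) -> Option<usize> {
    (2..raw.len()).find(|&i| raw[i] == b'\n' && raw[i - 2] == b'\n')
}

fn main() {
    let response = b"HTTP/1.1 200 OK\r\nContent-Type: image/png\r\n\r\nPNGDATA";
    let delim = find_delim(response).unwrap();
    // Everything before the delimiter is headers; the byte after it starts the body.
    let (head, tail) = response.split_at(delim);
    assert!(head.starts_with(b"HTTP/1.1 200 OK"));
    assert_eq!(&tail[1..], b"PNGDATA");
}
```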
src/search.rs

```diff
@@ -84,7 +84,7 @@ async fn search_subreddits(q: &str) -> Vec<Subreddit> {
             name: val(subreddit, "display_name_prefixed"),
             url: val(subreddit, "url"),
             description: val(subreddit, "public_description"),
-            subscribers: subreddit["data"]["subscribers"].as_u64().unwrap_or_default() as i64,
+            subscribers: subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64,
         })
         .collect::<Vec<Subreddit>>(),
     _ => Vec::new(),
```
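A subscriber count that Reddit serializes as a floating-point number makes `as_u64()` return `None`, which `unwrap_or_default()` turns into 0; `as_f64()` accepts any JSON number, so this presumably fixes subreddits showing zero subscribers in search results.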
src/settings.rs

```diff
@@ -13,7 +13,7 @@ struct SettingsTemplate {
 
 #[derive(serde::Deserialize, Default)]
 #[serde(default)]
-pub struct SettingsForm {
+pub struct Form {
     theme: Option<String>,
     front_page: Option<String>,
     layout: Option<String>,
@@ -33,7 +33,7 @@ pub async fn get(req: Request<()>) -> tide::Result {
 
 // Set cookies using response "Set-Cookie" header
 pub async fn set(mut req: Request<()>) -> tide::Result {
-    let form: SettingsForm = req.body_form().await.unwrap_or_default();
+    let form: Form = req.body_form().await.unwrap_or_default();
 
     let mut res = redirect("/settings".to_string());
 
@@ -58,7 +58,7 @@ pub async fn set(mut req: Request<()>) -> tide::Result {
 
 // Set cookies using response "Set-Cookie" header
 pub async fn restore(req: Request<()>) -> tide::Result {
-    let form: SettingsForm = req.query()?;
+    let form: Form = req.query()?;
 
     let path = match form.redirect {
         Some(value) => format!("/{}/", value),
```
src/subreddit.rs

```diff
@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::*;
+use crate::utils::{cookie, error, format_num, format_url, param, redirect, request, rewrite_urls, template, val, Post, Preferences, Subreddit};
 use askama::Template;
 use tide::{http::Cookie, Request};
 use time::{Duration, OffsetDateTime};
@@ -108,10 +108,10 @@ pub async fn subscriptions(req: Request<()>) -> tide::Result {
     // Redirect back to subreddit
     // check for redirect parameter if unsubscribing from outside sidebar
     let redirect_path = param(&format!("/?{}", query), "redirect");
-    let path = if !redirect_path.is_empty() {
-        format!("/{}/", redirect_path)
-    } else {
+    let path = if redirect_path.is_empty() {
         format!("/r/{}", sub)
+    } else {
+        format!("/{}/", redirect_path)
     };
 
     let mut res = redirect(path);
@@ -139,9 +139,9 @@ pub async fn wiki(req: Request<()>) -> tide::Result {
     let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
 
     match request(path).await {
-        Ok(res) => template(WikiTemplate {
+        Ok(response) => template(WikiTemplate {
             sub,
-            wiki: rewrite_urls(res["data"]["content_html"].as_str().unwrap_or_default()),
+            wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or_default()),
             page,
             prefs: Preferences::new(req),
         }),
```
src/user.rs

```diff
@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::*;
+use crate::utils::{error, format_url, param, request, template, Post, Preferences, User};
 use askama::Template;
 use tide::Request;
 use time::OffsetDateTime;
```
src/utils.rs (115 changed lines)

```diff
@@ -2,6 +2,9 @@
 // CRATES
 //
 use askama::Template;
+use async_recursion::async_recursion;
+use async_std::{io, net::TcpStream, prelude::*};
+use async_tls::TlsConnector;
 use cached::proc_macro::cached;
 use regex::Regex;
 use serde_json::{from_str, Error, Value};
@@ -123,7 +126,7 @@ impl Media {
         let url = if post_type == "self" || post_type == "link" {
             url_val.as_str().unwrap_or_default().to_string()
         } else {
-            format_url(url_val.as_str().unwrap_or_default()).to_string()
+            format_url(url_val.as_str().unwrap_or_default())
         };
 
         (
@@ -249,7 +252,7 @@ impl Post {
             distinguished: val(post, "distinguished"),
         },
         score: if data["hide_score"].as_bool().unwrap_or_default() {
-            "•".to_string()
+            "\u{2022}".to_string()
         } else {
             format_num(score)
         },
@@ -408,10 +411,10 @@ pub fn format_url(url: &str) -> String {
         Ok(parsed) => {
             let domain = parsed.domain().unwrap_or_default();
 
-            let capture = |regex: &str, format: &str, levels: i16| {
+            let capture = |regex: &str, format: &str, segments: i16| {
                 Regex::new(regex)
                     .map(|re| match re.captures(url) {
-                        Some(caps) => match levels {
+                        Some(caps) => match segments {
                             1 => [format, &caps[1], "/"].join(""),
                             2 => [format, &caps[1], "/", &caps[2], "/"].join(""),
                             _ => String::new(),
@@ -510,54 +513,90 @@ pub async fn error(req: Request<()>, msg: String) -> tide::Result {
     Ok(Response::builder(404).content_type("text/html").body(body).build())
 }
 
+#[async_recursion]
+async fn connect(path: String) -> io::Result<String> {
+    // Build reddit-compliant user agent for Libreddit
+    let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));
+
+    // Construct an HTTP request body
+    let req = format!(
+        "GET {} HTTP/1.1\r\nHost: www.reddit.com\r\nAccept: */*\r\nConnection: close\r\nUser-Agent: {}\r\n\r\n",
+        path, user_agent
+    );
+
+    // Open a TCP connection
+    let tcp_stream = TcpStream::connect("www.reddit.com:443").await?;
+
+    // Initialize TLS connector for requests
+    let connector = TlsConnector::default();
+
+    // Use the connector to start the handshake process
+    let mut tls_stream = connector.connect("www.reddit.com", tcp_stream).await?;
+
+    // Write the crafted HTTP request to the stream
+    tls_stream.write_all(req.as_bytes()).await?;
+
+    // And read the response
+    let mut writer = Vec::new();
+    io::copy(&mut tls_stream, &mut writer).await?;
+    let response = String::from_utf8_lossy(&writer).to_string();
+
+    let split = response.split("\r\n\r\n").collect::<Vec<&str>>();
+
+    let headers = split[0].split("\r\n").collect::<Vec<&str>>();
+    let status: i16 = headers[0].split(' ').collect::<Vec<&str>>()[1].parse().unwrap_or(200);
+    let body = split[1].to_string();
+
+    if (300..400).contains(&status) {
+        let location = headers
+            .iter()
+            .find(|header| header.starts_with("location:"))
+            .map(|f| f.to_owned())
+            .unwrap_or_default()
+            .split(": ")
+            .collect::<Vec<&str>>()[1];
+        connect(location.replace("https://www.reddit.com", "")).await
+    } else {
+        Ok(body)
+    }
+}
+
 // Make a request to a Reddit API and parse the JSON response
 #[cached(size = 100, time = 30, result = true)]
 pub async fn request(path: String) -> Result<Value, String> {
     let url = format!("https://www.reddit.com{}", path);
-    // Build reddit-compliant user agent for Libreddit
-    let user_agent = format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"));
-
-    // Send request using surf
-    let req = surf::get(&url).header("User-Agent", user_agent.as_str());
-    let client = surf::client().with(surf::middleware::Redirect::new(5));
-
-    let res = client.send(req).await;
-
     let err = |msg: &str, e: String| -> Result<Value, String> {
         eprintln!("{} - {}: {}", url, msg, e);
         Err(msg.to_string())
     };
 
-    match res {
-        Ok(mut response) => match response.take_body().into_string().await {
-            // If response is success
-            Ok(body) => {
-                // Parse the response from Reddit as JSON
-                let parsed: Result<Value, Error> = from_str(&body);
-                match parsed {
-                    Ok(json) => {
-                        // If Reddit returned an error
-                        if json["error"].is_i64() {
-                            Err(
-                                json["reason"]
-                                    .as_str()
-                                    .unwrap_or_else(|| {
-                                        json["message"].as_str().unwrap_or_else(|| {
-                                            eprintln!("{} - Error parsing reddit error", url);
-                                            "Error parsing reddit error"
-                                        })
-                                    })
-                                    .to_string(),
-                            )
-                        } else {
-                            Ok(json)
-                        }
-                    }
-                    Err(e) => err("Failed to parse page JSON data", e.to_string()),
-                }
-            }
-            Err(e) => err("Couldn't parse request body", e.to_string()),
-        },
+    match connect(path).await {
+        Ok(body) => {
+            // Parse the response from Reddit as JSON
+            let parsed: Result<Value, Error> = from_str(&body);
+            match parsed {
+                Ok(json) => {
+                    // If Reddit returned an error
+                    if json["error"].is_i64() {
+                        Err(
+                            json["reason"]
+                                .as_str()
+                                .unwrap_or_else(|| {
+                                    json["message"].as_str().unwrap_or_else(|| {
+                                        eprintln!("{} - Error parsing reddit error", url);
+                                        "Error parsing reddit error"
+                                    })
+                                })
+                                .to_string(),
+                        )
+                    } else {
+                        Ok(json)
+                    }
+                }
+                Err(e) => err("Failed to parse page JSON data", e.to_string()),
+            }
+        }
         Err(e) => err("Couldn't send request to Reddit", e.to_string()),
     }
 }
```
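Unchanged but worth noting: `request` keeps its `#[cached]` attribute from the `cached` crate, so the new `connect` path inherits the same memoization; with `result = true`, only `Ok` values are cached. A minimal sketch of the same pattern on a synchronous stand-in (the `fetch` function is hypothetical):

```rust
use cached::proc_macro::cached;

// Only Ok results enter the cache (up to 100 entries, each valid for 30 seconds),
// so transient failures are retried on the next call.
#[cached(size = 100, time = 30, result = true)]
fn fetch(path: String) -> Result<String, String> {
    // Stand-in for the real network round trip.
    Ok(format!("response for {}", path))
}

fn main() {
    // A second call with the same argument is served from the cache.
    assert_eq!(fetch("/r/rust.json".into()), fetch("/r/rust.json".into()));
}
```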
static/style.css

```diff
@@ -1115,46 +1115,6 @@ td, th {
 
 /* Mobile */
 
-@media screen and (max-width: 480px) {
-    #version { display: none; }
-
-    .post {
-        grid-template: "post_header post_header post_thumbnail" auto
-                        "post_title post_title post_thumbnail" 1fr
-                        "post_media post_media post_thumbnail" auto
-                        "post_body post_body post_thumbnail" auto
-                        "post_score post_footer post_thumbnail" auto
-                        / auto 1fr fit-content(min(20%, 152px));
-    }
-
-    .post_score {
-        margin: 5px 0px 20px 15px;
-        padding: 0;
-    }
-
-    .compact .post_score { padding: 0; }
-
-    .post_score::before { content: "↑" }
-
-    .post_header { font-size: 14px; }
-    .post_footer { margin-left: 15px; }
-
-    .replies > .comment {
-        margin-left: -25px;
-        padding: 5px 0;
-    }
-
-    .comment_left {
-        min-width: 45px;
-        padding: 5px 0px;
-    }
-
-    .comment_author { margin-left: 10px; }
-    .comment_score { min-width: 35px; }
-    .comment_data::marker { font-size: 18px; }
-    .created { width: 100%; }
-}
-
 @media screen and (max-width: 800px) {
     body { padding-top: 120px }
 
@@ -1196,3 +1156,60 @@ td, th {
     #logo, #links { margin-bottom: 5px; }
     #searchbox { width: calc(100vw - 35px); }
 }
+
+@media screen and (max-width: 480px) {
+    body { padding-top: 100px; }
+    #version { display: none; }
+
+    .post {
+        grid-template: "post_header post_header post_thumbnail" auto
+                        "post_title post_title post_thumbnail" 1fr
+                        "post_media post_media post_thumbnail" auto
+                        "post_body post_body post_thumbnail" auto
+                        "post_score post_footer post_thumbnail" auto
+                        / auto 1fr fit-content(min(20%, 152px));
+    }
+
+    .post_score {
+        margin: 5px 0px 20px 15px;
+        padding: 0;
+    }
+
+    .compact .post_score { padding: 0; }
+
+    .post_score::before { content: "↑" }
+
+    .post_header { font-size: 14px; }
+    .post_footer { margin-left: 15px; }
+
+    .replies > .comment {
+        margin-left: -12px;
+        padding: 5px 0;
+    }
+
+    .comment_left {
+        min-width: auto;
+        padding: 5px 0px;
+        align-items: initial;
+        margin-top: -5px;
+    }
+
+    .line {
+        margin-left: 5px;
+    }
+
+    /* .thread { margin-left: -5px; } */
+    .comment_right { padding: 5px 0 10px 2px; }
+    .comment_author { margin-left: 5px; }
+    .comment_data { margin-left: 12px; }
+    .comment_data::marker { font-size: 22px; }
+    .created { width: 100%; }
+
+    .comment_score {
+        min-width: 32px;
+        height: 20px;
+        font-size: 15px;
+        padding: 7px 0px;
+        margin-right: -5px;
+    }
+}
```
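Taken together, the two hunks above move the 480px media query from before the 800px query to the end of the stylesheet. Both queries match on phones and have equal specificity, so in the cascade the 480px rules now override the 800px ones; the relocated block also adds `body { padding-top: 100px; }` and retunes the comment layout (tighter reply indent, auto-width gutter, a resized score badge).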
templates/user.html

```diff
@@ -65,7 +65,7 @@
     </div>
     <aside>
         <div class="panel" id="user">
-            <img id="user_icon" src="{{ user.icon }}">
+            <img id="user_icon" src="{{ user.icon }}" alt="User icon">
             <p id="user_title">{{ user.title }}</p>
             <p id="user_name">u/{{ user.name }}</p>
             <div id="user_description">{{ user.description }}</div>
@@ -86,9 +86,9 @@
                 </svg>
             </a>
         {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
-            <video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}px" height="{{ post.media.height }}px" controls loop autoplay><a href={{ post.media.url }}>Video</a></video>
+            <video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" controls loop autoplay><a href={{ post.media.url }}>Video</a></video>
         {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
-            <video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}px" height="{{ post.media.height }}px" poster="{{ post.media.poster }}" preload="none" controls autoplay><a href={{ post.media.url }}>Video</a></video>
+            <video class="post_media_video short" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls autoplay><a href={{ post.media.url }}>Video</a></video>
         {% else if post.post_type != "self" %}
             <a class="post_thumbnail {% if post.thumbnail.url.is_empty() %}no_thumbnail{% endif %}" href="{% if post.post_type == "link" %}{{ post.media.url }}{% else %}{{ post.permalink }}{% endif %}">
                 {% if post.thumbnail.url.is_empty() %}
```
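Two small fixes here: the HTML `width` and `height` attributes take a bare pixel count, so the `px` suffix made those values invalid markup (units belong in CSS, not in the attributes), and the added `alt` text on the user icon improves accessibility.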