Restrict Proxy to Reddit Domains

src/main.rs
@@ -5,6 +5,7 @@ use actix_web::{get, middleware::NormalizePath, web, App, HttpResponse, HttpServ
 mod post;
 mod proxy;
 mod search;
+// mod settings;
 mod subreddit;
 mod user;
 mod utils;
@@ -50,6 +51,9 @@ async fn main() -> std::io::Result<()> {
 			.route("/style.css/", web::get().to(style))
 			.route("/favicon.ico/", web::get().to(HttpResponse::Ok))
 			.route("/robots.txt/", web::get().to(robots))
+			// SETTINGS SERVICE
+			// .route("/settings/", web::get().to(settings::get))
+			// .route("/settings/save/", web::post().to(settings::set))
 			// PROXY SERVICE
 			.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
 			// SEARCH SERVICES
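
Note: the {url:.*} tail matcher passes everything after /proxy/ to proxy::handler as a single parameter, which the new handler treats as a base64-encoded URL. A hypothetical request, assuming the format_url helper from src/utils.rs below:

	GET /proxy/aHR0cHM6Ly9pLnJlZGQuaXQveC5wbmc=/
	→ decodes to https://i.redd.it/x.png, which is on the allowlist and gets streamed back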

src/post.rs
@@ -16,7 +16,7 @@ struct PostTemplate {
 	sort: String,
 }
 
-pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
+pub async fn item(req: HttpRequest) -> HttpResponse {
 	let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
 	let sort = param(&path, "sort");
 	let id = req.match_info().get("id").unwrap_or("").to_string();
@@ -35,7 +35,7 @@ pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
 
 			// Use the Post and Comment structs to generate a website to show users
 			let s = PostTemplate { comments, post, sort }.render().unwrap();
-			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+			HttpResponse::Ok().content_type("text/html").body(s)
 		}
 		// If the Reddit API returns an error, exit and send error page to user
 		Err(msg) => error(msg.to_string()).await,
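
The same signature change, Result<HttpResponse> to HttpResponse, is applied to every page handler in this commit: both match arms already produce a complete response (error() renders the error page itself), so the Result wrapper carried no information. A minimal sketch of the pattern, with a hypothetical fetch() standing in for the Reddit API call:

	use actix_web::HttpResponse;

	// Hypothetical stand-in for the Reddit API request
	async fn fetch() -> Result<String, String> {
		Ok("<html>...</html>".to_string())
	}

	async fn handler() -> HttpResponse {
		match fetch().await {
			// Success: build the page response directly, no Ok(...) wrapper
			Ok(body) => HttpResponse::Ok().content_type("text/html").body(body),
			// Failure: the error page is itself a complete response
			Err(msg) => HttpResponse::NotFound().content_type("text/html").body(msg),
		}
	}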

src/proxy.rs (54 changed lines)
@@ -1,30 +1,40 @@
-use actix_web::{client::Client, web, Error, HttpResponse, Result};
+use actix_web::{client::Client, error, web, Error, HttpResponse, Result};
+use url::Url;
 
-#[cfg(feature = "proxy")]
 use base64::decode;
 
-pub async fn handler(web::Path(url): web::Path<String>) -> Result<HttpResponse> {
-	if cfg!(feature = "proxy") {
-		#[cfg(feature = "proxy")]
-		let media: String;
+pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> {
+	let domains = vec![
+		"a.thumbs.redditmedia.com",
+		"b.thumbs.redditmedia.com",
+		"preview.redd.it",
+		"external-preview.redd.it",
+		"i.redd.it",
+		"v.redd.it",
+	];
 
-		#[cfg(not(feature = "proxy"))]
-		let media = url;
+	match decode(b64) {
+		Ok(bytes) => {
+			let media = String::from_utf8(bytes).unwrap();
 
-		#[cfg(feature = "proxy")]
-		match decode(url) {
-			Ok(bytes) => media = String::from_utf8(bytes).unwrap(),
-			Err(_e) => return Ok(HttpResponse::Ok().body("")),
-		};
+			match Url::parse(media.as_str()) {
+				Ok(url) => {
+					let domain = url.domain().unwrap_or_default();
 
-		let client = Client::default();
-		client
-			.get(media.replace("&amp;", "&"))
-			.send()
-			.await
-			.map_err(Error::from)
-			.map(|res| HttpResponse::build(res.status()).streaming(res))
-	} else {
-		Ok(HttpResponse::Ok().body(""))
+					if domains.contains(&domain) {
+						Client::default()
+							.get(media.replace("&amp;", "&"))
+							.send()
+							.await
+							.map_err(Error::from)
+							.map(|res| HttpResponse::build(res.status()).streaming(res))
+					} else {
+						Err(error::ErrorForbidden("Resource must be from Reddit"))
+					}
+				}
+				Err(_) => Err(error::ErrorBadRequest("Can't parse encoded base64 URL")),
+			}
+		}
+		Err(_) => Err(error::ErrorBadRequest("Can't decode base64 URL")),
 	}
 }
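
In short, the new handler base64-decodes the path segment, parses it as a URL, and proxies it only if its domain is on the Reddit allowlist. A standalone sketch of that check (hypothetical is_reddit_media helper; assumes the same base64 and url crates the handler uses):

	use url::Url;

	// Hypothetical helper mirroring the handler's allowlist logic
	fn is_reddit_media(b64: &str) -> bool {
		let domains = [
			"a.thumbs.redditmedia.com",
			"b.thumbs.redditmedia.com",
			"preview.redd.it",
			"external-preview.redd.it",
			"i.redd.it",
			"v.redd.it",
		];

		base64::decode(b64)
			.ok()
			.and_then(|bytes| String::from_utf8(bytes).ok())
			.and_then(|media| Url::parse(&media).ok())
			// Url::domain() is None for IP-address hosts, so those are rejected too
			.map(|url| domains.contains(&url.domain().unwrap_or_default()))
			.unwrap_or(false)
	}

	fn main() {
		assert!(is_reddit_media(&base64::encode("https://i.redd.it/example.png")));
		assert!(!is_reddit_media(&base64::encode("https://example.com/tracker.png")));
	}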

src/search.rs
@@ -1,11 +1,10 @@
 // CRATES
 use crate::utils::{error, fetch_posts, param, Post};
-use actix_web::{HttpRequest, HttpResponse, Result};
+use actix_web::{HttpRequest, HttpResponse};
 use askama::Template;
 
 // STRUCTS
 #[derive(Template)]
-#[allow(dead_code)]
 #[template(path = "search.html", escape = "none")]
 struct SearchTemplate {
 	posts: Vec<Post>,
@@ -16,7 +15,7 @@ struct SearchTemplate {
 }
 
 // SERVICES
-pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
+pub async fn find(req: HttpRequest) -> HttpResponse {
 	let path = format!("{}.json?{}", req.path(), req.query_string());
 	let q = param(&path, "q");
 	let sort = if param(&path, "sort").is_empty() {
@@ -27,8 +26,8 @@ pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
 	let sub = req.match_info().get("sub").unwrap_or("").to_string();
 
 	match fetch_posts(&path, String::new()).await {
-		Ok(posts) => {
-			let s = SearchTemplate {
+		Ok(posts) => HttpResponse::Ok().content_type("text/html").body(
+			SearchTemplate {
 				posts: posts.0,
 				query: q,
 				sub,
@@ -36,9 +35,8 @@ pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
 				ends: (param(&path, "after"), posts.1),
 			}
 			.render()
-			.unwrap();
-			Ok(HttpResponse::Ok().content_type("text/html").body(s))
-		}
+			.unwrap(),
+		),
 		Err(msg) => error(msg.to_string()).await,
 	}
 }

src/settings.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
+// // CRATES
+// use crate::utils::cookies;
+// use actix_web::{cookie::Cookie, web::Form, HttpRequest, HttpResponse, Result}; // http::Method,
+// use askama::Template;
+
+// // STRUCTS
+// #[derive(Template)]
+// #[template(path = "settings.html", escape = "none")]
+// struct SettingsTemplate {
+// 	pref_nsfw: String,
+// }
+
+// #[derive(serde::Deserialize)]
+// pub struct Preferences {
+// 	pref_nsfw: Option<String>,
+// }
+
+// // FUNCTIONS
+
+// // Retrieve cookies from request "Cookie" header
+// pub async fn get(req: HttpRequest) -> Result<HttpResponse> {
+// 	let cookies = cookies(req);
+
+// 	let pref_nsfw: String = cookies.get("pref_nsfw").unwrap_or(&String::new()).to_owned();
+
+// 	let s = SettingsTemplate { pref_nsfw }.render().unwrap();
+// 	Ok(HttpResponse::Ok().content_type("text/html").body(s))
+// }
+
+// // Set cookies using response "Set-Cookie" header
+// pub async fn set(form: Form<Preferences>) -> HttpResponse {
+// 	let nsfw: Cookie = match &form.pref_nsfw {
+// 		Some(value) => Cookie::build("pref_nsfw", value).path("/").secure(true).http_only(true).finish(),
+// 		None => Cookie::build("pref_nsfw", "").finish(),
+// 	};
+
+// 	let body = SettingsTemplate {
+// 		pref_nsfw: form.pref_nsfw.clone().unwrap_or_default(),
+// 	}
+// 	.render()
+// 	.unwrap();
+
+// 	HttpResponse::Found()
+// 		.content_type("text/html")
+// 		.set_header("Set-Cookie", nsfw.to_string())
+// 		.set_header("Location", "/settings")
+// 		.body(body)
+// }

src/subreddit.rs
@@ -22,7 +22,7 @@ struct WikiTemplate {
 }
 
 // SERVICES
-pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
+pub async fn page(req: HttpRequest) -> HttpResponse {
 	let path = format!("{}.json?{}", req.path(), req.query_string());
 	let sub = req.match_info().get("sub").unwrap_or("popular").to_string();
 	let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
@@ -43,13 +43,13 @@ pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
 			}
 			.render()
 			.unwrap();
-			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+			HttpResponse::Ok().content_type("text/html").body(s)
 		}
 		Err(msg) => error(msg.to_string()).await,
 	}
 }
 
-pub async fn wiki(req: HttpRequest) -> Result<HttpResponse> {
+pub async fn wiki(req: HttpRequest) -> HttpResponse {
 	let sub = req.match_info().get("sub").unwrap_or("reddit.com");
 	let page = req.match_info().get("page").unwrap_or("index");
 	let path: String = format!("r/{}/wiki/{}.json?raw_json=1", sub, page);
@@ -63,7 +63,7 @@ pub async fn wiki(req: HttpRequest) -> Result<HttpResponse> {
 			}
 			.render()
 			.unwrap();
-			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+			HttpResponse::Ok().content_type("text/html").body(s)
 		}
 		Err(msg) => error(msg.to_string()).await,
 	}

src/user.rs
@@ -14,7 +14,8 @@ struct UserTemplate {
 	ends: (String, String),
 }
 
-pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
+// FUNCTIONS
+pub async fn profile(req: HttpRequest) -> HttpResponse {
 	// Build the Reddit JSON API path
 	let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string());
 
@@ -36,7 +37,7 @@ pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
 			}
 			.render()
 			.unwrap();
-			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+			HttpResponse::Ok().content_type("text/html").body(s)
 		}
 		// If there is an error show error page
 		Err(msg) => error(msg.to_string()).await,

src/utils.rs (36 changed lines)
@@ -1,17 +1,17 @@
+// use std::collections::HashMap;
+
 //
 // CRATES
 //
-use actix_web::{http::StatusCode, HttpResponse, Result};
+use actix_web::{HttpResponse, Result};
 use askama::Template;
+use base64::encode;
 use chrono::{TimeZone, Utc};
 use regex::Regex;
 use serde_json::from_str;
 use url::Url;
 // use surf::{client, get, middleware::Redirect};
 
-#[cfg(feature = "proxy")]
-use base64::encode;
-
 //
 // STRUCTS
 //
@@ -102,17 +102,31 @@ pub fn param(path: &str, value: &str) -> String {
 	pairs.get(value).unwrap_or(&String::new()).to_owned()
 }
 
+// Cookies from request
+// pub fn cookies(req: HttpRequest) -> HashMap<String, String> {
+// 	let mut result: HashMap<String, String> = HashMap::new();
+
+// 	let cookies: Vec<Cookie> = req
+// 		.headers()
+// 		.get_all("Cookie")
+// 		.map(|value| value.to_str().unwrap())
+// 		.map(|unparsed| Cookie::parse(unparsed).unwrap())
+// 		.collect();
+
+// 	for cookie in cookies {
+// 		result.insert(cookie.name().to_string(), cookie.value().to_string());
+// 	}
+
+// 	result
+// }
+
 // Direct urls to proxy if proxy is enabled
 pub fn format_url(url: String) -> String {
 	if url.is_empty() {
 		return String::new();
 	};
 
-	#[cfg(feature = "proxy")]
-	return "/proxy/".to_string() + encode(url).as_str();
-
-	#[cfg(not(feature = "proxy"))]
-	return url.to_string();
+	format!("/proxy/{}", encode(url).as_str())
 }
 
 // Rewrite Reddit links to Libreddit in body of text
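
With the feature gate removed, format_url now always rewrites media URLs through the proxy route. A hypothetical call (the base64 shown is the standard encoding of the input URL):

	format_url("https://i.redd.it/x.png".to_string())
	// → "/proxy/aHR0cHM6Ly9pLnJlZGQuaXQveC5wbmc="
	// which matches the /proxy/{url:.*}/ route in src/main.rs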
@@ -217,10 +231,10 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 // NETWORKING
 //
 
-pub async fn error(message: String) -> Result<HttpResponse> {
+pub async fn error(message: String) -> HttpResponse {
 	let msg = if message.is_empty() { "Page not found".to_string() } else { message };
 	let body = ErrorTemplate { message: msg }.render().unwrap_or_default();
-	Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body))
+	HttpResponse::NotFound().content_type("text/html").body(body)
 }
 
 // Make a request to a Reddit API and parse the JSON response