Compare commits


2 Commits

SHA1        Message                     Date
0656756d21  Fix #196                    2021-11-29 22:29:41 -08:00
43551f70fd  Add leddit onion instance   2021-11-30 04:09:41 +00:00
6 changed files with 12 additions and 7 deletions

Cargo.lock (generated)

@@ -603,7 +603,7 @@ checksum = "8521a1b57e76b1ec69af7599e75e38e7b7fad6610f037db8c79b127201b5d119"
 [[package]]
 name = "libreddit"
-version = "0.20.1"
+version = "0.20.2"
 dependencies = [
  "askama",
  "async-recursion",

Cargo.toml

@@ -3,7 +3,7 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.20.1"
+version = "0.20.2"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2018"

README.md

@@ -69,6 +69,7 @@ Feel free to [open an issue](https://github.com/spikecodes/libreddit/issues/new)
 | [liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion](http://liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion) | 🇩🇪 DE | |
 | [kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion](http://kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion) | 🇺🇸 US | |
 | [ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion](http://ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion) | 🇩🇪 DE | |
+| [ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion](http://ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion) | 🇺🇸 US | |
 
 A checkmark in the "Cloudflare" category here refers to the use of the reverse proxy, [Cloudflare](https://cloudflare.com). The checkmark will not be listed for a site which uses Cloudflare DNS but rather the proxying service which grants Cloudflare the ability to monitor traffic to the website.

src/client.rs

@@ -2,7 +2,7 @@ use cached::proc_macro::cached;
 use futures_lite::{future::Boxed, FutureExt};
 use hyper::{body::Buf, client, Body, Request, Response, Uri};
 use serde_json::Value;
-use std::{result::Result, str::FromStr};
+use std::result::Result;
 use crate::server::RequestExt;
@@ -20,7 +20,7 @@ pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, S
 async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
     // First parameter is target URL (mandatory).
-    let url = Uri::from_str(url).map_err(|_| "Couldn't parse URL".to_string())?;
+    let uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
     // Prepare the HTTPS connector.
     let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();
@@ -28,7 +28,7 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
     // Build the hyper client from the HTTPS connector.
     let client: client::Client<_, hyper::Body> = client::Client::builder().build(https);
-    let mut builder = Request::get(url);
+    let mut builder = Request::get(uri);
     // Copy useful headers from original request
     for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
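
Taken together, the three hunks above swap `Uri::from_str(url)` for `url.parse::<Uri>()` and hand the parsed `uri` to the request builder, which is why the `str::FromStr` import disappears from the use list. A minimal sketch of the same parse-then-build flow, assuming only `hyper`; the `build_get` helper name is illustrative, not from the codebase:

```rust
use hyper::{Body, Request, Uri};

// `str::parse::<Uri>()` goes through the same `FromStr` impl that `Uri::from_str` uses,
// so no explicit `use std::str::FromStr` is needed at the call site.
fn build_get(url: &str) -> Result<Request<Body>, String> {
    // Parse the target URL, mapping the parse error into the String error type used here.
    let uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;

    // Build a GET request against the parsed URI.
    Request::get(uri).body(Body::empty()).map_err(|_| "Couldn't build request".to_string())
}
```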
@@ -89,7 +89,10 @@ fn request(url: String, quarantine: bool) -> Boxed<Result<Response<Body>, String
     response
         .headers()
         .get("Location")
-        .map(|val| val.to_str().unwrap_or_default())
+        .map(|val| {
+            let new_url = val.to_str().unwrap_or_default();
+            format!("{}{}raw_json=1", new_url, if new_url.contains("?") { "&" } else { "?" })
+        })
         .unwrap_or_default()
         .to_string(),
     quarantine,

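The last hunk above rewrites redirect targets so that `raw_json=1` survives the redirect: Reddit's JSON API HTML-escapes characters such as `&`, `<`, and `>` unless that parameter is present, which is presumably the escaping issue behind #196. The separator is picked by checking whether the `Location` URL already carries a query string. A standalone sketch of just that append logic (the `append_raw_json` helper is hypothetical, not part of the codebase):

```rust
/// Hypothetical helper mirroring the closure above: append `raw_json=1` to a redirect
/// target, using `&` when the URL already has a query string and `?` otherwise.
fn append_raw_json(new_url: &str) -> String {
    format!("{}{}raw_json=1", new_url, if new_url.contains('?') { "&" } else { "?" })
}

#[test]
fn picks_the_right_separator() {
    assert_eq!(append_raw_json("/r/rust/top"), "/r/rust/top?raw_json=1");
    assert_eq!(append_raw_json("/r/rust/top?t=week"), "/r/rust/top?t=week&raw_json=1");
}
```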
src/search.rs

@@ -47,7 +47,7 @@ struct SearchTemplate {
 // SERVICES
 pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
     let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" };
-    let path = format!("{}.json?{}{}", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
+    let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results);
     let query = param(&path, "q").unwrap_or_default();
     if query.is_empty() {
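
The search endpoint gets the same flag. Because the format string already supplies the `?`, appending `&raw_json=1` unconditionally is safe here. A rough sketch of the resulting path for a hypothetical `/r/rust/search` request with NSFW results enabled:

```rust
fn main() {
    // Illustrative values only: the literals stand in for the request data used in find() above.
    let path = format!(
        "{}.json?{}{}&raw_json=1",
        "/r/rust/search",          // req.uri().path()
        "q=async&sort=relevance",  // req.uri().query().unwrap_or_default()
        "&include_over_18=on"      // nsfw_results
    );
    assert_eq!(path, "/r/rust/search.json?q=async&sort=relevance&include_over_18=on&raw_json=1");
}
```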

static/style.css

@@ -465,6 +465,7 @@ aside {
 #wiki {
     background: var(--foreground);
     padding: 35px;
+    overflow-wrap: anywhere;
 }
 
 #top {
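
For the wiki page, `overflow-wrap: anywhere` lets long unbroken strings (such as bare URLs in wiki content) break at arbitrary points instead of forcing the `#wiki` block to overflow horizontally.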