Compare commits
7 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 4b1195f221 | |
| | a472461ee8 | |
| | baf5e3d7ee | |
| | f209757ed6 | |
| | 4173362ce1 | |
| | b2ae5e486f | |
| | cda19a1912 | |
Cargo.toml

@@ -3,7 +3,7 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.4.0"
+version = "0.4.2"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2018"
 
@@ -14,7 +14,7 @@ async-std = { version = "1.9.0", features = ["attributes"] }
 async-tls = { version = "0.11.0", default-features = false, features = ["client"] }
 cached = "0.23.0"
 clap = { version = "2.33.3", default-features = false }
-regex = "1.4.3"
+regex = "1.4.4"
 serde = { version = "1.0.124", features = ["derive"] }
 serde_json = "1.0.64"
 tide = { version = "0.16.0", default-features = false, features = ["h1-server", "cookies"] }
README.md (18)

@@ -6,7 +6,7 @@
 
 ---
 
-**10 second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libredd.it/r/unpopularopinion) without being [tracked](#reddit).
+**10 second pitch:** Libreddit is a portmanteau of "libre" (meaning freedom) and "Reddit". It is a private front-end like [Invidious](https://github.com/iv-org/invidious) but for Reddit. Browse the coldest takes of [r/unpopularopinion](https://libreddit.spike.codes/r/unpopularopinion) without being [tracked](#reddit).
 
 - 🚀 Fast: written in Rust for blazing fast speeds and memory safety
 - ☁️ Light: no JavaScript, no ads, no tracking, no bloat
@@ -30,7 +30,7 @@
 - [Docker](#2-docker)
 - [AUR](#3-aur)
 - [GitHub Releases](#4-github-releases)
-- [Repl.it](#5-replit)
+- [Replit](#5-replit)
 - [Deployment](#deployment)
 
 ---
@@ -137,9 +137,9 @@ For transparency, I hope to describe all the ways Libreddit handles user privacy
 
 **DNS:** Both official domains (`libredd.it` and `libreddit.spike.codes`) use Cloudflare as the DNS resolver. Though, the sites are not proxied through Cloudflare meaning Cloudflare doesn't have access to user traffic.
 
-**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libredd.it/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.
+**Cookies:** Libreddit uses optional cookies to store any configured settings in [the settings menu](https://libreddit.spike.codes/settings). This is not a cross-site cookie and the cookie holds no personal data, only a value of the possible layout.
 
-**Hosting:** The official instances are hosted on [Repl.it](https://repl.it/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.
+**Hosting:** The official instances are hosted on [Replit](https://replit.com/) which monitors usage to prevent abuse. I can understand if this invalidates certain users' threat models and therefore, selfhosting and browsing through Tor are welcomed.
 
 ---
 
@@ -177,15 +177,15 @@ yay -S libreddit-git
 
 If you're on Linux and none of these methods work for you, you can grab a Linux binary from [the newest release](https://github.com/spikecodes/libreddit/releases/latest).
 
-## 5) Repl.it
+## 5) Replit
 
-**Note:** Repl.it is a free option but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.
+**Note:** Replit is a free option but they are *not* private and will monitor server usage to prevent abuse. If you need a free and easy setup, this method may work best for you.
 
-1. Create a Repl.it account (see note above)
+1. Create a Replit account (see note above)
-2. Visit [the official Repl](https://repl.it/@spikethecoder/libreddit) and fork it
+2. Visit [the official Repl](https://replit.com/@spikethecoder/libreddit) and fork it
 3. Hit the run button to download the latest Libreddit version and start it
 
-In the web preview (defaults to top right), you should see your instance hosted where you can assign a [custom domain](https://docs.repl.it/repls/web-hosting#custom-domains).
+In the web preview (defaults to top right), you should see your instance hosted where you can assign a [custom domain](https://docs.replit.com/repls/web-hosting#custom-domains).
 
 ---
src/main.rs

@@ -219,7 +219,7 @@ async fn main() -> tide::Result<()> {
 app.at("/settings/restore/").get(settings::restore);
 
 // Subreddit services
-app.at("/r/:sub/").get(subreddit::page);
+app.at("/r/:sub/").get(subreddit::community);
 
 app.at("/r/:sub/subscribe/").post(subreddit::subscriptions);
 app.at("/r/:sub/unsubscribe/").post(subreddit::subscriptions);
@@ -235,13 +235,13 @@ async fn main() -> tide::Result<()> {
 app.at("/r/:sub/w/").get(subreddit::wiki);
 app.at("/r/:sub/w/:page/").get(subreddit::wiki);
 
-app.at("/r/:sub/:sort/").get(subreddit::page);
+app.at("/r/:sub/:sort/").get(subreddit::community);
 
 // Comments handler
 app.at("/comments/:id/").get(post::item);
 
 // Front page
-app.at("/").get(subreddit::page);
+app.at("/").get(subreddit::community);
 
 // View Reddit wiki
 app.at("/w/").get(subreddit::wiki);
@@ -258,7 +258,7 @@ async fn main() -> tide::Result<()> {
 app.at("/:id/").get(|req: Request<()>| async {
 match req.param("id") {
 // Sort front page
-Ok("best") | Ok("hot") | Ok("new") | Ok("top") | Ok("rising") | Ok("controversial") => subreddit::page(req).await,
+Ok("best") | Ok("hot") | Ok("new") | Ok("top") | Ok("rising") | Ok("controversial") => subreddit::community(req).await,
 // Short link for post
 Ok(id) if id.len() > 4 && id.len() < 7 => post::item(req).await,
 // Error message for unknown pages
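The routing lines above follow tide's endpoint pattern: a free `async fn` that takes a `Request<()>` and returns `tide::Result` can be passed straight to `app.at(...).get(...)`, so the rename from `subreddit::page` to `subreddit::community` only changes the function name at each call site. A minimal, self-contained sketch of that pattern follows; the route, handler body, and port are illustrative and not taken from this diff.

```rust
use tide::{Request, Response, StatusCode};

// Illustrative handler: same shape as subreddit::community in the diff,
// but the body here is a stand-in, not Libreddit code.
async fn community(req: Request<()>) -> tide::Result {
    // ":sub" is the same style of route parameter used in main.rs.
    let sub = req.param("sub").unwrap_or("all");
    Ok(Response::builder(StatusCode::Ok).body(format!("r/{}", sub)).build())
}

#[async_std::main]
async fn main() -> tide::Result<()> {
    let mut app = tide::new();
    // Register the async fn directly, as main.rs does.
    app.at("/r/:sub/").get(community);
    app.listen("127.0.0.1:8080").await?;
    Ok(())
}
```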
src/post.rs

@@ -1,4 +1,5 @@
 // CRATES
+use crate::esc;
 use crate::utils::{
 cookie, error, format_num, format_url, param, request, rewrite_urls, template, time, val, Author, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
 };
@@ -81,7 +82,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 // Build a post using data parsed from Reddit post API
 Post {
 id: val(post, "id"),
-title: val(post, "title"),
+title: esc!(post, "title"),
 community: val(post, "subreddit"),
 body: rewrite_urls(&val(post, "selftext_html")).replace("\\", ""),
 author: Author {
@@ -92,7 +93,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 post["data"]["author_flair_richtext"].as_array(),
 post["data"]["author_flair_text"].as_str(),
 ),
-text: val(post, "link_flair_text"),
+text: esc!(post, "link_flair_text"),
 background_color: val(post, "author_flair_background_color"),
 foreground_color: val(post, "author_flair_text_color"),
 },
@@ -115,7 +116,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 post["data"]["link_flair_richtext"].as_array(),
 post["data"]["link_flair_text"].as_str(),
 ),
-text: val(post, "link_flair_text"),
+text: esc!(post, "link_flair_text"),
 background_color: val(post, "link_flair_background_color"),
 foreground_color: if val(post, "link_flair_text_color") == "dark" {
 "black".to_string()
@@ -191,7 +192,7 @@ async fn parse_comments(json: &serde_json::Value, post_link: &str, post_author:
 data["author_flair_richtext"].as_array(),
 data["author_flair_text"].as_str(),
 ),
-text: val(&comment, "link_flair_text"),
+text: esc!(&comment, "link_flair_text"),
 background_color: val(&comment, "author_flair_background_color"),
 foreground_color: val(&comment, "author_flair_text_color"),
 },
src/proxy.rs (12)

@@ -19,8 +19,8 @@ pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide:
 /// Relays the `Content-Length` and `Content-Type` header.
 async fn request(url: String) -> tide::Result {
 // Parse url into parts
-let parts = Url::parse(&url).unwrap();
+let parts = Url::parse(&url)?;
-let host = parts.host().unwrap().to_string();
+let host = parts.host().map(|host| host.to_string()).unwrap_or_default();
 let domain = parts.domain().unwrap_or_default();
 let path = format!("{}?{}", parts.path(), parts.query().unwrap_or_default());
 // Build reddit-compliant user agent for Libreddit
@@ -36,17 +36,17 @@ async fn request(url: String) -> tide::Result {
 let connector = TlsConnector::default();
 
 // Open a TCP connection
-let tcp_stream = TcpStream::connect(format!("{}:443", domain)).await.unwrap();
+let tcp_stream = TcpStream::connect(format!("{}:443", domain)).await?;
 
 // Use the connector to start the handshake process
-let mut tls_stream = connector.connect(domain, tcp_stream).await.unwrap();
+let mut tls_stream = connector.connect(domain, tcp_stream).await?;
 
 // Write the aforementioned HTTP request to the stream
-tls_stream.write_all(req.as_bytes()).await.unwrap();
+tls_stream.write_all(req.as_bytes()).await?;
 
 // And read the response
 let mut writer = Vec::new();
-io::copy(&mut tls_stream, &mut writer).await.unwrap();
+io::copy(&mut tls_stream, &mut writer).await?;
 
 // Find the delimiter which separates the body and headers
 match (0..writer.len()).find(|i| writer[i.to_owned()] == 10_u8 && writer[i - 2] == 10_u8) {
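The `src/proxy.rs` hunks replace `.unwrap()` calls with `?`, so a malformed URL or a failed connection now propagates as an error to the caller instead of panicking the server. Below is a small sketch of the same idea, using the `url` crate's own error type for brevity (assumed available; the real function returns `tide::Result`) and purely illustrative inputs.

```rust
use url::Url;

// Returns the host of a URL, or an empty string when the URL has no host.
fn host_of(raw: &str) -> Result<String, url::ParseError> {
    let parts = Url::parse(raw)?; // was: Url::parse(&url).unwrap()
    // was: parts.host().unwrap().to_string(), which panics on host-less URLs
    Ok(parts.host().map(|host| host.to_string()).unwrap_or_default())
}

fn main() {
    assert_eq!(host_of("https://www.reddit.com/r/rust.json").unwrap(), "www.reddit.com");
    assert!(host_of("not a url").is_err()); // propagated as Err, not a panic
    assert_eq!(host_of("mailto:user@example.com").unwrap(), ""); // no host, no panic
}
```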
src/subreddit.rs

@@ -1,4 +1,5 @@
 // CRATES
+use crate::esc;
 use crate::utils::{cookie, error, format_num, format_url, param, redirect, request, rewrite_urls, template, val, Post, Preferences, Subreddit};
 use askama::Template;
 use tide::{http::Cookie, Request};
@@ -25,7 +26,7 @@ struct WikiTemplate {
 }
 
 // SERVICES
-pub async fn page(req: Request<()>) -> tide::Result {
+pub async fn community(req: Request<()>) -> tide::Result {
 // Build Reddit API path
 let subscribed = cookie(&req, "subscriptions");
 let front_page = cookie(&req, "front_page");
@@ -167,9 +168,9 @@ async fn subreddit(sub: &str) -> Result<Subreddit, String> {
 let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
 
 let sub = Subreddit {
-name: val(&res, "display_name"),
+name: esc!(&res, "display_name"),
-title: val(&res, "title"),
+title: esc!(&res, "title"),
-description: val(&res, "public_description"),
+description: esc!(&res, "public_description"),
 info: rewrite_urls(&val(&res, "description_html").replace("\\", "")),
 icon: format_url(&icon),
 members: format_num(members),
src/user.rs

@@ -1,4 +1,5 @@
 // CRATES
+use crate::esc;
 use crate::utils::{error, format_url, param, request, template, Post, Preferences, User};
 use askama::Template;
 use tide::Request;
@@ -57,17 +58,17 @@ async fn user(name: &str) -> Result<User, String> {
 // Grab creation date as unix timestamp
 let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
 
-// nested_val function used to parse JSON from Reddit APIs
+// Closure used to parse JSON from Reddit APIs
 let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
 
 // Parse the JSON output into a User struct
 Ok(User {
 name: name.to_string(),
-title: about("title"),
+title: esc!(about("title")),
 icon: format_url(&about("icon_img")),
 karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
 created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
-banner: about("banner_img"),
+banner: esc!(about("banner_img")),
 description: about("public_description"),
 })
 }
src/utils.rs (30)

@@ -1,6 +1,7 @@
 //
 // CRATES
 //
+use crate::esc;
 use askama::Template;
 use async_recursion::async_recursion;
 use async_std::{io, net::TcpStream, prelude::*};
@@ -227,14 +228,14 @@ impl Post {
 let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
 let score = data["score"].as_i64().unwrap_or_default();
 let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
-let title = val(post, "title");
+let title = esc!(post, "title");
 
 // Determine the type of media along with the media URL
 let (post_type, media, gallery) = Media::parse(&data).await;
 
 posts.push(Self {
 id: val(post, "id"),
-title: if title.is_empty() { fallback_title.to_owned() } else { title },
+title: esc!(if title.is_empty() { fallback_title.to_owned() } else { title }),
 community: val(post, "subreddit"),
 body: rewrite_urls(&val(post, "body_html")),
 author: Author {
@@ -245,7 +246,7 @@ impl Post {
 data["author_flair_richtext"].as_array(),
 data["author_flair_text"].as_str(),
 ),
-text: val(post, "link_flair_text"),
+text: esc!(post, "link_flair_text"),
 background_color: val(post, "author_flair_background_color"),
 foreground_color: val(post, "author_flair_text_color"),
 },
@@ -272,7 +273,7 @@ impl Post {
 data["link_flair_richtext"].as_array(),
 data["link_flair_text"].as_str(),
 ),
-text: val(post, "link_flair_text"),
+text: esc!(post, "link_flair_text"),
 background_color: val(post, "link_flair_background_color"),
 foreground_color: if val(post, "link_flair_text_color") == "dark" {
 "black".to_string()
@@ -486,6 +487,27 @@ pub fn val(j: &Value, k: &str) -> String {
 j["data"][k].as_str().unwrap_or_default().to_string()
 }
 
+#[macro_export]
+macro_rules! esc {
+    ($f:expr) => {
+        $f.replace('<', "&lt;").replace('>', "&gt;")
+    };
+    ($j:expr, $k:expr) => {
+        $j["data"][$k].as_str().unwrap_or_default().to_string().replace('<', "&lt;").replace('>', "&gt;")
+    };
+}
+
+// Escape < and > to accurately render HTML
+// pub fn esc(j: &Value, k: &str) -> String {
+//     val(j,k)
+//     // .replace('&', "&amp;")
+//     .replace('<', "&lt;")
+//     .replace('>', "&gt;")
+//     // .replace('"', "&quot;")
+//     // .replace('\'', "&#x27;")
+//     // .replace('/', "&#x2F;")
+// }
+
 //
 // NETWORKING
 //
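For context, a quick sketch of how the new `esc!` macro behaves: the one-argument form escapes an already-extracted string, and the two-argument form reads `j["data"][k]` like `val()` before escaping. The macro body is reproduced from the hunk above; the `main` function and the `json!` test input are illustrative only, and `serde_json` (already a dependency) is assumed to be available.

```rust
use serde_json::json;

// Reproduced from the src/utils.rs hunk above.
macro_rules! esc {
    ($f:expr) => {
        $f.replace('<', "&lt;").replace('>', "&gt;")
    };
    ($j:expr, $k:expr) => {
        $j["data"][$k].as_str().unwrap_or_default().to_string().replace('<', "&lt;").replace('>', "&gt;")
    };
}

fn main() {
    let post = json!({ "data": { "title": "<b>hello</b> & welcome" } });
    // Two-argument form: pull the field out of the JSON, then escape it.
    assert_eq!(esc!(post, "title"), "&lt;b&gt;hello&lt;/b&gt; & welcome");
    // One-argument form: escape an already-extracted String.
    assert_eq!(esc!("</a><script>".to_string()), "&lt;/a&gt;&lt;script&gt;");
    // Note: only '<' and '>' are escaped; '&' is left as-is.
}
```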
templates/post.html

@@ -13,7 +13,6 @@
 <!-- Meta Tags -->
 <meta name="author" content="u/{{ post.author.name }}">
 <meta name="title" content="{{ post.title }} - r/{{ post.community }}">
-<meta name="description" content="View on Libreddit, an alternative private front-end to Reddit.">
 <meta property="og:type" content="website">
 <meta property="og:url" content="{{ post.permalink }}">
 <meta property="og:title" content="{{ post.title }} - r/{{ post.community }}">