diff --git a/src/main.rs b/src/main.rs
index 611c0c9..fa772be 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -215,8 +215,10 @@ async fn main() {
.at("/r/u_:name")
.get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed());
- app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions(r).boxed());
- app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions(r).boxed());
+ app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
+ app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed());
+ app.at("/r/:sub/filter").post(|r| subreddit::subscriptions_filters(r).boxed());
+ app.at("/r/:sub/unfilter").post(|r| subreddit::subscriptions_filters(r).boxed());
app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed());
app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed());
diff --git a/src/post.rs b/src/post.rs
index 6bdf409..ff430fc 100644
--- a/src/post.rs
+++ b/src/post.rs
@@ -4,11 +4,12 @@ use crate::esc;
use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{
- error, format_num, format_url, param, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
+ error, format_num, format_url, get_filters, param, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
};
use hyper::{Body, Request, Response};
use askama::Template;
+use std::collections::HashSet;
// STRUCTS
#[derive(Template)]
@@ -55,7 +56,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
Ok(response) => {
// Parse the JSON into Post and Comment structs
let post = parse_post(&response[0]).await;
- let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment);
+ let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req));
let url = req.uri().to_string();
// Use the Post and Comment structs to generate a website to show users
@@ -156,7 +157,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
}
// COMMENTS
-fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str) -> Vec<Comment> {
+fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>) -> Vec<Comment> {
// Parse the comment JSON into a Vector of Comments
let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
@@ -177,7 +178,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
// If this comment contains replies, handle those too
let replies: Vec<Comment> = if data["replies"].is_object() {
- parse_comments(&data["replies"], post_link, post_author, highlighted_comment)
+ parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters)
} else {
Vec::new()
};
@@ -190,13 +191,29 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
let id = val(&comment, "id");
let highlighted = id == highlighted_comment;
+ let author = Author {
+ name: val(&comment, "author"),
+ flair: Flair {
+ flair_parts: FlairPart::parse(
+ data["author_flair_type"].as_str().unwrap_or_default(),
+ data["author_flair_richtext"].as_array(),
+ data["author_flair_text"].as_str(),
+ ),
+ text: esc!(&comment, "link_flair_text"),
+ background_color: val(&comment, "author_flair_background_color"),
+ foreground_color: val(&comment, "author_flair_text_color"),
+ },
+ distinguished: val(&comment, "distinguished"),
+ };
+ let is_filtered = filters.contains(&["u_", author.name.as_str()].concat());
+
// Many subreddits have a default comment posted about the sub's rules etc.
// Many libreddit users do not wish to see this kind of comment by default.
// Reddit does not tell us which users are "bots", so a good heuristic is to
// collapse stickied moderator comments.
let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator";
let is_stickied = data["stickied"].as_bool().unwrap_or_default();
- let collapsed = is_moderator_comment && is_stickied;
+ let collapsed = (is_moderator_comment && is_stickied) || is_filtered;
Comment {
id,
@@ -206,20 +223,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
post_link: post_link.to_string(),
post_author: post_author.to_string(),
body,
- author: Author {
- name: val(&comment, "author"),
- flair: Flair {
- flair_parts: FlairPart::parse(
- data["author_flair_type"].as_str().unwrap_or_default(),
- data["author_flair_richtext"].as_array(),
- data["author_flair_text"].as_str(),
- ),
- text: esc!(&comment, "link_flair_text"),
- background_color: val(&comment, "author_flair_background_color"),
- foreground_color: val(&comment, "author_flair_text_color"),
- },
- distinguished: val(&comment, "distinguished"),
- },
+ author,
score: if data["score_hidden"].as_bool().unwrap_or_default() {
("\u{2022}".to_string(), "Hidden".to_string())
} else {
@@ -232,6 +236,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
highlighted,
awards,
collapsed,
+ is_filtered,
}
})
.collect()
diff --git a/src/search.rs b/src/search.rs
index aff7173..0eef077 100644
--- a/src/search.rs
+++ b/src/search.rs
@@ -1,5 +1,5 @@
// CRATES
-use crate::utils::{catch_random, error, format_num, format_url, param, redirect, setting, template, val, Post, Preferences};
+use crate::utils::{catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
use crate::{
client::json,
subreddit::{can_access_quarantine, quarantine},
@@ -37,6 +37,11 @@ struct SearchTemplate {
params: SearchParams,
prefs: Preferences,
url: String,
+ /// Whether the subreddit itself is filtered.
+ is_filtered: bool,
+ /// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
+ /// and all fetched posts being filtered).
+ all_posts_filtered: bool,
}
// SERVICES
@@ -59,15 +64,23 @@ pub async fn find(req: Request<Body>) -> Result<Response<Body>, String> {
let typed = param(&path, "type").unwrap_or_default();
let sort = param(&path, "sort").unwrap_or_else(|| "relevance".to_string());
+ let filters = get_filters(&req);
// If search is not restricted to this subreddit, show other subreddits in search results
- let subreddits = param(&path, "restrict_sr").map_or(search_subreddits(&query, &typed).await, |_| Vec::new());
+ let subreddits = if param(&path, "restrict_sr").is_none() {
+ let mut subreddits = search_subreddits(&query, &typed).await;
+ subreddits.retain(|s| !filters.contains(s.name.as_str()));
+ subreddits
+ } else {
+ Vec::new()
+ };
let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
- match Post::fetch(&path, String::new(), quarantined).await {
- Ok((posts, after)) => template(SearchTemplate {
- posts,
+ // If all requested subs are filtered, we don't need to fetch posts.
+ if sub.split("+").all(|s| filters.contains(s)) {
+ template(SearchTemplate {
+ posts: Vec::new(),
subreddits,
sub,
params: SearchParams {
@@ -75,19 +88,46 @@ pub async fn find(req: Request) -> Result, String> {
sort,
t: param(&path, "t").unwrap_or_default(),
before: param(&path, "after").unwrap_or_default(),
- after,
+ after: "".to_string(),
restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
typed,
},
prefs: Preferences::new(req),
url,
- }),
- Err(msg) => {
- if msg == "quarantined" {
- let sub = req.param("sub").unwrap_or_default();
- quarantine(req, sub)
- } else {
- error(req, msg).await
+ is_filtered: true,
+ all_posts_filtered: false,
+ })
+ } else {
+ match Post::fetch(&path, quarantined).await {
+ Ok((mut posts, after)) => {
+ let all_posts_filtered = filter_posts(&mut posts, &filters);
+
+ template(SearchTemplate {
+ posts,
+ subreddits,
+ sub,
+ params: SearchParams {
+ q: query.replace('"', "&quot;"),
+ sort,
+ t: param(&path, "t").unwrap_or_default(),
+ before: param(&path, "after").unwrap_or_default(),
+ after,
+ restrict_sr: param(&path, "restrict_sr").unwrap_or_default(),
+ typed,
+ },
+ prefs: Preferences::new(req),
+ url,
+ is_filtered: false,
+ all_posts_filtered,
+ })
+ }
+ Err(msg) => {
+ if msg == "quarantined" {
+ let sub = req.param("sub").unwrap_or_default();
+ quarantine(req, sub)
+ } else {
+ error(req, msg).await
+ }
}
}
}
@@ -109,7 +149,7 @@ async fn search_subreddits(q: &str, typed: &str) -> Vec {
let icon = subreddit["data"]["community_icon"].as_str().map_or_else(|| val(subreddit, "icon_img"), ToString::to_string);
Subreddit {
- name: val(subreddit, "display_name_prefixed"),
+ name: val(subreddit, "display_name"),
url: val(subreddit, "url"),
icon: format_url(&icon),
description: val(subreddit, "public_description"),
diff --git a/src/settings.rs b/src/settings.rs
index efa4708..9cdd266 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -109,7 +109,7 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response response.insert_cookie(
Cookie::build(name.to_owned(), value.clone())
diff --git a/src/subreddit.rs b/src/subreddit.rs
index 66938f5..f94a583 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -1,6 +1,8 @@
// CRATES
use crate::esc;
-use crate::utils::{catch_random, error, format_num, format_url, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit};
+use crate::utils::{
+ catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
+};
use crate::{client::json, server::ResponseExt, RequestExt};
use askama::Template;
use cookie::Cookie;
@@ -17,6 +19,11 @@ struct SubredditTemplate {
ends: (String, String),
prefs: Preferences,
url: String,
+ /// Whether the subreddit itself is filtered.
+ is_filtered: bool,
+ /// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
+ /// and all fetched posts being filtered).
+ all_posts_filtered: bool,
}
#[derive(Template)]
@@ -48,7 +55,7 @@ pub async fn community(req: Request) -> Result, String> {
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
- let sub = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
+ let sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
if subscribed.is_empty() {
"popular".to_string()
} else {
@@ -57,59 +64,77 @@ pub async fn community(req: Request) -> Result, String> {
} else {
front_page.clone()
});
- let quarantined = can_access_quarantine(&req, &sub) || root;
+ let quarantined = can_access_quarantine(&req, &sub_name) || root;
// Handle random subreddits
- if let Ok(random) = catch_random(&sub, "").await {
+ if let Ok(random) = catch_random(&sub_name, "").await {
return Ok(random);
}
- if req.param("sub").is_some() && sub.starts_with("u_") {
- return Ok(redirect(["/user/", &sub[2..]].concat()));
+ if req.param("sub").is_some() && sub_name.starts_with("u_") {
+ return Ok(redirect(["/user/", &sub_name[2..]].concat()));
}
- let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.uri().query().unwrap_or_default());
-
- match Post::fetch(&path, String::new(), quarantined).await {
- Ok((posts, after)) => {
- // If you can get subreddit posts, also request subreddit metadata
- let sub = if !sub.contains('+') && sub != subscribed && sub != "popular" && sub != "all" {
- // Regular subreddit
- subreddit(&sub, quarantined).await.unwrap_or_default()
- } else if sub == subscribed {
- // Subscription feed
- if req.uri().path().starts_with("/r/") {
- subreddit(&sub, quarantined).await.unwrap_or_default()
- } else {
- Subreddit::default()
- }
- } else if sub.contains('+') {
- // Multireddit
- Subreddit {
- name: sub,
- ..Subreddit::default()
- }
- } else {
- Subreddit::default()
- };
-
- let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
-
- template(SubredditTemplate {
- sub,
- posts,
- sort: (sort, param(&path, "t").unwrap_or_default()),
- ends: (param(&path, "after").unwrap_or_default(), after),
- prefs: Preferences::new(req),
- url,
- })
+ // Request subreddit metadata
+ let sub = if !sub_name.contains('+') && sub_name != subscribed && sub_name != "popular" && sub_name != "all" {
+ // Regular subreddit
+ subreddit(&sub_name, quarantined).await.unwrap_or_default()
+ } else if sub_name == subscribed {
+ // Subscription feed
+ if req.uri().path().starts_with("/r/") {
+ subreddit(&sub_name, quarantined).await.unwrap_or_default()
+ } else {
+ Subreddit::default()
+ }
+ } else if sub_name.contains('+') {
+ // Multireddit
+ Subreddit {
+ name: sub_name.clone(),
+ ..Subreddit::default()
+ }
+ } else {
+ Subreddit::default()
+ };
+
+ let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, req.uri().query().unwrap_or_default());
+ let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
+ let filters = get_filters(&req);
+
+ // If all requested subs are filtered, we don't need to fetch posts.
+ if sub_name.split("+").all(|s| filters.contains(s)) {
+ template(SubredditTemplate {
+ sub,
+ posts: Vec::new(),
+ sort: (sort, param(&path, "t").unwrap_or_default()),
+ ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
+ prefs: Preferences::new(req),
+ url,
+ is_filtered: true,
+ all_posts_filtered: false,
+ })
+ } else {
+ match Post::fetch(&path, quarantined).await {
+ Ok((mut posts, after)) => {
+ let all_posts_filtered = filter_posts(&mut posts, &filters);
+
+ template(SubredditTemplate {
+ sub,
+ posts,
+ sort: (sort, param(&path, "t").unwrap_or_default()),
+ ends: (param(&path, "after").unwrap_or_default(), after),
+ prefs: Preferences::new(req),
+ url,
+ is_filtered: false,
+ all_posts_filtered,
+ })
+ }
+ Err(msg) => match msg.as_str() {
+ "quarantined" => quarantine(req, sub_name),
+ "private" => error(req, format!("r/{} is a private community", sub_name)).await,
+ "banned" => error(req, format!("r/{} has been banned from Reddit", sub_name)).await,
+ _ => error(req, msg).await,
+ },
}
- Err(msg) => match msg.as_str() {
- "quarantined" => quarantine(req, sub),
- "private" => error(req, format!("r/{} is a private community", sub)).await,
- "banned" => error(req, format!("r/{} has been banned from Reddit", sub)).await,
- _ => error(req, msg).await,
- },
}
}
@@ -150,18 +175,25 @@ pub fn can_access_quarantine(req: &Request, sub: &str) -> bool {
setting(req, &format!("allow_quaran_{}", sub.to_lowercase())).parse().unwrap_or_default()
}
-// Sub or unsub by setting subscription cookie using response "Set-Cookie" header
-pub async fn subscriptions(req: Request<Body>) -> Result<Response<Body>, String> {
+// Sub, filter, unfilter, or unsub by setting subscription cookie using response "Set-Cookie" header
+pub async fn subscriptions_filters(req: Request<Body>) -> Result<Response<Body>, String> {
let sub = req.param("sub").unwrap_or_default();
+ let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
+
// Handle random subreddits
if sub == "random" || sub == "randnsfw" {
- return Err("Can't subscribe to random subreddit!".to_string());
+ if action.contains(&"filter".to_string()) || action.contains(&"unfilter".to_string()) {
+ return Err("Can't filter random subreddit!".to_string());
+ } else {
+ return Err("Can't subscribe to random subreddit!".to_string());
+ }
}
let query = req.uri().query().unwrap_or_default().to_string();
- let action: Vec<String> = req.uri().path().split('/').map(String::from).collect();
- let mut sub_list = Preferences::new(req).subscriptions;
+ let preferences = Preferences::new(req);
+ let mut sub_list = preferences.subscriptions;
+ let mut filters = preferences.filters;
// Retrieve list of posts for these subreddits to extract display names
let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await?;
@@ -182,8 +214,10 @@ pub async fn subscriptions(req: Request) -> Result, String>
for part in sub.split('+') {
// Retrieve display name for the subreddit
let display;
- let part = if let Some(&(_, display)) = display_lookup.iter().find(|x| x.0 == part.to_lowercase()) {
- // This is already known, doesn't require seperate request
+ let part = if part.starts_with("u_") {
+ part
+ } else if let Some(&(_, display)) = display_lookup.iter().find(|x| x.0 == part.to_lowercase()) {
+ // This is already known, doesn't require separate request
display
} else {
// This subreddit display name isn't known, retrieve it
@@ -196,16 +230,28 @@ pub async fn subscriptions(req: Request) -> Result, String>
if action.contains(&"subscribe".to_string()) && !sub_list.contains(&part.to_owned()) {
// Add each sub name to the subscribed list
sub_list.push(part.to_owned());
- // Reorder sub names alphabettically
+ filters.retain(|s| s.to_lowercase() != part.to_lowercase());
+ // Reorder sub names alphabetically
sub_list.sort_by_key(|a| a.to_lowercase());
+ filters.sort_by_key(|a| a.to_lowercase());
} else if action.contains(&"unsubscribe".to_string()) {
// Remove sub name from subscribed list
sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
+ } else if action.contains(&"filter".to_string()) && !filters.contains(&part.to_owned()) {
+ // Add each sub name to the filtered list
+ filters.push(part.to_owned());
+ sub_list.retain(|s| s.to_lowercase() != part.to_lowercase());
+ // Reorder sub names alphabetically
+ filters.sort_by_key(|a| a.to_lowercase());
+ sub_list.sort_by_key(|a| a.to_lowercase());
+ } else if action.contains(&"unfilter".to_string()) {
+ // Remove sub name from filtered list
+ filters.retain(|s| s.to_lowercase() != part.to_lowercase());
}
}
// Redirect back to subreddit
- // check for redirect parameter if unsubscribing from outside sidebar
+ // check for redirect parameter if unsubscribing/unfiltering from outside sidebar
let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") {
format!("/{}/", redirect_path)
} else {
@@ -226,6 +272,17 @@ pub async fn subscriptions(req: Request) -> Result, String>
.finish(),
);
}
+ if filters.is_empty() {
+ response.remove_cookie("filters".to_string());
+ } else {
+ response.insert_cookie(
+ Cookie::build("filters", filters.join("+"))
+ .path("/")
+ .http_only(true)
+ .expires(OffsetDateTime::now_utc() + Duration::weeks(52))
+ .finish(),
+ );
+ }
Ok(response)
}
diff --git a/src/user.rs b/src/user.rs
index 9179551..61772e5 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -2,7 +2,7 @@
use crate::client::json;
use crate::esc;
use crate::server::RequestExt;
-use crate::utils::{error, format_url, param, template, Post, Preferences, User};
+use crate::utils::{error, filter_posts, format_url, get_filters, param, template, Post, Preferences, User};
use askama::Template;
use hyper::{Body, Request, Response};
use time::OffsetDateTime;
@@ -17,6 +17,11 @@ struct UserTemplate {
ends: (String, String),
prefs: Preferences,
url: String,
+ /// Whether the user themself is filtered.
+ is_filtered: bool,
+ /// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place,
+ /// and all fetched posts being filtered).
+ all_posts_filtered: bool,
}
// FUNCTIONS
@@ -27,31 +32,45 @@ pub async fn profile(req: Request<Body>) -> Result<Response<Body>, String> {
req.param("name").unwrap_or_else(|| "reddit".to_string()),
req.uri().query().unwrap_or_default()
);
+ let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
// Retrieve other variables from Libreddit request
let sort = param(&path, "sort").unwrap_or_default();
let username = req.param("name").unwrap_or_default();
+ let user = user(&username).await.unwrap_or_default();
- // Request user posts/comments from Reddit
- let posts = Post::fetch(&path, "Comment".to_string(), false).await;
- let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str()));
+ let filters = get_filters(&req);
+ if filters.contains(&["u_", &username].concat()) {
+ template(UserTemplate {
+ user,
+ posts: Vec::new(),
+ sort: (sort, param(&path, "t").unwrap_or_default()),
+ ends: (param(&path, "after").unwrap_or_default(), "".to_string()),
+ prefs: Preferences::new(req),
+ url,
+ is_filtered: true,
+ all_posts_filtered: false,
+ })
+ } else {
+ // Request user posts/comments from Reddit
+ match Post::fetch(&path, false).await {
+ Ok((mut posts, after)) => {
+ let all_posts_filtered = filter_posts(&mut posts, &filters);
- match posts {
- Ok((posts, after)) => {
- // If you can get user posts, also request user data
- let user = user(&username).await.unwrap_or_default();
-
- template(UserTemplate {
- user,
- posts,
- sort: (sort, param(&path, "t").unwrap_or_default()),
- ends: (param(&path, "after").unwrap_or_default(), after),
- prefs: Preferences::new(req),
- url,
- })
+ template(UserTemplate {
+ user,
+ posts,
+ sort: (sort, param(&path, "t").unwrap_or_default()),
+ ends: (param(&path, "after").unwrap_or_default(), after),
+ prefs: Preferences::new(req),
+ url,
+ is_filtered: false,
+ all_posts_filtered,
+ })
+ }
+ // If there is an error show error page
+ Err(msg) => error(req, msg).await,
}
- // If there is an error show error page
- Err(msg) => error(req, msg).await,
}
}
diff --git a/src/utils.rs b/src/utils.rs
index d6961ec..dad2e99 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -7,7 +7,7 @@ use cookie::Cookie;
use hyper::{Body, Request, Response};
use regex::Regex;
use serde_json::Value;
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use time::{Duration, OffsetDateTime};
use url::Url;
@@ -219,7 +219,7 @@ pub struct Post {
impl Post {
// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
- pub async fn fetch(path: &str, fallback_title: String, quarantine: bool) -> Result<(Vec<Self>, String), String> {
+ pub async fn fetch(path: &str, quarantine: bool) -> Result<(Vec<Self>, String), String> {
let res;
let post_list;
@@ -262,7 +262,7 @@ impl Post {
posts.push(Self {
id: val(post, "id"),
- title: esc!(if title.is_empty() { fallback_title.clone() } else { title }),
+ title,
community: val(post, "subreddit"),
body,
author: Author {
@@ -346,6 +346,7 @@ pub struct Comment {
pub highlighted: bool,
pub awards: Awards,
pub collapsed: bool,
+ pub is_filtered: bool,
}
#[derive(Default, Clone)]
@@ -458,6 +459,7 @@ pub struct Preferences {
pub comment_sort: String,
pub post_sort: String,
 pub subscriptions: Vec<String>,
+ pub filters: Vec<String>,
}
impl Preferences {
@@ -475,10 +477,28 @@ impl Preferences {
comment_sort: setting(&req, "comment_sort"),
post_sort: setting(&req, "post_sort"),
subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
+ filters: setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
}
}
}
+/// Gets a `HashSet` of filters from the cookie in the given `Request`.
+pub fn get_filters(req: &Request<Body>) -> HashSet<String> {
+ setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect::<HashSet<String>>()
+}
+
+/// Filters a `Vec` by the given `HashSet` of filters (each filter being a subreddit name or a user name). If a
+/// `Post`'s subreddit or author is found in the filters, it is removed. Returns `true` if _all_ posts were filtered
+/// out, or `false` otherwise.
+pub fn filter_posts(posts: &mut Vec<Post>, filters: &HashSet<String>) -> bool {
+ if posts.is_empty() {
+ false
+ } else {
+ posts.retain(|p| !filters.contains(&p.community) && !filters.contains(&["u_", &p.author.name].concat()));
+ posts.is_empty()
+ }
+}
+
//
// FORMATTING
//
@@ -515,7 +535,7 @@ pub fn setting(req: &Request, name: &str) -> String {
// Detect and redirect in the event of a random subreddit
 pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
- if (sub == "random" || sub == "randnsfw") && !sub.contains('+') {
+ if sub == "random" || sub == "randnsfw" {
let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
.as_str()
.unwrap_or_default()
diff --git a/static/style.css b/static/style.css
index fefba1a..514b642 100644
--- a/static/style.css
+++ b/static/style.css
@@ -372,7 +372,7 @@ aside {
margin-bottom: 20px;
}
-#user_details, #sub_details {
+#user_details, #sub_details, #sub_actions, #user_actions {
display: grid;
grid-template-columns: repeat(2, 1fr);
grid-column-gap: 20px;
@@ -384,7 +384,7 @@ aside {
/* Subscriptions */
-#sub_subscription, #user_subscription {
+#sub_subscription, #user_subscription, #user_filter, #sub_filter {
margin-top: 20px;
}
@@ -392,18 +392,18 @@ aside {
margin-bottom: 20px;
}
-.subscribe, .unsubscribe {
+.subscribe, .unsubscribe, .filter, .unfilter {
padding: 10px 20px;
border-radius: 5px;
cursor: pointer;
}
-.subscribe {
+.subscribe, .filter {
color: var(--foreground);
background-color: var(--accent);
}
-.unsubscribe {
+.unsubscribe, .unfilter {
color: var(--text);
background-color: var(--highlighted);
}
@@ -1042,7 +1042,7 @@ a.search_subreddit:hover {
overflow: auto;
}
-.comment_body.highlighted {
+.comment_body.highlighted, .comment_body_filtered.highlighted {
background: var(--highlighted);
}
@@ -1055,6 +1055,15 @@ a.search_subreddit:hover {
color: var(--accent);
}
+.comment_body_filtered {
+ opacity: 0.4;
+ font-weight: normal;
+ font-style: italic;
+ padding: 5px 5px;
+ margin: 5px 0;
+ overflow: auto;
+}
+
.deeper_replies {
color: var(--accent);
margin-left: 15px;
@@ -1226,6 +1235,14 @@ input[type="submit"] {
color: var(--accent);
}
+#settings_filters .unsubscribe {
+ margin-left: 30px;
+}
+
+#settings_filters a {
+ color: var(--accent);
+}
+
.helper {
padding: 10px;
width: 250px;
diff --git a/templates/comment.html b/templates/comment.html
index 8734e2a..7090251 100644
--- a/templates/comment.html
+++ b/templates/comment.html
@@ -8,7 +8,7 @@
-
+ {% endif %}
{% endif %}
diff --git a/templates/user.html b/templates/user.html
index bfcef08..8095d06 100644
--- a/templates/user.html
+++ b/templates/user.html
@@ -13,11 +13,12 @@
{% block body %}
+ {% if !is_filtered %}
+ {% if all_posts_filtered %}
+
(All content on this page has been filtered)
+ {% else %}
{% for post in posts %}
{% if post.flags.nsfw && prefs.show_nsfw != "on" %}
- {% else if post.title != "Comment" %}
+ {% else if !post.title.is_empty() %}
{% call utils::post_in_list(post) %}
{% else %}
+ {% endif %}
+ {% endif %}
u/{{ author.name }} {% if author.flair.flair_parts.len() > 0 %} @@ -25,7 +25,11 @@ {% endfor %} {% endif %}
+ {% if is_filtered %} +- {{ subreddit.name }} + r/{{ subreddit.name }} • {{ subreddit.subscribers.0 }} Members
@@ -54,10 +55,15 @@ {% endif %}Filtered Feeds
+ {% for sub in prefs.filters %} +Note: settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them.
-
You can restore your current settings and subscriptions after clearing your cookies using this link.
+You can restore your current settings and subscriptions after clearing your cookies using this link.