Compare commits


12 Commits

Author SHA1 Message Date
bd4cb96c0f  "fix" scraper  2024-11-03 09:36:11 +13:00
a9c99cc752  Merge remote-tracking branch 'upstream/main'  2024-11-03 09:34:12 +13:00
Matthew Esposito  f03bdcf472  feat: display whether or not the instance is up to date on error (#310)  2024-11-01 18:16:25 -04:00
Matthew Esposito  2fd358f3ed  feat(hls): add video quality preference (#306)  2024-11-01 12:28:52 -04:00
Matthew Esposito  5ef57812f8  style: fix clippy  2024-11-01 11:39:05 -04:00
Nolan Poe  d17d097b12  Fix parts of CI (#304)  2024-10-31 22:50:50 -04:00
    * Run cargo fmt, hide clippy::cmp_owned errors
    * Bump deps
    * Fix failing test
    * Update src/client.rs
    Co-authored-by: Matthew Esposito <matt@matthew.science>
Alex  a96894c743  enables http2 crate feature, replaces http1 protocol with http2 on co… (#305)  2024-10-31 22:48:19 -04:00
Matthew Esposito  9aea9c90a2  fix: reduce to minimum patch, fix clippy  2024-10-31 16:09:35 -04:00
Matthew Esposito  efdf1848ac  fix: emergency patch for 403  2024-10-31 16:06:29 -04:00
Matthew Esposito  bc9530821d  feat(scraper): add output file  2024-10-30 15:15:38 -04:00
Matthew Esposito  f3d2f0cc59  feat(scraper): add scraper CLI  2024-10-21 20:54:05 -04:00
Matthew Esposito  49ef59e000  chore: make library  2024-10-21 20:46:03 -04:00
19 changed files with 546 additions and 483 deletions

Cargo.lock (generated): 758 lines changed

File diff suppressed because it is too large.

Cargo.toml

@@ -9,6 +9,7 @@ authors = [
   "spikecodes <19519553+spikecodes@users.noreply.github.com>",
 ]
 edition = "2021"
+default-run = "redlib"
 
 [dependencies]
 rinja = { version = "0.3.4", default-features = false }
@@ -16,13 +17,14 @@ cached = { version = "0.51.3", features = ["async"] }
 clap = { version = "4.4.11", default-features = false, features = [
   "std",
   "env",
+  "derive",
 ] }
 regex = "1.10.2"
 serde = { version = "1.0.193", features = ["derive"] }
 cookie = "0.18.0"
 futures-lite = "2.2.0"
 hyper = { version = "0.14.28", features = ["full"] }
-hyper-rustls = "0.24.2"
+hyper-rustls = { version = "0.24.2", features = [ "http2" ] }
 percent-encoding = "2.3.1"
 route-recognizer = "0.3.1"
 serde_json = "1.0.108"
@@ -56,3 +58,11 @@ sealed_test = "1.0.0"
 codegen-units = 1
 lto = true
 strip = "symbols"
+
+[[bin]]
+name = "redlib"
+path = "src/main.rs"
+
+[[bin]]
+name = "scraper"
+path = "src/scraper/main.rs"

src/client.rs

@@ -4,7 +4,7 @@ use futures_lite::future::block_on;
 use futures_lite::{future::Boxed, FutureExt};
 use hyper::client::HttpConnector;
 use hyper::header::HeaderValue;
-use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri};
+use hyper::{body, body::Buf, header, Body, Client, Method, Request, Response, Uri};
 use hyper_rustls::HttpsConnector;
 use libflate::gzip;
 use log::{error, trace, warn};
@@ -30,10 +30,10 @@ const REDDIT_SHORT_URL_BASE_HOST: &str = "redd.it";
 const ALTERNATIVE_REDDIT_URL_BASE: &str = "https://www.reddit.com";
 const ALTERNATIVE_REDDIT_URL_BASE_HOST: &str = "www.reddit.com";
 
-pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
-  let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();
-  client::Client::builder().build(https)
-});
+pub static HTTPS_CONNECTOR: Lazy<HttpsConnector<HttpConnector>> =
+  Lazy::new(|| hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http2().build());
+
+pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| Client::builder().build::<_, Body>(HTTPS_CONNECTOR.clone()));
 
 pub static OAUTH_CLIENT: Lazy<ArcSwap<Oauth>> = Lazy::new(|| {
   let client = block_on(Oauth::new());
@@ -154,7 +154,7 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
   let parsed_uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
 
   // Build the hyper client from the HTTPS connector.
-  let client: Client<_, Body> = CLIENT.clone();
+  let client: &Lazy<Client<_, Body>> = &CLIENT;
 
   let mut builder = Request::get(parsed_uri);
@@ -216,7 +216,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
   let url = format!("{base_path}{path}");
 
   // Construct the hyper client from the HTTPS connector.
-  let client: Client<_, Body> = CLIENT.clone();
+  let client: &Lazy<Client<_, Body>> = &CLIENT;
 
   let (token, vendor_id, device_id, user_agent, loid) = {
     let client = OAUTH_CLIENT.load_full();
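
The hunks above replace the old per-request CLIENT.clone() with a borrow of one lazily initialized client and switch the hyper-rustls connector from enable_http1() to enable_http2(). A minimal sketch of that shared-client pattern is below; it is an illustration only, with a hypothetical SHARED_CLIENT name and a plain HttpConnector (no TLS) just to keep it self-contained, not the project's actual connector setup.

use hyper::{client::HttpConnector, Body, Client};
use once_cell::sync::Lazy;

// One client for the whole process; call sites borrow it instead of cloning it.
static SHARED_CLIENT: Lazy<Client<HttpConnector, Body>> = Lazy::new(|| Client::builder().build_http());

#[tokio::main] // assumes a tokio runtime, which hyper 0.14 needs anyway
async fn main() {
  let client: &Lazy<Client<HttpConnector, Body>> = &SHARED_CLIENT;
  let resp = client.get("http://example.com".parse().unwrap()).await;
  println!("status: {:?}", resp.map(|r| r.status()));
}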

src/instance_info.rs

@@ -85,7 +85,7 @@ fn info_html(req: &Request<Body>) -> Result<Response<Body>, Error> {
 pub struct InstanceInfo {
   package_name: String,
   crate_version: String,
-  git_commit: String,
+  pub git_commit: String,
   deploy_date: String,
   compile_mode: String,
   deploy_unix_ts: i64,

src/lib.rs (new file, 13 lines)

@@ -0,0 +1,13 @@
+pub mod client;
+pub mod config;
+pub mod duplicates;
+pub mod instance_info;
+pub mod oauth;
+pub mod oauth_resources;
+pub mod post;
+pub mod search;
+pub mod server;
+pub mod settings;
+pub mod subreddit;
+pub mod user;
+pub mod utils;

src/main.rs

@@ -2,35 +2,21 @@
 #![forbid(unsafe_code)]
 #![allow(clippy::cmp_owned)]
 
-// Reference local files
-mod config;
-mod duplicates;
-mod instance_info;
-mod oauth;
-mod oauth_resources;
-mod post;
-mod search;
-mod settings;
-mod subreddit;
-mod user;
-mod utils;
-
-// Import Crates
+use cached::proc_macro::cached;
 use clap::{Arg, ArgAction, Command};
+use std::str::FromStr;
 
 use futures_lite::FutureExt;
+use hyper::Uri;
 use hyper::{header::HeaderValue, Body, Request, Response};
 
-mod client;
-use client::{canonical_path, proxy};
 use log::info;
 use once_cell::sync::Lazy;
-use server::RequestExt;
-use utils::{error, redirect, ThemeAssets, MascotAssets};
+use redsunlib::client::{canonical_path, proxy, CLIENT};
+use redsunlib::server::{self, RequestExt};
+use redsunlib::utils::{error, redirect, ThemeAssets, MascotAssets};
+use redsunlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user};
 
-use crate::client::OAUTH_CLIENT;
-
-mod server;
+use redsunlib::client::OAUTH_CLIENT;
 
 // Create Services
@@ -257,6 +243,12 @@ async fn main() {
   app
     .at("/highlighted.js")
     .get(|_| resource(include_str!("../static/highlighted.js"), "text/javascript", false).boxed());
+  app
+    .at("/check_update.js")
+    .get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());
+
+  app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed());
+
   // FFmpeg
   app
     .at("/ffmpeg/814.ffmpeg.js")
@@ -437,3 +429,22 @@ async fn main() {
     eprintln!("Server error: {e}");
   }
 }
+
+pub async fn proxy_commit_info() -> Result<Response<Body>, String> {
+  Ok(
+    Response::builder()
+      .status(200)
+      .header("content-type", "application/atom+xml")
+      .body(Body::from(fetch_commit_info().await))
+      .unwrap_or_default(),
+  )
+}
+
+#[cached(time = 600)]
+async fn fetch_commit_info() -> String {
+  let uri = Uri::from_str("https://github.com/redlib-org/redlib/commits/main.atom").expect("Invalid URI");
+  let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body();
+  hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect()
+}
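
The new /commits.atom handler above proxies GitHub's atom feed, and #[cached(time = 600)] from the cached crate (already a dependency in Cargo.toml) memoizes the fetched String for 600 seconds, so GitHub is contacted at most once per ten minutes however often the route is hit. A minimal, self-contained sketch of that memoization pattern, with a hypothetical expensive_lookup standing in for the real fetch:

use cached::proc_macro::cached;

// The macro generates a timed cache keyed on the arguments; within the TTL the
// function body is skipped and the stored String is returned.
#[cached(time = 600)]
async fn expensive_lookup(key: String) -> String {
  format!("result for {key}") // imagine a network call here
}

#[tokio::main] // assumes a tokio runtime
async fn main() {
  let first = expensive_lookup("main".into()).await;  // runs the body
  let second = expensive_lookup("main".into()).await; // served from the cache
  assert_eq!(first, second);
}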

src/oauth.rs

@@ -94,7 +94,7 @@ impl Oauth {
     trace!("Sending token request...");
 
     // Send request
-    let client: client::Client<_, Body> = CLIENT.clone();
+    let client: &once_cell::sync::Lazy<client::Client<_, Body>> = &CLIENT;
     let resp = client.request(request).await.ok()?;
 
     trace!("Received response with status {} and length {:?}", resp.status(), resp.headers().get("content-length"));

View File

@@ -1,3 +1,5 @@
+#![allow(clippy::cmp_owned)]
+
 // CRATES
 use crate::client::json;
 use crate::config::get_setting;

src/scraper/main.rs (new file, 75 lines)

@@ -0,0 +1,75 @@
+use std::{fmt::Display, io::Write};
+use clap::{Parser, ValueEnum};
+use redsunlib::utils::Post;
+
+#[derive(Parser)]
+#[command(name = "my_cli")]
+#[command(about = "A simple CLI example", long_about = None)]
+struct Cli {
+  #[arg(short = 's', long = "sub")]
+  sub: String,
+
+  #[arg(short = 'c', long = "count")]
+  count: usize,
+
+  #[arg(long = "sort")]
+  sort: SortOrder,
+
+  #[arg(short = 'f', long = "format", value_enum)]
+  format: Format,
+
+  #[arg(short = 'o', long = "output")]
+  output: Option<String>,
+}
+
+#[derive(Debug, Clone, ValueEnum)]
+enum SortOrder {
+  Hot,
+  Rising,
+  New,
+  Top,
+  Controversial,
+}
+
+impl Display for SortOrder {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    match self {
+      SortOrder::Hot => write!(f, "hot"),
+      SortOrder::Rising => write!(f, "rising"),
+      SortOrder::New => write!(f, "new"),
+      SortOrder::Top => write!(f, "top"),
+      SortOrder::Controversial => write!(f, "controversial"),
+    }
+  }
+}
+
+#[derive(Debug, Clone, ValueEnum)]
+enum Format {
+  Json,
+}
+
+#[tokio::main]
+async fn main() {
+  let cli = Cli::parse();
+  let (sub, final_count, sort, format, output) = (cli.sub, cli.count, cli.sort, cli.format, cli.output);
+  let initial = format!("/r/{sub}/{sort}.json?&raw_json=1");
+  let (mut posts, mut after) = Post::fetch(&initial, false).await.unwrap();
+  while posts.len() < final_count {
+    print!("\r");
+    let path = format!("/r/{sub}/{sort}.json?sort={sort}&t=&after={after}&raw_json=1");
+    let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap();
+    posts.extend(new_posts);
+    after = new_after;
+    // Print number of posts fetched
+    print!("Fetched {} posts", posts.len());
+    std::io::stdout().flush().unwrap();
+  }
+
+  match format {
+    Format::Json => {
+      let filename: String = output.unwrap_or_else(|| format!("{sub}.json"));
+      let json = serde_json::to_string(&posts).unwrap();
+      std::fs::write(filename, json).unwrap();
+    }
+  }
+}
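
The loop above pages through the listing by threading Reddit's "after" cursor: each call to Post::fetch returns a batch of posts plus the cursor for the next page, and fetching repeats until the requested count is reached. A generic sketch of that cursor loop, with a hypothetical fetch_page standing in for Post::fetch:

// fetch_page is a stand-in: a real implementation would issue an HTTP request
// and return (items on this page, cursor for the next page).
async fn fetch_page(after: &str) -> (Vec<String>, String) {
  (vec![format!("item following '{after}'")], "t3_next".to_string())
}

async fn collect(count: usize) -> Vec<String> {
  let (mut items, mut after) = fetch_page("").await;
  while items.len() < count {
    let (page, next) = fetch_page(&after).await;
    items.extend(page);
    after = next; // keep the newest cursor for the next request
  }
  items
}

#[tokio::main] // assumes a tokio runtime
async fn main() {
  println!("collected {} items", collect(3).await.len());
}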

src/search.rs

@@ -1,9 +1,11 @@
+#![allow(clippy::cmp_owned)]
+
 // CRATES
 use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
 use crate::{
   client::json,
+  server::RequestExt,
   subreddit::{can_access_quarantine, quarantine},
-  RequestExt,
 };
 use hyper::{Body, Request, Response};
 use once_cell::sync::Lazy;

src/server.rs

@@ -1,4 +1,5 @@
 #![allow(dead_code)]
+#![allow(clippy::cmp_owned)]
 
 use brotli::enc::{BrotliCompress, BrotliEncoderParams};
 use cached::proc_macro::cached;
@@ -195,6 +196,12 @@ impl Route<'_> {
   }
 }
 
+impl Default for Server {
+  fn default() -> Self {
+    Self::new()
+  }
+}
+
 impl Server {
   pub fn new() -> Self {
     Self {
@@ -723,7 +730,7 @@ mod tests {
     CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),
-    _ => panic!("no decompressor for {}", expected_encoding.to_string()),
+    _ => panic!("no decompressor for {}", expected_encoding),
   };
 
   let mut decompressed = Vec::<u8>::new();
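
The impl Default for Server added above looks like the usual answer to clippy's new_without_default lint (this range includes two clippy-cleanup commits): when a type already has an argument-less new(), default() simply delegates to it. A minimal sketch of the pattern with a hypothetical Widget type, not the project's Server:

struct Widget {
  routes: Vec<String>,
}

impl Widget {
  pub fn new() -> Self {
    Widget { routes: Vec::new() }
  }
}

// Delegating keeps a single construction path and satisfies the lint
// without changing behavior.
impl Default for Widget {
  fn default() -> Self {
    Self::new()
  }
}

fn main() {
  let w = Widget::default(); // same as Widget::new()
  assert!(w.routes.is_empty());
}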

src/settings.rs

@@ -1,3 +1,5 @@
+#![allow(clippy::cmp_owned)]
+
 use std::collections::HashMap;
 
 // CRATES
@@ -19,7 +21,7 @@ struct SettingsTemplate {
 // CONSTANTS
-const PREFS: [&str; 19] = [
+const PREFS: [&str; 20] = [
   "theme",
   "mascot",
   "front_page",
@@ -39,6 +41,7 @@ const PREFS: [&str; 19] = [
   "hide_awards",
   "hide_score",
   "disable_visit_reddit_confirmation",
+  "video_quality",
 ];
 
 // FUNCTIONS

src/subreddit.rs

@@ -1,9 +1,11 @@
+#![allow(clippy::cmp_owned)]
+
 use crate::{config, utils};
 // CRATES
 use crate::utils::{
   catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
 };
-use crate::{client::json, server::ResponseExt, RequestExt};
+use crate::{client::json, server::RequestExt, server::ResponseExt};
 use cookie::Cookie;
 use hyper::{Body, Request, Response};
 use rinja::Template;

View File

@@ -1,3 +1,5 @@
+#![allow(clippy::cmp_owned)]
+
 // CRATES
 use crate::client::json;
 use crate::server::RequestExt;

src/utils.rs

@@ -1,4 +1,6 @@
 #![allow(dead_code)]
+#![allow(clippy::cmp_owned)]
+
 use crate::config::{self, get_setting};
 //
 // CRATES
@@ -11,6 +13,7 @@ use once_cell::sync::Lazy;
 use regex::Regex;
 use rinja::Template;
 use rust_embed::RustEmbed;
+use serde::Serialize;
 use serde_json::Value;
 use serde_json_path::{JsonPath, JsonPathExt};
 use std::collections::{HashMap, HashSet};
@@ -46,6 +49,7 @@ pub enum ResourceType {
 }
 
 // Post flair with content, background color and foreground color
+#[derive(Serialize)]
 pub struct Flair {
   pub flair_parts: Vec<FlairPart>,
   pub text: String,
@@ -54,7 +58,7 @@ pub struct Flair {
 }
 
 // Part of flair, either emoji or text
-#[derive(Clone)]
+#[derive(Clone, Serialize)]
 pub struct FlairPart {
   pub flair_part_type: String,
   pub value: String,
@@ -96,12 +100,14 @@ impl FlairPart {
   }
 }
 
+#[derive(Serialize)]
 pub struct Author {
   pub name: String,
   pub flair: Flair,
   pub distinguished: String,
 }
 
+#[derive(Serialize)]
 pub struct Poll {
   pub poll_options: Vec<PollOption>,
   pub voting_end_timestamp: (String, String),
@@ -129,6 +135,7 @@ impl Poll {
   }
 }
 
+#[derive(Serialize)]
 pub struct PollOption {
   pub id: u64,
   pub text: String,
@@ -158,13 +165,14 @@ impl PollOption {
 }
 
 // Post flags with nsfw and stickied
+#[derive(Serialize)]
 pub struct Flags {
   pub spoiler: bool,
   pub nsfw: bool,
   pub stickied: bool,
 }
 
-#[derive(Debug)]
+#[derive(Debug, Serialize)]
 pub struct Media {
   pub url: String,
   pub alt_url: String,
@@ -264,6 +272,7 @@ impl Media {
   }
 }
 
+#[derive(Serialize)]
 pub struct GalleryMedia {
   pub url: String,
   pub width: i64,
@@ -304,6 +313,7 @@ impl GalleryMedia {
 }
 
 // Post containing content, metadata and media
+#[derive(Serialize)]
 pub struct Post {
   pub id: String,
   pub title: String,
@@ -470,7 +480,7 @@ pub struct Comment {
   pub prefs: Preferences,
 }
 
-#[derive(Default, Clone)]
+#[derive(Default, Clone, Serialize)]
 pub struct Award {
   pub name: String,
   pub icon_url: String,
@@ -484,6 +494,7 @@ impl std::fmt::Display for Award {
   }
 }
 
+#[derive(Serialize)]
 pub struct Awards(pub Vec<Award>);
 
 impl std::ops::Deref for Awards {
@@ -603,6 +614,7 @@ pub struct Preferences {
   pub show_nsfw: String,
   pub blur_nsfw: String,
   pub hide_hls_notification: String,
+  pub video_quality: String,
   pub hide_sidebar_and_summary: String,
   pub use_hls: String,
   pub ffmpeg_video_downloads: String,
@@ -659,6 +671,7 @@ impl Preferences {
   use_hls: setting(req, "use_hls"),
   ffmpeg_video_downloads: setting(req, "ffmpeg_video_downloads"),
   hide_hls_notification: setting(req, "hide_hls_notification"),
+  video_quality: setting(req, "video_quality"),
   autoplay_videos: setting(req, "autoplay_videos"),
   fixed_navbar: setting_or_default(req, "fixed_navbar", "on".to_string()),
   disable_visit_reddit_confirmation: setting(req, "disable_visit_reddit_confirmation"),
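
The #[derive(Serialize)] additions above are what let the scraper dump Vec<Post> with serde_json::to_string: every type reachable from Post (Flair, FlairPart, Author, Media, Awards, and so on) must implement Serialize before that call compiles. A minimal sketch of the same requirement with hypothetical types:

use serde::Serialize;

// If Inner lacked #[derive(Serialize)], serializing Outer would fail to compile.
#[derive(Serialize)]
struct Inner {
  nsfw: bool,
}

#[derive(Serialize)]
struct Outer {
  id: String,
  flags: Inner,
}

fn main() {
  let post = Outer { id: "abc123".to_string(), flags: Inner { nsfw: false } };
  println!("{}", serde_json::to_string(&post).expect("serialization failed"));
}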

static/check_update.js (new file, 38 lines)

@@ -0,0 +1,38 @@
+async function checkInstanceUpdateStatus() {
+  try {
+    const response = await fetch('/commits.atom');
+    const text = await response.text();
+    const parser = new DOMParser();
+    const xmlDoc = parser.parseFromString(text, "application/xml");
+    const entries = xmlDoc.getElementsByTagName('entry');
+    const localCommit = document.getElementById('git_commit').dataset.value;
+
+    let statusMessage = '';
+
+    if (entries.length > 0) {
+      const commitHashes = Array.from(entries).map(entry => {
+        const id = entry.getElementsByTagName('id')[0].textContent;
+        return id.split('/').pop();
+      });
+
+      const commitIndex = commitHashes.indexOf(localCommit);
+
+      if (commitIndex === 0) {
+        statusMessage = '✅ Instance is up to date.';
+      } else if (commitIndex > 0) {
+        statusMessage = `⚠️ This instance is not up to date and is ${commitIndex} commits old. Test and confirm on an up-to-date instance before reporting.`;
+      } else {
+        statusMessage = `⚠️ This instance is not up to date and is at least ${commitHashes.length} commits old. Test and confirm on an up-to-date instance before reporting.`;
+      }
+    } else {
+      statusMessage = '⚠️ Unable to fetch commit information.';
+    }
+
+    document.getElementById('update-status').innerText = statusMessage;
+  } catch (error) {
+    console.error('Error fetching commits:', error);
+    document.getElementById('update-status').innerText = '⚠️ Error checking update status.';
+  }
+}
+
+checkInstanceUpdateStatus();

templates/base.html

@@ -27,6 +27,8 @@
   <link rel="manifest" type="application/json" href="/manifest.json">
   <link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
   <link rel="stylesheet" type="text/css" href="/style.css?v={{ env!("CARGO_PKG_VERSION") }}">
+  <!-- Video quality -->
+  <div id="video_quality" data-value="{{ prefs.video_quality }}"></div>
   {% endblock %}
 </head>
 <body class="

templates/error.html

@@ -6,10 +6,15 @@
   <h1>{{ msg }}</h1>
   <h3><a href="https://www.redditstatus.com/">Reddit Status</a></h3>
   <br />
+  <h3 id="update-status"></h3>
+  <br>
+  <div id="git_commit" data-value="{{ crate::instance_info::INSTANCE_INFO.git_commit }}"></div>
+  <script src="/check_update.js"></script>
+
   <h3>Expected something to work? <a
     href="https://github.com/redlib-org/redlib/issues/new?assignees=&labels=bug&projects=&template=bug_report.md&title=%F0%9F%90%9B+Bug+Report%3A+{{ msg }}">Report
     an issue</a></h3>
   <br />
   <h3>Head back <a href="/">home</a>?</h3>
 </div>
 {% endblock %}

templates/settings.html

@@ -68,6 +68,12 @@
   </fieldset>
   <fieldset>
     <legend>Content</legend>
+    <div class="prefs-group">
+      <label for="video_quality">Video quality:</label>
+      <select name="video_quality" id="video_quality">
+        {% call utils::options(prefs.video_quality, ["best", "medium", "worst"], "best") %}
+      </select>
+    </div>
     <div class="prefs-group">
       <label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
       <select name="post_sort">