Compare commits
7 Commits
Author | SHA1 | Date
---|---|---
 | 37d1939dc0 | 
 | 08a20b89a6 | 
 | 5d518cfc18 | 
 | 7e752b3d81 | 
 | 87729d0daa | 
 | dc06ae3b29 | 
 | 225380b7d9 | 
.github/workflows/docker-arm.yml (vendored, 2 changes)

@@ -33,6 +33,6 @@ jobs:
           file: ./Dockerfile.arm
           platforms: linux/arm64
           push: true
-          tags: spikecodes/libreddit:arm
+          tags: libreddit/libreddit:arm
           cache-from: type=gha
           cache-to: type=gha,mode=max
.github/workflows/docker-armv7.yml (vendored, 2 changes)

@@ -36,6 +36,6 @@ jobs:
           file: ./Dockerfile.armv7
           platforms: linux/arm/v7
           push: true
-          tags: spikecodes/libreddit:armv7
+          tags: libreddit/libreddit:armv7
           cache-from: type=gha
           cache-to: type=gha,mode=max
.github/workflows/docker.yml (vendored, 8 changes)

@@ -26,6 +26,12 @@ jobs:
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Docker Hub Description
+        uses: peter-evans/dockerhub-description@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+          repository: libreddit/libreddit
       - name: Build and push
         uses: docker/build-push-action@v2
         with:
@@ -33,6 +39,6 @@ jobs:
           file: ./Dockerfile
           platforms: linux/amd64
           push: true
-          tags: spikecodes/libreddit:latest
+          tags: libreddit/libreddit:latest
           cache-from: type=gha
           cache-to: type=gha,mode=max
Cargo.lock (generated, 2 changes)

@@ -664,7 +664,7 @@ dependencies = [

 [[package]]
 name = "libreddit"
-version = "0.25.0"
+version = "0.25.1"
 dependencies = [
  "askama",
  "async-recursion",
Cargo.toml (2 changes)

@@ -3,7 +3,7 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.25.0"
+version = "0.25.1"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2021"

README.md (16 changes)

@@ -39,7 +39,7 @@ Both files are part of the [libreddit-instances](https://github.com/libreddit/li

 # About

-Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).
+Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/libreddit/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/libreddit/libreddit).

 ## Built with

@@ -136,21 +136,21 @@ cargo install libreddit

 ## 2) Docker

-Deploy the [Docker image](https://hub.docker.com/r/spikecodes/libreddit) of Libreddit:
+Deploy the [Docker image](https://hub.docker.com/r/libreddit/libreddit) of Libreddit:
 ```
-docker pull spikecodes/libreddit
-docker run -d --name libreddit -p 8080:8080 spikecodes/libreddit
+docker pull libreddit/libreddit
+docker run -d --name libreddit -p 8080:8080 libreddit/libreddit
 ```

 Deploy using a different port (in this case, port 80):
 ```
-docker pull spikecodes/libreddit
-docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
+docker pull libreddit/libreddit
+docker run -d --name libreddit -p 80:8080 libreddit/libreddit
 ```

-To deploy on `arm64` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:arm`.
+To deploy on `arm64` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:arm`.

-To deploy on `armv7` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:armv7`.
+To deploy on `armv7` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:armv7`.

 ## 3) AUR

src/server.rs

@@ -243,7 +243,7 @@ impl Server {
         match func.await {
             Ok(mut res) => {
                 res.headers_mut().extend(def_headers);
-                let _ = compress_response(req_headers, &mut res).await;
+                let _ = compress_response(&req_headers, &mut res).await;

                 Ok(res)
             }
@@ -282,7 +282,7 @@ async fn new_boilerplate(
 ) -> Result<Response<Body>, String> {
     match Response::builder().status(status).body(body) {
         Ok(mut res) => {
-            let _ = compress_response(req_headers, &mut res).await;
+            let _ = compress_response(&req_headers, &mut res).await;

             res.headers_mut().extend(default_headers.clone());
             Ok(res)
@@ -306,7 +306,8 @@ async fn new_boilerplate(
 /// Accept-Encoding: gzip, compress, br
 /// Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1
 /// ```
-fn determine_compressor(accept_encoding: &str) -> Option<CompressionType> {
+#[cached]
+fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
     if accept_encoding.is_empty() {
         return None;
     };
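The two halves of this change belong together: `#[cached]` memoizes `determine_compressor`, and a memoized function needs an owned, hashable argument to serve as its cache key, which is why the parameter moves from `&str` to `String`. A minimal sketch of the pattern, assuming the attribute comes from the `cached` crate (with its `proc_macro` feature enabled) and using a hypothetical `normalize_encoding` stand-in rather than the real `determine_compressor`:

```rust
// Hedged sketch: assumes the `cached` crate; `normalize_encoding` is a made-up
// stand-in for determine_compressor, not code from libreddit.
use cached::proc_macro::cached;

// `#[cached]` generates a static in-memory cache keyed on the owned `String`
// argument, so identical Accept-Encoding values are only parsed once.
#[cached]
fn normalize_encoding(accept_encoding: String) -> Option<String> {
    accept_encoding
        .split(',')
        // Keep the encoding token, dropping any ";q=..." weight.
        .map(|tok| tok.trim().split(';').next().unwrap_or("").to_string())
        .find(|enc| enc == "gzip" || enc == "br")
}

fn main() {
    // The first call computes the result; the identical second call is
    // answered from the cache without re-parsing the header value.
    assert_eq!(normalize_encoding("br;q=1.0, gzip;q=0.8".into()), Some("br".into()));
    assert_eq!(normalize_encoding("br;q=1.0, gzip;q=0.8".into()), Some("br".into()));
}
```

The trade-off is that callers must now hand over an owned `String`, which is exactly what the `.to_string()` additions in the test changes further down reflect.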
@@ -473,7 +474,7 @@ fn determine_compressor(accept_encoding: &str) -> Option<CompressionType> {
 ///
 /// This function logs errors to stderr, but only in debug mode. No information
 /// is logged in release builds.
-async fn compress_response(req_headers: HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
+async fn compress_response(req_headers: &HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
     // Check if the data is eligible for compression.
     if let Some(hdr) = res.headers().get(header::CONTENT_TYPE) {
         match from_utf8(hdr.as_bytes()) {
@@ -503,30 +504,22 @@ async fn compress_response(req_headers: HeaderMap<header::HeaderValue>, res: &mu
         return Ok(());
     };

-    // Quick and dirty closure for extracting a header from the request and
-    // returning it as a &str.
-    let get_req_header = |k: header::HeaderName| -> Option<&str> {
-        match req_headers.get(k) {
-            Some(hdr) => match from_utf8(hdr.as_bytes()) {
-                Ok(val) => Some(val),
-
-                #[cfg(debug_assertions)]
-                Err(e) => {
-                    dbg_msg!(e);
-                    None
-                }
-
-                #[cfg(not(debug_assertions))]
-                Err(_) => None,
-            },
-            None => None,
-        }
-    };
-
     // Check to see which compressor is requested, and if we can use it.
-    let accept_encoding: &str = match get_req_header(header::ACCEPT_ENCODING) {
-        Some(val) => val,
+    let accept_encoding: String = match req_headers.get(header::ACCEPT_ENCODING) {
         None => return Ok(()), // Client requested no compression.
+
+        Some(hdr) => match String::from_utf8(hdr.as_bytes().into()) {
+            Ok(val) => val,
+
+            #[cfg(debug_assertions)]
+            Err(e) => {
+                dbg_msg!(e);
+                return Ok(());
+            }
+
+            #[cfg(not(debug_assertions))]
+            Err(_) => return Ok(()),
+        },
     };

     let compressor: CompressionType = match determine_compressor(accept_encoding) {
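The borrowed `req_headers` and the owned `accept_encoding` work as a pair: the raw header bytes are converted into a `String` once, up front, and that owned value is what gets passed to the now-memoized `determine_compressor`. A small, std-only sketch of the extraction step, with a hypothetical `accept_encoding_to_owned` helper standing in for the real code path:

```rust
// Hedged sketch: `accept_encoding_to_owned` is a made-up helper illustrating
// the String::from_utf8 conversion above; it is not a function in libreddit.
fn accept_encoding_to_owned(raw: &[u8]) -> Option<String> {
    match String::from_utf8(raw.to_vec()) {
        Ok(val) => Some(val),
        // The real code logs the error only in debug builds and then skips
        // compression; here invalid UTF-8 simply yields None.
        Err(_) => None,
    }
}

fn main() {
    assert_eq!(
        accept_encoding_to_owned(b"br;q=1.0, gzip;q=0.8"),
        Some("br;q=1.0, gzip;q=0.8".to_string())
    );
    // Bytes that are not valid UTF-8 produce no value at all.
    assert_eq!(accept_encoding_to_owned(&[0xff, 0xfe]), None);
}
```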
@@ -636,18 +629,18 @@ mod tests {
     #[test]
     fn test_determine_compressor() {
         // Single compressor given.
-        assert_eq!(determine_compressor("unsupported"), None);
-        assert_eq!(determine_compressor("gzip"), Some(CompressionType::Gzip));
-        assert_eq!(determine_compressor("*"), Some(DEFAULT_COMPRESSOR));
+        assert_eq!(determine_compressor("unsupported".to_string()), None);
+        assert_eq!(determine_compressor("gzip".to_string()), Some(CompressionType::Gzip));
+        assert_eq!(determine_compressor("*".to_string()), Some(DEFAULT_COMPRESSOR));

         // Multiple compressors.
-        assert_eq!(determine_compressor("gzip, br"), Some(CompressionType::Brotli));
-        assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3"), Some(CompressionType::Gzip));
-        assert_eq!(determine_compressor("br, gzip"), Some(CompressionType::Brotli));
-        assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4"), Some(CompressionType::Gzip));
+        assert_eq!(determine_compressor("gzip, br".to_string()), Some(CompressionType::Brotli));
+        assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3".to_string()), Some(CompressionType::Gzip));
+        assert_eq!(determine_compressor("br, gzip".to_string()), Some(CompressionType::Brotli));
+        assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4".to_string()), Some(CompressionType::Gzip));

         // Invalid q-values.
-        assert_eq!(determine_compressor("gzip;q=NAN"), None);
+        assert_eq!(determine_compressor("gzip;q=NAN".to_string()), None);
     }

     #[test]
@@ -672,9 +665,9 @@ mod tests {
         ] {
             // Determine what the expected encoding should be based on both the
             // specific encodings we accept.
-            let expected_encoding: CompressionType = match determine_compressor(accept_encoding) {
+            let expected_encoding: CompressionType = match determine_compressor(accept_encoding.to_string()) {
                 Some(s) => s,
-                None => panic!("determine_compressor(accept_encoding) => None"),
+                None => panic!("determine_compressor(accept_encoding.to_string()) => None"),
             };

             // Build headers with our Accept-Encoding.
@@ -691,8 +684,8 @@ mod tests {
                 .unwrap();

             // Perform the compression.
-            if let Err(e) = block_on(compress_response(req_headers, &mut res)) {
-                panic!("compress_response(req_headers, &mut res) => Err(\"{}\")", e);
+            if let Err(e) = block_on(compress_response(&req_headers, &mut res)) {
+                panic!("compress_response(&req_headers, &mut res) => Err(\"{}\")", e);
             };

             // If the content was compressed, we expect the Content-Encoding
@@ -739,9 +732,8 @@ mod tests {
             };

             let mut decompressed = Vec::<u8>::new();
-            match io::copy(&mut decoder, &mut decompressed) {
-                Ok(_) => {}
-                Err(e) => panic!("{}", e),
+            if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
+                panic!("{}", e);
             };

             assert!(decompressed.eq(&expected_lorem_ipsum));

@@ -115,7 +115,7 @@
 {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
     <script src="/hls.min.js"></script>
     <div class="post_media_content">
-        <video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls>
+        <video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls>
         <source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
         <source src="{{ post.media.url }}" type="video/mp4" />
         </video>
@@ -213,19 +213,19 @@
 </div>
 {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
     <div class="post_media_content">
-        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
     </div>
 {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
     {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
     <div class="post_media_content">
-        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" controls preload="none">
         <source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
         <source src="{{ post.media.url }}" type="video/mp4" />
         </video>
     </div>
     {% else %}
     <div class="post_media_content">
-        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
     </div>
     {% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26").replace("+", "%2B"), post.id)) %}
     {% endif %}