mirror of https://github.com/spikecodes/libreddit
Merge branch 'master' into ferrit-nsfw
commit 4a9ea506bd
@@ -33,6 +33,6 @@ jobs:
           file: ./Dockerfile.arm
           platforms: linux/arm64
           push: true
-          tags: spikecodes/libreddit:arm
+          tags: libreddit/libreddit:arm
           cache-from: type=gha
           cache-to: type=gha,mode=max
@@ -36,6 +36,6 @@ jobs:
           file: ./Dockerfile.armv7
           platforms: linux/arm/v7
           push: true
-          tags: spikecodes/libreddit:armv7
+          tags: libreddit/libreddit:armv7
           cache-from: type=gha
           cache-to: type=gha,mode=max
@@ -26,6 +26,12 @@ jobs:
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Docker Hub Description
+        uses: peter-evans/dockerhub-description@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+          repository: libreddit/libreddit
       - name: Build and push
         uses: docker/build-push-action@v2
         with:
@@ -33,6 +39,6 @@ jobs:
           file: ./Dockerfile
           platforms: linux/amd64
           push: true
-          tags: spikecodes/libreddit:latest
+          tags: libreddit/libreddit:latest
           cache-from: type=gha
           cache-to: type=gha,mode=max
@@ -664,7 +664,7 @@ dependencies = [
 
 [[package]]
 name = "libreddit"
-version = "0.25.0"
+version = "0.25.1"
 dependencies = [
  "askama",
  "async-recursion",
@@ -3,7 +3,7 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.25.0"
+version = "0.25.1"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2021"
 
README.md
@@ -39,7 +39,7 @@ Both files are part of the [libreddit-instances](https://github.com/libreddit/li
 
 # About
 
-Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/spikecodes/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/spikecodes/libreddit).
+Find Libreddit on 💬 [Matrix](https://matrix.to/#/#libreddit:kde.org), 🐋 [Docker](https://hub.docker.com/r/libreddit/libreddit), :octocat: [GitHub](https://github.com/libreddit/libreddit), and 🦊 [GitLab](https://gitlab.com/libreddit/libreddit).
 
 ## Built with
 
@@ -136,21 +136,21 @@ cargo install libreddit
 
 ## 2) Docker
 
-Deploy the [Docker image](https://hub.docker.com/r/spikecodes/libreddit) of Libreddit:
+Deploy the [Docker image](https://hub.docker.com/r/libreddit/libreddit) of Libreddit:
 ```
-docker pull spikecodes/libreddit
-docker run -d --name libreddit -p 8080:8080 spikecodes/libreddit
+docker pull libreddit/libreddit
+docker run -d --name libreddit -p 8080:8080 libreddit/libreddit
 ```
 
 Deploy using a different port (in this case, port 80):
 ```
-docker pull spikecodes/libreddit
-docker run -d --name libreddit -p 80:8080 spikecodes/libreddit
+docker pull libreddit/libreddit
+docker run -d --name libreddit -p 80:8080 libreddit/libreddit
 ```
 
-To deploy on `arm64` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:arm`.
+To deploy on `arm64` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:arm`.
 
-To deploy on `armv7` platforms, simply replace `spikecodes/libreddit` in the commands above with `spikecodes/libreddit:armv7`.
+To deploy on `armv7` platforms, simply replace `libreddit/libreddit` in the commands above with `libreddit/libreddit:armv7`.
 
 ## 3) AUR
 
@@ -243,7 +243,7 @@ impl Server {
             match func.await {
                 Ok(mut res) => {
                     res.headers_mut().extend(def_headers);
-                    let _ = compress_response(req_headers, &mut res).await;
+                    let _ = compress_response(&req_headers, &mut res).await;
 
                     Ok(res)
                 }
@@ -282,7 +282,7 @@ async fn new_boilerplate(
 ) -> Result<Response<Body>, String> {
     match Response::builder().status(status).body(body) {
         Ok(mut res) => {
-            let _ = compress_response(req_headers, &mut res).await;
+            let _ = compress_response(&req_headers, &mut res).await;
 
             res.headers_mut().extend(default_headers.clone());
             Ok(res)
@@ -306,7 +306,8 @@ async fn new_boilerplate(
 /// Accept-Encoding: gzip, compress, br
 /// Accept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1
 /// ```
-fn determine_compressor(accept_encoding: &str) -> Option<CompressionType> {
+#[cached]
+fn determine_compressor(accept_encoding: String) -> Option<CompressionType> {
     if accept_encoding.is_empty() {
         return None;
     };
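The parameter changes from `&str` to an owned `String` because `determine_compressor` is now memoized. A standalone sketch of that pattern, assuming the `cached` crate (whose `#[cached]` attribute keys a cache on owned, hashable arguments); `best_encoding` is a hypothetical stand-in, not the project's function:

```rust
use cached::proc_macro::cached;

// Sketch only: #[cached] memoizes results keyed on the owned String argument.
#[cached]
fn best_encoding(accept_encoding: String) -> Option<String> {
    // Hypothetical stand-in logic: prefer brotli, then gzip.
    if accept_encoding.contains("br") {
        Some("br".to_string())
    } else if accept_encoding.contains("gzip") {
        Some("gzip".to_string())
    } else {
        None
    }
}

fn main() {
    // The first call computes the result; a repeat call with an equal key
    // is answered from the cache.
    assert_eq!(best_encoding("gzip, br".to_string()), Some("br".to_string()));
    assert_eq!(best_encoding("gzip, br".to_string()), Some("br".to_string()));
}
```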
@@ -473,7 +474,7 @@ fn determine_compressor(accept_encoding: &str) -> Option<CompressionType> {
 ///
 /// This function logs errors to stderr, but only in debug mode. No information
 /// is logged in release builds.
-async fn compress_response(req_headers: HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
+async fn compress_response(req_headers: &HeaderMap<header::HeaderValue>, res: &mut Response<Body>) -> Result<(), String> {
     // Check if the data is eligible for compression.
     if let Some(hdr) = res.headers().get(header::CONTENT_TYPE) {
         match from_utf8(hdr.as_bytes()) {
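Taking `req_headers` by reference means callers keep ownership of the header map after compression, which is why the call sites above change to `compress_response(&req_headers, &mut res)`. A standalone sketch of the borrow using hyper's header types; `accepted_encodings` is a hypothetical helper, not the project's code:

```rust
use hyper::header::{HeaderValue, ACCEPT_ENCODING};
use hyper::HeaderMap;

// Hypothetical helper that only borrows the request headers.
fn accepted_encodings(req_headers: &HeaderMap<HeaderValue>) -> Option<String> {
    req_headers
        .get(ACCEPT_ENCODING)
        .and_then(|hdr| hdr.to_str().ok())
        .map(|s| s.to_string())
}

fn main() {
    let mut req_headers = HeaderMap::new();
    req_headers.insert(ACCEPT_ENCODING, HeaderValue::from_static("br;q=1.0, gzip;q=0.8"));

    // Because the callee borrows, the map is still usable afterwards.
    assert_eq!(accepted_encodings(&req_headers).as_deref(), Some("br;q=1.0, gzip;q=0.8"));
    assert!(req_headers.contains_key(ACCEPT_ENCODING));
}
```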
@@ -503,30 +504,22 @@ async fn compress_response(req_headers: HeaderMap<header::HeaderValue>, res: &mu
         return Ok(());
     };
 
-    // Quick and dirty closure for extracting a header from the request and
-    // returning it as a &str.
-    let get_req_header = |k: header::HeaderName| -> Option<&str> {
-        match req_headers.get(k) {
-            Some(hdr) => match from_utf8(hdr.as_bytes()) {
-                Ok(val) => Some(val),
-
-                #[cfg(debug_assertions)]
-                Err(e) => {
-                    dbg_msg!(e);
-                    None
-                }
-
-                #[cfg(not(debug_assertions))]
-                Err(_) => None,
-            },
-            None => None,
-        }
-    };
-
     // Check to see which compressor is requested, and if we can use it.
-    let accept_encoding: &str = match get_req_header(header::ACCEPT_ENCODING) {
-        Some(val) => val,
-        None => return Ok(()), // Client requested no compression.
+    let accept_encoding: String = match req_headers.get(header::ACCEPT_ENCODING) {
+        None => return Ok(()), // Client requested no compression.
+
+        Some(hdr) => match String::from_utf8(hdr.as_bytes().into()) {
+            Ok(val) => val,
+
+            #[cfg(debug_assertions)]
+            Err(e) => {
+                dbg_msg!(e);
+                return Ok(());
+            }
+
+            #[cfg(not(debug_assertions))]
+            Err(_) => return Ok(()),
+        },
     };
 
     let compressor: CompressionType = match determine_compressor(accept_encoding) {
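The duplicated `Err` arms gated on `cfg(debug_assertions)` report malformed headers in debug builds and stay silent in release builds. A standalone sketch of that pattern; `parse_header` is hypothetical and `eprintln!` stands in for the project's `dbg_msg!` macro:

```rust
// Sketch only: debug builds log the parse error, release builds ignore it.
fn parse_header(raw: &[u8]) -> Option<String> {
    match String::from_utf8(raw.into()) {
        Ok(val) => Some(val),

        #[cfg(debug_assertions)]
        Err(e) => {
            eprintln!("{}", e); // Debug builds: report the bad header value.
            None
        }

        #[cfg(not(debug_assertions))]
        Err(_) => None, // Release builds: treat it as absent, silently.
    }
}

fn main() {
    assert_eq!(parse_header(b"gzip, br").as_deref(), Some("gzip, br"));
    assert_eq!(parse_header(&[0xFF, 0xFE]), None); // Not valid UTF-8.
}
```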
@@ -636,18 +629,18 @@ mod tests {
     #[test]
     fn test_determine_compressor() {
         // Single compressor given.
-        assert_eq!(determine_compressor("unsupported"), None);
-        assert_eq!(determine_compressor("gzip"), Some(CompressionType::Gzip));
-        assert_eq!(determine_compressor("*"), Some(DEFAULT_COMPRESSOR));
+        assert_eq!(determine_compressor("unsupported".to_string()), None);
+        assert_eq!(determine_compressor("gzip".to_string()), Some(CompressionType::Gzip));
+        assert_eq!(determine_compressor("*".to_string()), Some(DEFAULT_COMPRESSOR));
 
         // Multiple compressors.
-        assert_eq!(determine_compressor("gzip, br"), Some(CompressionType::Brotli));
-        assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3"), Some(CompressionType::Gzip));
-        assert_eq!(determine_compressor("br, gzip"), Some(CompressionType::Brotli));
-        assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4"), Some(CompressionType::Gzip));
+        assert_eq!(determine_compressor("gzip, br".to_string()), Some(CompressionType::Brotli));
+        assert_eq!(determine_compressor("gzip;q=0.8, br;q=0.3".to_string()), Some(CompressionType::Gzip));
+        assert_eq!(determine_compressor("br, gzip".to_string()), Some(CompressionType::Brotli));
+        assert_eq!(determine_compressor("br;q=0.3, gzip;q=0.4".to_string()), Some(CompressionType::Gzip));
 
         // Invalid q-values.
-        assert_eq!(determine_compressor("gzip;q=NAN"), None);
+        assert_eq!(determine_compressor("gzip;q=NAN".to_string()), None);
     }
 
     #[test]
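For reference, the expectations above imply a q-value ordering: an unspecified q counts as 1.0, brotli wins ties, and a malformed q-value rejects the header. A standalone sketch consistent with those assertions (not libreddit's implementation; the `Encoding` enum and `choose_encoding` are illustrative only):

```rust
// Sketch only: pick the preferred encoding from an Accept-Encoding header.
#[derive(Debug, PartialEq, Clone, Copy)]
enum Encoding {
    Gzip,
    Brotli,
}

fn choose_encoding(accept_encoding: &str) -> Option<Encoding> {
    let mut best: Option<(Encoding, f64)> = None;

    for part in accept_encoding.split(',') {
        let mut pieces = part.trim().split(";q=");
        let name = pieces.next().unwrap_or_default();

        // A missing q-value defaults to 1.0; an unparsable or non-finite one
        // invalidates the whole header.
        let q: f64 = match pieces.next() {
            None => 1.0,
            Some(raw) => match raw.parse::<f64>() {
                Ok(q) if q.is_finite() => q,
                _ => return None,
            },
        };

        let candidate = match name {
            "br" => Encoding::Brotli,
            "gzip" => Encoding::Gzip,
            _ => continue, // Unsupported encoding: skip it.
        };

        // Keep the highest q-value; prefer brotli when q-values tie.
        let better = match best {
            None => true,
            Some((_, best_q)) => q > best_q || (q == best_q && candidate == Encoding::Brotli),
        };
        if better {
            best = Some((candidate, q));
        }
    }

    best.map(|(enc, _)| enc)
}

fn main() {
    assert_eq!(choose_encoding("unsupported"), None);
    assert_eq!(choose_encoding("gzip, br"), Some(Encoding::Brotli));
    assert_eq!(choose_encoding("gzip;q=0.8, br;q=0.3"), Some(Encoding::Gzip));
    assert_eq!(choose_encoding("gzip;q=NAN"), None);
}
```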
@@ -672,9 +665,9 @@ mod tests {
         ] {
             // Determine what the expected encoding should be based on both the
             // specific encodings we accept.
-            let expected_encoding: CompressionType = match determine_compressor(accept_encoding) {
+            let expected_encoding: CompressionType = match determine_compressor(accept_encoding.to_string()) {
                 Some(s) => s,
-                None => panic!("determine_compressor(accept_encoding) => None"),
+                None => panic!("determine_compressor(accept_encoding.to_string()) => None"),
             };
 
             // Build headers with our Accept-Encoding.
@@ -691,8 +684,8 @@ mod tests {
                 .unwrap();
 
             // Perform the compression.
-            if let Err(e) = block_on(compress_response(req_headers, &mut res)) {
-                panic!("compress_response(req_headers, &mut res) => Err(\"{}\")", e);
+            if let Err(e) = block_on(compress_response(&req_headers, &mut res)) {
+                panic!("compress_response(&req_headers, &mut res) => Err(\"{}\")", e);
             };
 
             // If the content was compressed, we expect the Content-Encoding
@@ -739,9 +732,8 @@ mod tests {
             };
 
             let mut decompressed = Vec::<u8>::new();
-            match io::copy(&mut decoder, &mut decompressed) {
-                Ok(_) => {}
-                Err(e) => panic!("{}", e),
+            if let Err(e) = io::copy(&mut decoder, &mut decompressed) {
+                panic!("{}", e);
             };
 
             assert!(decompressed.eq(&expected_lorem_ipsum));
@@ -115,7 +115,7 @@
 {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
 <script src="/hls.min.js"></script>
 <div class="post_media_content">
-    <video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls>
+    <video class="post_media_video short {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls>
     <source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
     <source src="{{ post.media.url }}" type="video/mp4" />
 </video>
@@ -213,19 +213,19 @@
     </div>
 {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "gif" %}
     <div class="post_media_content">
-        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls loop {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
     </div>
 {% else if (prefs.layout.is_empty() || prefs.layout == "card") && post.post_type == "video" %}
     {% if prefs.use_hls == "on" && !post.media.alt_url.is_empty() %}
     <div class="post_media_content">
-        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" controls preload="none">
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %} {% if prefs.autoplay_videos == "on" %}hls_autoplay{% endif %}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" controls preload="none">
         <source src="{{ post.media.alt_url }}" type="application/vnd.apple.mpegurl" />
         <source src="{{ post.media.url }}" type="video/mp4" />
         </video>
     </div>
     {% else %}
     <div class="post_media_content">
-        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" width="{{ post.media.width }}" height="{{ post.media.height }}" poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
+        <video class="post_media_video short {%if post.flags.nsfw && prefs.blur_nsfw=="on" %}post_nsfw_blur{% endif %}" src="{{ post.media.url }}" {% if post.media.width > 0 && post.media.height > 0 %}width="{{ post.media.width }}" height="{{ post.media.height }}"{% endif %} poster="{{ post.media.poster }}" preload="none" controls {% if prefs.autoplay_videos == "on" %}autoplay{% endif %}><a href={{ post.media.url }}>Video</a></video>
     </div>
     {% call render_hls_notification(format!("{}%23{}", &self.url[1..].replace("&", "%26").replace("+", "%2B"), post.id)) %}
     {% endif %}