Automatically generate release notes

Author: spikecodes
Date: 2021-12-27 10:15:25 -08:00
Parent: 7aeabfc4bc
Commit: 90fa0b5496
Signature: no known key found for this signature in database (GPG key ID: 004CECFF9B463BCB)
5 changed files with 10 additions and 21 deletions

GitHub Actions workflow (release step):

@@ -43,12 +43,13 @@ jobs:
         if: github.base_ref != 'master'
         with:
           tag_name: ${{ steps.version.outputs.version }}
-          name: ${{ steps.version.outputs.version }} - NAME
+          name: ${{ steps.version.outputs.version }} - ${{ github.event.head_commit.message }}
           draft: true
           files: |
             target/release/libreddit
             libreddit.sha512
           body: |
             - ${{ github.event.head_commit.message }} ${{ github.sha }}
+          generate_release_notes: true
         env:
           GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
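
The new `generate_release_notes: true` input maps to the `generate_release_notes` flag of GitHub's create-release API, which appends automatically generated notes (merged pull requests and new contributors since the previous release) to the release body. Below is a minimal sketch of the resulting step, assuming the job uses softprops/action-gh-release (the `uses:` line is not visible in this hunk) and an earlier step that exposes a `version` output:

  # Sketch only: the step name and `uses:` line are assumptions not shown in the diff above.
  - name: Release
    uses: softprops/action-gh-release@v1   # assumed action; it accepts a generate_release_notes input
    if: github.base_ref != 'master'
    with:
      tag_name: ${{ steps.version.outputs.version }}
      name: ${{ steps.version.outputs.version }} - ${{ github.event.head_commit.message }}
      generate_release_notes: true         # let GitHub fill in the release notes automatically
    env:
      GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}

Per GitHub's API, a manually supplied `body` (as kept in the workflow above) is prepended to the generated notes rather than replaced by them.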

Cargo.lock (generated):

@@ -572,7 +572,7 @@ checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
 [[package]]
 name = "libreddit"
-version = "0.20.3"
+version = "0.20.4"
 dependencies = [
  "askama",
  "async-recursion",

Cargo.toml:

@@ -3,7 +3,7 @@ name = "libreddit"
 description = " Alternative private front-end to Reddit"
 license = "AGPL-3.0"
 repository = "https://github.com/spikecodes/libreddit"
-version = "0.20.3"
+version = "0.20.4"
 authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"]
 edition = "2021"


@@ -240,4 +240,4 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str,
 		}
 	})
 	.collect()
 }


@@ -96,31 +96,19 @@ impl Media {
 		// If post is a video, return the video
 		let (post_type, url_val, alt_url_val) = if data_preview["fallback_url"].is_string() {
 			(
-				if data_preview["is_gif"].as_bool().unwrap_or(false) {
-					"gif"
-				} else {
-					"video"
-				},
+				if data_preview["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
 				&data_preview["fallback_url"],
 				Some(&data_preview["hls_url"]),
 			)
 		} else if secure_media["fallback_url"].is_string() {
 			(
-				if secure_media["is_gif"].as_bool().unwrap_or(false) {
-					"gif"
-				} else {
-					"video"
-				},
+				if secure_media["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
 				&secure_media["fallback_url"],
 				Some(&secure_media["hls_url"]),
 			)
 		} else if crosspost_parent_media["fallback_url"].is_string() {
 			(
-				if crosspost_parent_media["is_gif"].as_bool().unwrap_or(false) {
-					"gif"
-				} else {
-					"video"
-				},
+				if crosspost_parent_media["is_gif"].as_bool().unwrap_or(false) { "gif" } else { "video" },
 				&crosspost_parent_media["fallback_url"],
 				Some(&crosspost_parent_media["hls_url"]),
 			)
@@ -728,4 +716,4 @@ mod tests {
 		assert_eq!(format_num(1001), ("1.0k".to_string(), "1001".to_string()));
 		assert_eq!(format_num(1_999_999), ("2.0m".to_string(), "1999999".to_string()));
 	}
 }