libreddit/src/post.rs

244 lines
8.0 KiB
Rust
Raw Normal View History

2020-10-25 21:25:59 +01:00
// CRATES
2021-03-17 23:30:33 +01:00
use crate::client::json;
2021-03-12 05:15:26 +01:00
use crate::esc;
2021-03-17 23:30:33 +01:00
use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{
error, format_num, format_url, get_filters, param, rewrite_urls, setting, template, time, val, Author, Awards, Comment, Flags, Flair, FlairPart, Media, Post, Preferences,
};
2021-03-17 23:30:33 +01:00
use hyper::{Body, Request, Response};
2020-12-15 01:35:04 +01:00
2020-10-25 21:25:59 +01:00
use askama::Template;
use std::collections::HashSet;
2020-11-30 03:50:29 +01:00
2020-10-25 21:25:59 +01:00
// STRUCTS
/// Askama template context for rendering a single post page (`templates/post.html`).
#[derive(Template)]
#[template(path = "post.html", escape = "none")]
struct PostTemplate {
	// Top-level comment tree of the post (replies nested inside each Comment)
	comments: Vec<Comment>,
	// The post itself, parsed from the Reddit API response
	post: Post,
	// Active comment sort — from the `?sort=` query param or the cookie default
	sort: String,
	// User preferences read from cookies
	prefs: Preferences,
	// True when viewing a single comment thread (a `comment_id` route param was given)
	single_thread: bool,
	// The original request URI, echoed back into the page
	url: String,
}
2021-03-17 23:30:33 +01:00
pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
2021-01-07 17:38:05 +01:00
// Build Reddit API path
2021-03-17 23:30:33 +01:00
let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default());
let sub = req.param("sub").unwrap_or_default();
let quarantined = can_access_quarantine(&req, &sub);
2021-01-07 19:32:55 +01:00
// Set sort to sort query parameter
let sort = param(&path, "sort").unwrap_or_else(|| {
// Grab default comment sort method from Cookies
let default_sort = setting(&req, "comment_sort");
2021-01-07 19:32:55 +01:00
// If there's no sort query but there's a default sort, set sort to default_sort
2021-05-16 18:11:38 +02:00
if default_sort.is_empty() {
String::new()
} else {
path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), default_sort);
default_sort
}
});
2021-01-01 00:54:13 +01:00
2020-12-22 06:40:06 +01:00
// Log the post ID being fetched in debug mode
#[cfg(debug_assertions)]
2021-03-17 23:30:33 +01:00
dbg!(req.param("id").unwrap_or_default());
2021-02-14 00:02:38 +01:00
let single_thread = req.param("comment_id").is_some();
let highlighted_comment = &req.param("comment_id").unwrap_or_default();
2021-01-02 07:21:43 +01:00
2020-11-21 06:04:35 +01:00
// Send a request to the url, receive JSON in response
match json(path, quarantined).await {
2021-01-01 00:54:13 +01:00
// Otherwise, grab the JSON output from the request
2021-05-20 21:24:06 +02:00
Ok(response) => {
2021-01-02 00:28:13 +01:00
// Parse the JSON into Post and Comment structs
2021-05-20 21:24:06 +02:00
let post = parse_post(&response[0]).await;
let comments = parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req));
let url = req.uri().to_string();
2021-01-02 00:28:13 +01:00
// Use the Post and Comment structs to generate a website to show users
template(PostTemplate {
2021-01-10 22:20:47 +01:00
comments,
post,
sort,
2021-02-25 06:29:23 +01:00
prefs: Preferences::new(req),
single_thread,
2021-11-15 03:51:36 +01:00
url,
})
2021-01-02 07:21:43 +01:00
}
2021-01-02 00:28:13 +01:00
// If the Reddit API returns an error, exit and send error page to user
Err(msg) => {
if msg == "quarantined" {
let sub = req.param("sub").unwrap_or_default();
quarantine(req, sub)
} else {
error(req, msg).await
}
}
2020-10-25 21:25:59 +01:00
}
}
2020-10-26 01:52:57 +01:00
// POSTS
2021-01-07 06:27:24 +01:00
async fn parse_post(json: &serde_json::Value) -> Post {
2020-12-23 03:29:43 +01:00
// Retrieve post (as opposed to comments) from JSON
let post: &serde_json::Value = &json["data"]["children"][0];
2020-10-25 21:25:59 +01:00
2020-12-23 03:29:43 +01:00
// Grab UTC time as unix timestamp
2021-03-09 16:22:17 +01:00
let (rel_time, created) = time(post["data"]["created_utc"].as_f64().unwrap_or_default());
// Parse post score and upvote ratio
2021-01-04 06:31:21 +01:00
let score = post["data"]["score"].as_i64().unwrap_or_default();
let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
2020-10-25 21:25:59 +01:00
2020-12-23 03:29:43 +01:00
// Determine the type of media along with the media URL
2021-02-25 06:29:23 +01:00
let (post_type, media, gallery) = Media::parse(&post["data"]).await;
let awards: Awards = Awards::parse(&post["data"]["all_awardings"]);
2020-12-23 03:29:43 +01:00
// Build a post using data parsed from Reddit post API
2021-01-07 06:27:24 +01:00
Post {
id: val(post, "id"),
2021-03-12 05:15:26 +01:00
title: esc!(post, "title"),
community: val(post, "subreddit"),
2021-02-10 06:54:55 +01:00
body: rewrite_urls(&val(post, "selftext_html")).replace("\\", ""),
2021-01-17 00:02:24 +01:00
author: Author {
name: val(post, "author"),
flair: Flair {
2021-02-25 06:29:23 +01:00
flair_parts: FlairPart::parse(
2021-02-14 23:53:09 +01:00
post["data"]["author_flair_type"].as_str().unwrap_or_default(),
2021-01-17 00:02:24 +01:00
post["data"]["author_flair_richtext"].as_array(),
post["data"]["author_flair_text"].as_str(),
),
2021-03-12 05:15:26 +01:00
text: esc!(post, "link_flair_text"),
2021-01-17 00:02:24 +01:00
background_color: val(post, "author_flair_background_color"),
foreground_color: val(post, "author_flair_text_color"),
},
distinguished: val(post, "distinguished"),
2021-01-12 22:43:03 +01:00
},
permalink: val(post, "permalink"),
2020-12-07 19:32:46 +01:00
score: format_num(score),
upvote_ratio: ratio as i64,
2021-01-07 06:27:24 +01:00
post_type,
2021-01-17 21:58:12 +01:00
media,
thumbnail: Media {
url: format_url(val(post, "thumbnail").as_str()),
alt_url: String::new(),
2021-01-17 21:58:12 +01:00
width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(),
height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(),
poster: "".to_string(),
2021-01-17 21:58:12 +01:00
},
2021-01-13 21:52:00 +01:00
flair: Flair {
2021-02-25 06:29:23 +01:00
flair_parts: FlairPart::parse(
2021-02-14 23:53:09 +01:00
post["data"]["link_flair_type"].as_str().unwrap_or_default(),
2021-01-13 21:52:00 +01:00
post["data"]["link_flair_richtext"].as_array(),
post["data"]["link_flair_text"].as_str(),
),
2021-03-12 05:15:26 +01:00
text: esc!(post, "link_flair_text"),
2021-01-12 22:43:03 +01:00
background_color: val(post, "link_flair_background_color"),
foreground_color: if val(post, "link_flair_text_color") == "dark" {
2020-11-17 05:36:36 +01:00
"black".to_string()
} else {
"white".to_string()
},
2021-01-12 22:43:03 +01:00
},
2020-12-30 04:01:02 +01:00
flags: Flags {
nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
2020-12-30 04:01:02 +01:00
},
2021-01-11 23:08:12 +01:00
domain: val(post, "domain"),
rel_time,
2021-01-16 20:50:12 +01:00
created,
comments: format_num(post["data"]["num_comments"].as_i64().unwrap_or_default()),
2021-02-06 21:05:11 +01:00
gallery,
awards,
2021-01-07 06:27:24 +01:00
}
2020-10-25 21:25:59 +01:00
}
// COMMENTS
fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet<String>) -> Vec<Comment> {
2021-05-20 21:24:06 +02:00
// Parse the comment JSON into a Vector of Comments
let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned);
2020-10-26 04:57:19 +01:00
2020-12-20 04:54:46 +01:00
// For each comment, retrieve the values to build a Comment object
2021-05-20 21:24:06 +02:00
comments
.into_iter()
.map(|comment| {
let kind = comment["kind"].as_str().unwrap_or_default().to_string();
let data = &comment["data"];
2021-02-14 23:53:09 +01:00
2021-05-20 21:24:06 +02:00
let unix_time = data["created_utc"].as_f64().unwrap_or_default();
let (rel_time, created) = time(unix_time);
2020-12-20 04:54:46 +01:00
2021-05-20 21:24:06 +02:00
let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time);
2021-02-14 23:53:09 +01:00
2021-05-20 21:24:06 +02:00
let score = data["score"].as_i64().unwrap_or(0);
let body = rewrite_urls(&val(&comment, "body_html"));
2020-10-26 01:52:57 +01:00
2021-05-20 21:24:06 +02:00
// If this comment contains replies, handle those too
let replies: Vec<Comment> = if data["replies"].is_object() {
parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters)
2021-01-31 23:10:13 +01:00
} else {
2021-05-20 21:24:06 +02:00
Vec::new()
};
let awards: Awards = Awards::parse(&data["all_awardings"]);
2021-05-20 21:24:06 +02:00
let parent_kind_and_id = val(&comment, "parent_id");
let parent_info = parent_kind_and_id.split('_').collect::<Vec<&str>>();
let id = val(&comment, "id");
let highlighted = id == highlighted_comment;
let author = Author {
name: val(&comment, "author"),
flair: Flair {
flair_parts: FlairPart::parse(
data["author_flair_type"].as_str().unwrap_or_default(),
data["author_flair_richtext"].as_array(),
data["author_flair_text"].as_str(),
),
text: esc!(&comment, "link_flair_text"),
background_color: val(&comment, "author_flair_background_color"),
foreground_color: val(&comment, "author_flair_text_color"),
},
distinguished: val(&comment, "distinguished"),
};
let is_filtered = filters.contains(&["u_", author.name.as_str()].concat());
// Many subreddits have a default comment posted about the sub's rules etc.
// Many libreddit users do not wish to see this kind of comment by default.
// Reddit does not tell us which users are "bots", so a good heuristic is to
// collapse stickied moderator comments.
let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator";
let is_stickied = data["stickied"].as_bool().unwrap_or_default();
let collapsed = (is_moderator_comment && is_stickied) || is_filtered;
2021-05-20 21:24:06 +02:00
Comment {
id,
kind,
parent_id: parent_info[1].to_string(),
parent_kind: parent_info[0].to_string(),
post_link: post_link.to_string(),
post_author: post_author.to_string(),
body,
author,
2021-05-20 21:24:06 +02:00
score: if data["score_hidden"].as_bool().unwrap_or_default() {
("\u{2022}".to_string(), "Hidden".to_string())
} else {
format_num(score)
},
rel_time,
created,
edited,
replies,
highlighted,
awards,
collapsed,
is_filtered,
2021-05-20 21:24:06 +02:00
}
})
.collect()
2021-12-27 19:15:25 +01:00
}