libreddit/src/utils.rs

//
// CRATES
//
2021-03-17 23:30:33 +01:00
use crate::{client::json, esc, server::RequestExt};
2021-01-01 06:03:44 +01:00
use askama::Template;
2021-03-17 23:30:33 +01:00
use cookie::Cookie;
use hyper::{Body, Request, Response};
2021-01-02 19:58:21 +01:00
use regex::Regex;
2021-03-17 23:30:33 +01:00
use serde_json::Value;
2021-01-09 02:35:04 +01:00
use std::collections::HashMap;
2021-01-13 21:52:00 +01:00
use time::{Duration, OffsetDateTime};
2021-03-17 23:30:33 +01:00
use url::Url;
2020-11-21 07:05:27 +01:00
2021-01-12 22:43:03 +01:00
// Post flair with content, background color and foreground color
pub struct Flair {
	pub flair_parts: Vec<FlairPart>,
	pub text: String,
	pub background_color: String,
	pub foreground_color: String,
}

// Part of flair, either emoji or text
pub struct FlairPart {
	pub flair_part_type: String,
	pub value: String,
}

impl FlairPart {
	pub fn parse(flair_type: &str, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<Self> {
		// Parse type of flair
		match flair_type {
			// If flair contains emojis and text
			"richtext" => match rich_flair {
				Some(rich) => rich
					.iter()
					// For each part of the flair, extract text and emojis
					.map(|part| {
						let value = |name: &str| part[name].as_str().unwrap_or_default();
						Self {
							flair_part_type: value("e").to_string(),
							value: match value("e") {
								"text" => value("t").to_string(),
								"emoji" => format_url(value("u")),
								_ => String::new(),
							},
						}
					})
					.collect::<Vec<Self>>(),
				None => Vec::new(),
			},
			// If flair contains only text
			"text" => match text_flair {
				Some(text) => vec![Self {
					flair_part_type: "text".to_string(),
					value: text.to_string(),
				}],
				None => Vec::new(),
			},
			_ => Vec::new(),
		}
	}
}
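
// Illustrative sketch (not part of the upstream file): exercising `FlairPart::parse`
// against a hand-built richtext flair array. The sample IDs and URLs below are invented.
#[cfg(test)]
mod flair_part_sketch {
	use super::FlairPart;

	#[test]
	fn parses_richtext_flair() {
		let rich = serde_json::json!([
			{ "e": "text", "t": "Hello" },
			{ "e": "emoji", "u": "https://emoji.redditmedia.com/abc/def" }
		]);
		let parts = FlairPart::parse("richtext", rich.as_array(), None);

		assert_eq!(parts.len(), 2);
		assert_eq!(parts[0].flair_part_type, "text");
		assert_eq!(parts[0].value, "Hello");
		// Emoji URLs go through format_url(), so they become local proxy paths.
		assert_eq!(parts[1].value, "/emoji/abc/def");
	}
}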

pub struct Author {
	pub name: String,
	pub flair: Flair,
	pub distinguished: String,
}

// Post flags with nsfw and stickied
pub struct Flags {
	pub nsfw: bool,
	pub stickied: bool,
}

pub struct Media {
	pub url: String,
	pub alt_url: String,
	pub width: i64,
	pub height: i64,
	pub poster: String,
}

impl Media {
	pub async fn parse(data: &Value) -> (String, Self, Vec<GalleryMedia>) {
		let mut gallery = Vec::new();

		// If post is a video, return the video
		let (post_type, url_val, alt_url_val) = if data["preview"]["reddit_video_preview"]["fallback_url"].is_string() {
			// Return reddit video
			(
				if data["preview"]["reddit_video_preview"]["is_gif"].as_bool().unwrap_or(false) {
					"gif"
				} else {
					"video"
				},
				&data["preview"]["reddit_video_preview"]["fallback_url"],
				Some(&data["preview"]["reddit_video_preview"]["hls_url"]),
			)
		} else if data["secure_media"]["reddit_video"]["fallback_url"].is_string() {
			// Return reddit video
			(
				if data["preview"]["reddit_video_preview"]["is_gif"].as_bool().unwrap_or(false) {
					"gif"
				} else {
					"video"
				},
				&data["secure_media"]["reddit_video"]["fallback_url"],
				Some(&data["secure_media"]["reddit_video"]["hls_url"]),
			)
		} else if data["post_hint"].as_str().unwrap_or("") == "image" {
			// Handle images, whether GIFs or pics
			let preview = &data["preview"]["images"][0];
			let mp4 = &preview["variants"]["mp4"];

			if mp4.is_object() {
				// Return the mp4 if the media is a gif
				("gif", &mp4["source"]["url"], None)
			} else {
				// Return the picture if the media is an image
				if data["domain"] == "i.redd.it" {
					("image", &data["url"], None)
				} else {
					("image", &preview["source"]["url"], None)
				}
			}
		} else if data["is_self"].as_bool().unwrap_or_default() {
			// If type is self, return permalink
			("self", &data["permalink"], None)
		} else if data["is_gallery"].as_bool().unwrap_or_default() {
			// If this post contains a gallery of images
			gallery = GalleryMedia::parse(&data["gallery_data"]["items"], &data["media_metadata"]);

			("gallery", &data["url"], None)
		} else {
			// If type can't be determined, return url
			("link", &data["url"], None)
		};

		let source = &data["preview"]["images"][0]["source"];

		let url = if post_type == "self" || post_type == "link" {
			url_val.as_str().unwrap_or_default().to_string()
		} else {
			format_url(url_val.as_str().unwrap_or_default())
		};

		let alt_url = alt_url_val.map_or(String::new(), |val| format_url(val.as_str().unwrap_or_default()));

		(
			post_type.to_string(),
			Self {
				url,
				alt_url,
				width: source["width"].as_i64().unwrap_or_default(),
				height: source["height"].as_i64().unwrap_or_default(),
				poster: format_url(source["url"].as_str().unwrap_or_default()),
			},
			gallery,
		)
	}
}

pub struct GalleryMedia {
	pub url: String,
	pub width: i64,
	pub height: i64,
	pub caption: String,
	pub outbound_url: String,
}

impl GalleryMedia {
	fn parse(items: &Value, metadata: &Value) -> Vec<Self> {
		items
			.as_array()
			.unwrap_or(&Vec::new())
			.iter()
			.map(|item| {
				// For each image in gallery
				let media_id = item["media_id"].as_str().unwrap_or_default();
				let image = &metadata[media_id]["s"];

				// Construct gallery items
				Self {
					url: format_url(image["u"].as_str().unwrap_or_default()),
					width: image["x"].as_i64().unwrap_or_default(),
					height: image["y"].as_i64().unwrap_or_default(),
					caption: item["caption"].as_str().unwrap_or_default().to_string(),
					outbound_url: item["outbound_url"].as_str().unwrap_or_default().to_string(),
				}
			})
			.collect::<Vec<Self>>()
	}
}
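
// Illustrative sketch (not part of the upstream file): feeding `GalleryMedia::parse`
// a minimal gallery item plus matching media metadata. The media ID and URL are invented.
#[cfg(test)]
mod gallery_media_sketch {
	use super::GalleryMedia;

	#[test]
	fn builds_gallery_items_from_metadata() {
		let items = serde_json::json!([{ "media_id": "abc123", "caption": "A photo" }]);
		let metadata = serde_json::json!({
			"abc123": { "s": { "u": "https://preview.redd.it/abc123.jpg", "x": 640, "y": 480 } }
		});

		let gallery = GalleryMedia::parse(&items, &metadata);

		assert_eq!(gallery.len(), 1);
		assert_eq!(gallery[0].url, "/preview/pre/abc123.jpg");
		assert_eq!(gallery[0].width, 640);
		assert_eq!(gallery[0].height, 480);
		assert_eq!(gallery[0].caption, "A photo");
	}
}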

// Post containing content, metadata and media
pub struct Post {
	pub id: String,
	pub title: String,
	pub community: String,
	pub body: String,
	pub author: Author,
	pub permalink: String,
	pub score: (String, String),
	pub upvote_ratio: i64,
	pub post_type: String,
	pub flair: Flair,
	pub flags: Flags,
	pub thumbnail: Media,
	pub media: Media,
	pub domain: String,
	pub rel_time: String,
	pub created: String,
	pub comments: (String, String),
	pub gallery: Vec<GalleryMedia>,
}

impl Post {
	// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
	pub async fn fetch(path: &str, fallback_title: String, quarantine: bool) -> Result<(Vec<Self>, String), String> {
		let res;
		let post_list;

		// Send a request to the url
		match json(path.to_string(), quarantine).await {
			// If success, receive JSON in response
			Ok(response) => {
				res = response;
			}
			// If the Reddit API returns an error, exit this function
			Err(msg) => return Err(msg),
		}

		// Fetch the list of posts from the JSON response
		match res["data"]["children"].as_array() {
			Some(list) => post_list = list,
			None => return Err("No posts found".to_string()),
		}

		let mut posts: Vec<Self> = Vec::new();

		// For each post from posts list
		for post in post_list {
			let data = &post["data"];

			let (rel_time, created) = time(data["created_utc"].as_f64().unwrap_or_default());
			let score = data["score"].as_i64().unwrap_or_default();
			let ratio: f64 = data["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
			let title = esc!(post, "title");

			// Determine the type of media along with the media URL
			let (post_type, media, gallery) = Media::parse(&data).await;

			posts.push(Self {
				id: val(post, "id"),
				title: esc!(if title.is_empty() { fallback_title.to_owned() } else { title }),
				community: val(post, "subreddit"),
				body: rewrite_urls(&val(post, "body_html")),
				author: Author {
					name: val(post, "author"),
					flair: Flair {
						flair_parts: FlairPart::parse(
							data["author_flair_type"].as_str().unwrap_or_default(),
							data["author_flair_richtext"].as_array(),
							data["author_flair_text"].as_str(),
						),
						text: esc!(post, "link_flair_text"),
						background_color: val(post, "author_flair_background_color"),
						foreground_color: val(post, "author_flair_text_color"),
					},
					distinguished: val(post, "distinguished"),
				},
				score: if data["hide_score"].as_bool().unwrap_or_default() {
					("\u{2022}".to_string(), "Hidden".to_string())
				} else {
					format_num(score)
				},
				upvote_ratio: ratio as i64,
				post_type,
				thumbnail: Media {
					url: format_url(val(post, "thumbnail").as_str()),
					alt_url: String::new(),
					width: data["thumbnail_width"].as_i64().unwrap_or_default(),
					height: data["thumbnail_height"].as_i64().unwrap_or_default(),
					poster: "".to_string(),
				},
				media,
				domain: val(post, "domain"),
				flair: Flair {
					flair_parts: FlairPart::parse(
						data["link_flair_type"].as_str().unwrap_or_default(),
						data["link_flair_richtext"].as_array(),
						data["link_flair_text"].as_str(),
					),
					text: esc!(post, "link_flair_text"),
					background_color: val(post, "link_flair_background_color"),
					foreground_color: if val(post, "link_flair_text_color") == "dark" {
						"black".to_string()
					} else {
						"white".to_string()
					},
				},
				flags: Flags {
					nsfw: data["over_18"].as_bool().unwrap_or_default(),
					stickied: data["stickied"].as_bool().unwrap_or_default(),
				},
				permalink: val(post, "permalink"),
				rel_time,
				created,
				comments: format_num(data["num_comments"].as_i64().unwrap_or_default()),
				gallery,
			});
		}

		Ok((posts, res["data"]["after"].as_str().unwrap_or_default().to_string()))
	}
}

#[derive(Template)]
#[template(path = "comment.html", escape = "none")]
// Comment with content, post, score and date/time that it was posted
pub struct Comment {
	pub id: String,
	pub kind: String,
	pub parent_id: String,
	pub parent_kind: String,
	pub post_link: String,
	pub post_author: String,
	pub body: String,
	pub author: Author,
	pub score: (String, String),
	pub rel_time: String,
	pub created: String,
	pub edited: (String, String),
	pub replies: Vec<Comment>,
	pub highlighted: bool,
}

#[derive(Template)]
#[template(path = "error.html", escape = "none")]
pub struct ErrorTemplate {
	pub msg: String,
	pub prefs: Preferences,
}

#[derive(Default)]
// User struct containing metadata about user
pub struct User {
	pub name: String,
	pub title: String,
	pub icon: String,
	pub karma: i64,
	pub created: String,
	pub banner: String,
	pub description: String,
}

#[derive(Default)]
// Subreddit struct containing metadata about community
pub struct Subreddit {
	pub name: String,
	pub title: String,
	pub description: String,
	pub info: String,
	pub moderators: Vec<String>,
	pub icon: String,
	pub members: (String, String),
	pub active: (String, String),
	pub wiki: bool,
}

// Parser for query params, used in sorting (e.g. /r/rust/?sort=hot)
#[derive(serde::Deserialize)]
pub struct Params {
	pub t: Option<String>,
	pub q: Option<String>,
	pub sort: Option<String>,
	pub after: Option<String>,
	pub before: Option<String>,
}

#[derive(Default)]
pub struct Preferences {
	pub theme: String,
	pub front_page: String,
	pub layout: String,
	pub wide: String,
	pub show_nsfw: String,
	pub hide_hls_notification: String,
	pub use_hls: String,
	pub comment_sort: String,
	pub post_sort: String,
	pub subscriptions: Vec<String>,
}

impl Preferences {
	// Build preferences from cookies
	pub fn new(req: Request<Body>) -> Self {
		Self {
			theme: setting(&req, "theme"),
			front_page: setting(&req, "front_page"),
			layout: setting(&req, "layout"),
			wide: setting(&req, "wide"),
			show_nsfw: setting(&req, "show_nsfw"),
			use_hls: setting(&req, "use_hls"),
			hide_hls_notification: setting(&req, "hide_hls_notification"),
			comment_sort: setting(&req, "comment_sort"),
			post_sort: setting(&req, "post_sort"),
			subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(),
		}
	}
}

//
// FORMATTING
//

// Grab a query parameter from a url
pub fn param(path: &str, value: &str) -> Option<String> {
	Some(
		Url::parse(format!("https://libredd.it/{}", path).as_str())
			.ok()?
			.query_pairs()
			.into_owned()
			.collect::<HashMap<_, _>>()
			.get(value)?
			.to_owned(),
	)
}
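
// Illustrative sketch (not part of the upstream file): param() pulls a single query
// parameter out of a request path. The path below is just an example.
#[cfg(test)]
mod param_sketch {
	use super::param;

	#[test]
	fn reads_a_query_parameter() {
		assert_eq!(param("/r/rust/?sort=top&t=week", "sort"), Some("top".to_string()));
		assert_eq!(param("/r/rust/?sort=top&t=week", "q"), None);
	}
}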

// Retrieve the value of a setting by name
pub fn setting(req: &Request<Body>, name: &str) -> String {
	// Parse a cookie value from request
	req
		.cookie(name)
		.unwrap_or_else(|| {
			// If there is no cookie for this setting, try receiving a default from an environment variable
			if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) {
				Cookie::new(name, default)
			} else {
				Cookie::named(name)
			}
		})
		.value()
		.to_string()
}

// Detect and redirect in the event of a random subreddit
pub async fn catch_random(sub: &str, additional: &str) -> Result<Response<Body>, String> {
	if (sub == "random" || sub == "randnsfw") && !sub.contains('+') {
		let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"]
			.as_str()
			.unwrap_or_default()
			.to_string();
		Ok(redirect(format!("/r/{}{}", new_sub, additional)))
	} else {
		Err("No redirect needed".to_string())
	}
}

// Rewrite media URLs to point at Libreddit's built-in proxy routes
pub fn format_url(url: &str) -> String {
	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
		String::new()
	} else {
		match Url::parse(url) {
			Ok(parsed) => {
				let domain = parsed.domain().unwrap_or_default();

				let capture = |regex: &str, format: &str, segments: i16| {
					Regex::new(regex)
						.map(|re| match re.captures(url) {
							Some(caps) => match segments {
								1 => [format, &caps[1]].join(""),
								2 => [format, &caps[1], "/", &caps[2]].join(""),
								_ => String::new(),
							},
							None => String::new(),
						})
						.unwrap_or_default()
				};

				macro_rules! chain {
					() => {
						{
							String::new()
						}
					};
					( $first_fn:expr, $($other_fns:expr), *) => {
						{
							let result = $first_fn;
							if result.is_empty() {
								chain!($($other_fns,)*)
							} else {
								result
							}
						}
					};
				}

				match domain {
					"v.redd.it" => chain!(
						capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
						capture(r"https://v\.redd\.it/(.+)/(HLSPlaylist\.m3u8.*)$", "/hls/", 2)
					),
					"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
					"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
					"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
					"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
					"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)", "/preview/pre/", 1),
					"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)", "/preview/external-pre/", 1),
					"styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
					"www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
					_ => String::new(),
				}
			}
			Err(_) => String::new(),
		}
	}
}
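
// Illustrative sketch (not part of the upstream file): a few format_url() rewrites,
// showing how known Reddit media hosts are mapped onto local proxy paths.
// The file names are invented.
#[cfg(test)]
mod format_url_sketch {
	use super::format_url;

	#[test]
	fn proxies_known_media_hosts() {
		assert_eq!(format_url("https://i.redd.it/foo123.jpg"), "/img/foo123.jpg");
		assert_eq!(format_url("https://a.thumbs.redditmedia.com/bar.png"), "/thumb/a/bar.png");
		// Unknown hosts and placeholder values collapse to an empty string.
		assert_eq!(format_url("https://example.com/baz.jpg"), "");
		assert_eq!(format_url("self"), "");
	}
}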

// Rewrite Reddit links to Libreddit in body of text
pub fn rewrite_urls(input_text: &str) -> String {
	let text1 = match Regex::new(r#"href="(https|http|)://(www.|old.|np.|amp.|)(reddit).(com)/"#) {
		Ok(re) => re.replace_all(input_text, r#"href="/"#).to_string(),
		Err(_) => String::new(),
	};

	// Rewrite external media previews to Libreddit
	match Regex::new(r"https://external-preview\.redd\.it(.*)[^?]") {
		Ok(re) => {
			if re.is_match(&text1) {
				re.replace_all(&text1, format_url(re.find(&text1).map(|x| x.as_str()).unwrap_or_default())).to_string()
			} else {
				text1
			}
		}
		Err(_) => String::new(),
	}
}
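
// Illustrative sketch (not part of the upstream file): rewrite_urls() turns absolute
// Reddit links inside rendered HTML into relative Libreddit links. The link is invented.
#[cfg(test)]
mod rewrite_urls_sketch {
	use super::rewrite_urls;

	#[test]
	fn rewrites_reddit_hrefs() {
		let input = r#"<a href="https://www.reddit.com/r/rust/comments/abc/">link</a>"#;
		assert_eq!(rewrite_urls(input), r#"<a href="/r/rust/comments/abc/">link</a>"#);
	}
}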

// Append `m` and `k` for millions and thousands respectively
pub fn format_num(num: i64) -> (String, String) {
	let truncated = if num >= 1_000_000 || num <= -1_000_000 {
		format!("{}m", num / 1_000_000)
	} else if num >= 1000 || num <= -1000 {
		format!("{}k", num / 1_000)
	} else {
		num.to_string()
	};

	(truncated, num.to_string())
}
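
// Illustrative sketch (not part of the upstream file): format_num() returns both a
// truncated label and the full number as strings.
#[cfg(test)]
mod format_num_sketch {
	use super::format_num;

	#[test]
	fn truncates_thousands_and_millions() {
		assert_eq!(format_num(999), ("999".to_string(), "999".to_string()));
		assert_eq!(format_num(12_340), ("12k".to_string(), "12340".to_string()));
		assert_eq!(format_num(2_500_000), ("2m".to_string(), "2500000".to_string()));
	}
}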

// Parse a relative and absolute time from a UNIX timestamp
pub fn time(created: f64) -> (String, String) {
	let time = OffsetDateTime::from_unix_timestamp(created.round() as i64);
	let time_delta = OffsetDateTime::now_utc() - time;

	// If the time difference is more than a month, show full date
	let rel_time = if time_delta > Duration::days(30) {
		time.format("%b %d '%y")
	// Otherwise, show relative date/time
	} else if time_delta.whole_days() > 0 {
		format!("{}d ago", time_delta.whole_days())
	} else if time_delta.whole_hours() > 0 {
		format!("{}h ago", time_delta.whole_hours())
	} else {
		format!("{}m ago", time_delta.whole_minutes())
	};

	(rel_time, time.format("%b %d %Y, %H:%M:%S UTC"))
}
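
// Illustrative sketch (not part of the upstream file): for an old timestamp the
// relative form falls back to a short date. The expected strings assume the
// %-style format specifiers of the time 0.2 crate used above.
#[cfg(test)]
mod time_sketch {
	#[test]
	fn formats_the_unix_epoch() {
		let (rel, abs) = super::time(0.0);
		assert_eq!(rel, "Jan 01 '70");
		assert_eq!(abs, "Jan 01 1970, 00:00:00 UTC");
	}
}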

// val() function used to read a string field from the "data" object in Reddit API JSON
pub fn val(j: &Value, k: &str) -> String {
	j["data"][k].as_str().unwrap_or_default().to_string()
}
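
// Illustrative sketch (not part of the upstream file): val() reads a string field
// from the nested "data" object that Reddit wraps around most API objects.
#[cfg(test)]
mod val_sketch {
	use super::val;

	#[test]
	fn reads_nested_data_fields() {
		let post = serde_json::json!({ "data": { "title": "Hello world", "score": 42 } });
		assert_eq!(val(&post, "title"), "Hello world");
		// Missing keys and non-string values fall back to an empty string.
		assert_eq!(val(&post, "score"), "");
		assert_eq!(val(&post, "missing"), "");
	}
}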

#[macro_export]
macro_rules! esc {
	($f:expr) => {
		$f.replace('<', "&lt;").replace('>', "&gt;")
	};
	($j:expr, $k:expr) => {
		$j["data"][$k].as_str().unwrap_or_default().to_string().replace('<', "&lt;").replace('>', "&gt;")
	};
}
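
// Illustrative sketch (not part of the upstream file): the two arms of esc!() escape
// angle brackets either in a plain expression or in a "data" field of a JSON value.
#[cfg(test)]
mod esc_sketch {
	#[test]
	fn escapes_angle_brackets() {
		assert_eq!(esc!("<b>bold</b>"), "&lt;b&gt;bold&lt;/b&gt;");

		let post = serde_json::json!({ "data": { "title": "1 < 2" } });
		assert_eq!(esc!(post, "title"), "1 &lt; 2");
	}
}
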
// Escape < and > to accurately render HTML
// pub fn esc(j: &Value, k: &str) -> String {
// val(j,k)
// // .replace('&', "&amp;")
// .replace('<', "&lt;")
// .replace('>', "&gt;")
// // .replace('"', "&quot;")
// // .replace('\'', "&#x27;")
// // .replace('/', "&#x2f;")
// }

//
// NETWORKING
//

pub fn template(t: impl Template) -> Result<Response<Body>, String> {
	Ok(
		Response::builder()
			.status(200)
			.header("content-type", "text/html")
			.body(t.render().unwrap_or_default().into())
			.unwrap_or_default(),
	)
}

pub fn redirect(path: String) -> Response<Body> {
	Response::builder()
		.status(302)
		.header("content-type", "text/html")
		.header("Location", &path)
		.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path).into())
		.unwrap_or_default()
}
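
// Illustrative sketch (not part of the upstream file): redirect() builds a plain
// 302 response pointing at the given path.
#[cfg(test)]
mod redirect_sketch {
	use super::redirect;

	#[test]
	fn builds_a_302_response() {
		let res = redirect("/r/rust".to_string());
		assert_eq!(res.status(), 302);
		assert_eq!(res.headers()["Location"].to_str().unwrap_or_default(), "/r/rust");
	}
}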

pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, String> {
	let body = ErrorTemplate {
		msg,
		prefs: Preferences::new(req),
	}
	.render()
	.unwrap_or_default();

	Ok(Response::builder().status(404).header("content-type", "text/html").body(body.into()).unwrap_or_default())
}