From d49a3fd38a99ef6f345dc135d71fd71e7efbd397 Mon Sep 17 00:00:00 2001 From: Matthew Esposito Date: Wed, 9 Nov 2022 15:02:21 -0500 Subject: [PATCH] Many clippy changes --- src/client.rs | 18 ++++----- src/main.rs | 62 +++++++++++++++++++++++-------- src/post.rs | 14 +++---- src/search.rs | 24 ++++++------ src/server.rs | 65 ++++++++++++++++----------------- src/settings.rs | 17 ++++----- src/subreddit.rs | 81 +++++++++++++++++++---------------------- src/user.rs | 16 ++++---- src/utils.rs | 95 +++++++++++++++++++++++------------------------- 9 files changed, 205 insertions(+), 187 deletions(-) diff --git a/src/client.rs b/src/client.rs index da78304..e24741b 100644 --- a/src/client.rs +++ b/src/client.rs @@ -51,12 +51,12 @@ pub async fn canonical_path(path: String) -> Result, String> { } pub async fn proxy(req: Request, format: &str) -> Result, String> { - let mut url = format!("{}?{}", format, req.uri().query().unwrap_or_default()); + let mut url = format!("{format}?{}", req.uri().query().unwrap_or_default()); // For each parameter in request for (name, value) in req.params().iter() { // Fill the parameter value in the url - url = url.replace(&format!("{{{}}}", name), value); + url = url.replace(&format!("{{{name}}}"), value); } stream(&url, &req).await @@ -70,7 +70,7 @@ async fn stream(url: &str, req: &Request) -> Result, String let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build(); // Build the hyper client from the HTTPS connector. - let client: client::Client<_, hyper::Body> = client::Client::builder().build(https); + let client: client::Client<_, Body> = client::Client::builder().build(https); let mut builder = Request::get(uri); @@ -116,18 +116,18 @@ fn reddit_head(path: String, quarantine: bool) -> Boxed, S request(&Method::HEAD, path, false, quarantine) } -/// Makes a request to Reddit. If `redirect` is `true`, request_with_redirect +/// Makes a request to Reddit. If `redirect` is `true`, `request_with_redirect` /// will recurse on the URL that Reddit provides in the Location HTTP header /// in its response. fn request(method: &'static Method, path: String, redirect: bool, quarantine: bool) -> Boxed, String>> { // Build Reddit URL from path. - let url = format!("{}{}", REDDIT_URL_BASE, path); + let url = format!("{REDDIT_URL_BASE}{path}"); // Prepare the HTTPS connector. let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_or_http().enable_http1().build(); // Construct the hyper client from the HTTPS connector. - let client: client::Client<_, hyper::Body> = client::Client::builder().build(https); + let client: client::Client<_, Body> = client::Client::builder().build(https); // Build request to Reddit. When making a GET, request gzip compression. // (Reddit doesn't do brotli yet.) @@ -161,7 +161,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo .get("Location") .map(|val| { let new_url = percent_encode(val.as_bytes(), CONTROLS).to_string(); - format!("{}{}raw_json=1", new_url, if new_url.contains('?') { "&" } else { "?" }) + format!("{new_url}{}raw_json=1", if new_url.contains('?') { "&" } else { "?" }) }) .unwrap_or_default() .to_string(), @@ -236,7 +236,7 @@ pub async fn json(path: String, quarantine: bool) -> Result { // Closure to quickly build errors let err = |msg: &str, e: String| -> Result { // eprintln!("{} - {}: {}", url, msg, e); - Err(format!("{}: {}", msg, e)) + Err(format!("{msg}: {e}")) }; // Fetch the url... 
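Note: the client.rs hunks above are representative of most of this patch — identifiers are captured directly in the format string, which is what clippy's uninlined_format_args lint (in the pedantic group enabled in src/main.rs below) asks for. A minimal standalone sketch of the before/after; the describe() helper is hypothetical and not part of this patch:

fn describe(msg: &str, e: &str) -> String {
	// Before: positional arguments listed after the format string.
	// format!("{}: {}", msg, e)
	// After: Rust 2021 captured identifiers, as applied throughout this patch.
	format!("{msg}: {e}")
}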
@@ -258,7 +258,7 @@ pub async fn json(path: String, quarantine: bool) -> Result { .as_str() .unwrap_or_else(|| { json["message"].as_str().unwrap_or_else(|| { - eprintln!("{}{} - Error parsing reddit error", REDDIT_URL_BASE, path); + eprintln!("{REDDIT_URL_BASE}{path} - Error parsing reddit error"); "Error parsing reddit error" }) }) diff --git a/src/main.rs b/src/main.rs index 8592e3c..17a38c5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,35 @@ +#![deny( + anonymous_parameters, + clippy::all, + illegal_floating_point_literal_pattern, + late_bound_lifetime_arguments, + path_statements, + patterns_in_fns_without_body, + rust_2018_idioms, + trivial_numeric_casts, + unused_extern_crates +)] +#![warn( + clippy::dbg_macro, + clippy::decimal_literal_representation, + clippy::get_unwrap, + clippy::nursery, + clippy::pedantic, + clippy::todo, + clippy::unimplemented, + clippy::use_debug, + clippy::all, + unused_qualifications, + variant_size_differences +)] +#![allow(elided_lifetimes_in_paths)] +#![allow(clippy::unused_async)] +#![allow(clippy::similar_names)] +#![allow(clippy::items_after_statements)] +#![allow(clippy::cast_possible_truncation)] +#![allow(clippy::cast_precision_loss)] +#![allow(clippy::match_wildcard_for_single_variants)] + // Global specifiers #![forbid(unsafe_code)] #![allow(clippy::cmp_owned)] @@ -143,9 +175,9 @@ async fn main() { ) .get_matches(); - let address = matches.get_one("address").map(|m: &String| m.as_str()).unwrap_or("0.0.0.0"); - let port = std::env::var("PORT").unwrap_or_else(|_| matches.get_one("port").map(|m: &String| m.as_str()).unwrap_or("8080").to_string()); - let hsts = matches.get_one("hsts").map(|m: &String| m.as_str()); + let address = matches.get_one("address").map_or("0.0.0.0", std::string::String::as_str); + let port = std::env::var("PORT").unwrap_or_else(|_| matches.get_one("port").map_or("8080", std::string::String::as_str).to_string()); + let hsts = matches.get_one("hsts").map(std::string::String::as_str); let listener = [address, ":", &port].concat(); @@ -163,7 +195,7 @@ async fn main() { }; if let Some(expire_time) = hsts { - if let Ok(val) = HeaderValue::from_str(&format!("max-age={}", expire_time)) { + if let Ok(val) = HeaderValue::from_str(&format!("max-age={expire_time}")) { app.default_headers.insert("Strict-Transport-Security", val); } } @@ -204,7 +236,7 @@ async fn main() { // Browse user profile app .at("/u/:name") - .get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed()); app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed()); @@ -228,7 +260,7 @@ async fn main() { app .at("/r/u_:name") - .get(|r| async move { Ok(redirect(format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed()); app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed()); @@ -248,10 +280,10 @@ async fn main() { app .at("/r/:sub/w") - .get(|r| async move { Ok(redirect(format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed()); app .at("/r/:sub/w/*page") - .get(|r| async move 
{ Ok(redirect(format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed()); app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed()); app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed()); @@ -263,10 +295,10 @@ async fn main() { app.at("/").get(|r| subreddit::community(r).boxed()); // View Reddit wiki - app.at("/w").get(|_| async { Ok(redirect("/wiki".to_string())) }.boxed()); + app.at("/w").get(|_| async { Ok(redirect("/wiki")) }.boxed()); app .at("/w/*page") - .get(|r| async move { Ok(redirect(format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed()); + .get(|r| async move { Ok(redirect(&format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed()); app.at("/wiki").get(|r| subreddit::wiki(r).boxed()); app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed()); @@ -283,9 +315,9 @@ async fn main() { Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await, // Short link for post - Some(id) if (5..7).contains(&id.len()) => match canonical_path(format!("/{}", id)).await { + Some(id) if (5..7).contains(&id.len()) => match canonical_path(format!("/{id}")).await { Ok(path_opt) => match path_opt { - Some(path) => Ok(redirect(path)), + Some(path) => Ok(redirect(&path)), None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await, }, Err(e) => error(req, e).await, @@ -300,12 +332,12 @@ async fn main() { // Default service in case no routes match app.at("/*").get(|req| error(req, "Nothing here".to_string()).boxed()); - println!("Running Libreddit v{} on {}!", env!("CARGO_PKG_VERSION"), listener); + println!("Running Libreddit v{} on {listener}!", env!("CARGO_PKG_VERSION")); - let server = app.listen(listener); + let server = app.listen(&listener); // Run this server for... forever! 
if let Err(e) = server.await { - eprintln!("Server error: {}", e); + eprintln!("Server error: {e}"); } } diff --git a/src/post.rs b/src/post.rs index 5f3142a..355559a 100644 --- a/src/post.rs +++ b/src/post.rs @@ -37,14 +37,14 @@ pub async fn item(req: Request) -> Result, String> { if default_sort.is_empty() { String::new() } else { - path = format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), default_sort); + path = format!("{}.json?{}&sort={default_sort}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default()); default_sort } }); // Log the post ID being fetched in debug mode #[cfg(debug_assertions)] - dbg!(req.param("id").unwrap_or_default()); + req.param("id").unwrap_or_default(); let single_thread = req.param("comment_id").is_some(); let highlighted_comment = &req.param("comment_id").unwrap_or_default(); @@ -59,20 +59,20 @@ pub async fn item(req: Request) -> Result, String> { let url = req.uri().to_string(); // Use the Post and Comment structs to generate a website to show users - template(PostTemplate { + Ok(template(&PostTemplate { comments, post, sort, - prefs: Preferences::new(req), + prefs: Preferences::new(&req), single_thread, url, - }) + })) } // If the Reddit API returns an error, exit and send error page to user Err(msg) => { if msg == "quarantined" { let sub = req.param("sub").unwrap_or_default(); - quarantine(req, sub) + Ok(quarantine(&req, sub)) } else { error(req, msg).await } @@ -137,7 +137,7 @@ async fn parse_post(json: &serde_json::Value) -> Post { alt_url: String::new(), width: post["data"]["thumbnail_width"].as_i64().unwrap_or_default(), height: post["data"]["thumbnail_height"].as_i64().unwrap_or_default(), - poster: "".to_string(), + poster: String::new(), }, flair: Flair { flair_parts: FlairPart::parse( diff --git a/src/search.rs b/src/search.rs index 4b13594..434a858 100644 --- a/src/search.rs +++ b/src/search.rs @@ -49,15 +49,15 @@ struct SearchTemplate { // SERVICES pub async fn find(req: Request) -> Result, String> { let nsfw_results = if setting(&req, "show_nsfw") == "on" { "&include_over_18=on" } else { "" }; - let path = format!("{}.json?{}{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), nsfw_results); + let path = format!("{}.json?{}{nsfw_results}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default()); let query = param(&path, "q").unwrap_or_default(); if query.is_empty() { - return Ok(redirect("/".to_string())); + return Ok(redirect("/")); } if query.starts_with("r/") { - return Ok(redirect(format!("/{}", query))); + return Ok(redirect(&format!("/{query}"))); } let sub = req.param("sub").unwrap_or_default(); @@ -85,7 +85,7 @@ pub async fn find(req: Request) -> Result, String> { // If all requested subs are filtered, we don't need to fetch posts. 
if sub.split('+').all(|s| filters.contains(s)) { - template(SearchTemplate { + Ok(template(&SearchTemplate { posts: Vec::new(), subreddits, sub, @@ -94,22 +94,22 @@ pub async fn find(req: Request) -> Result, String> { sort, t: param(&path, "t").unwrap_or_default(), before: param(&path, "after").unwrap_or_default(), - after: "".to_string(), + after: String::new(), restrict_sr: param(&path, "restrict_sr").unwrap_or_default(), typed, }, - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, is_filtered: true, all_posts_filtered: false, all_posts_hidden_nsfw: false, - }) + })) } else { match Post::fetch(&path, quarantined).await { Ok((mut posts, after)) => { let all_posts_filtered = filter_posts(&mut posts, &filters); let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"; - template(SearchTemplate { + Ok(template(&SearchTemplate { posts, subreddits, sub, @@ -122,17 +122,17 @@ pub async fn find(req: Request) -> Result, String> { restrict_sr: param(&path, "restrict_sr").unwrap_or_default(), typed, }, - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, is_filtered: false, all_posts_filtered, all_posts_hidden_nsfw, - }) + })) } Err(msg) => { if msg == "quarantined" { let sub = req.param("sub").unwrap_or_default(); - quarantine(req, sub) + Ok(quarantine(&req, sub)) } else { error(req, msg).await } @@ -143,7 +143,7 @@ pub async fn find(req: Request) -> Result, String> { async fn search_subreddits(q: &str, typed: &str) -> Vec { let limit = if typed == "sr_user" { "50" } else { "3" }; - let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={}", q.replace(' ', "+"), limit); + let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={limit}", q.replace(' ', "+")); // Send a request to the url json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"] diff --git a/src/server.rs b/src/server.rs index c277b6b..adba905 100644 --- a/src/server.rs +++ b/src/server.rs @@ -46,11 +46,11 @@ impl CompressionType { /// Returns a `CompressionType` given a content coding /// in [RFC 7231](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.4) /// format. - fn parse(s: &str) -> Option { + fn parse(s: &str) -> Option { let c = match s { // Compressors we support. - "gzip" => CompressionType::Gzip, - "br" => CompressionType::Brotli, + "gzip" => Self::Gzip, + "br" => Self::Brotli, // The wildcard means that we can choose whatever // compression we prefer. 
In this case, use the @@ -68,8 +68,8 @@ impl CompressionType { impl ToString for CompressionType { fn to_string(&self) -> String { match self { - CompressionType::Gzip => "gzip".to_string(), - CompressionType::Brotli => "br".to_string(), + Self::Gzip => "gzip".to_string(), + Self::Brotli => "br".to_string(), _ => String::new(), } } @@ -176,25 +176,25 @@ impl ResponseExt for Response { } impl Route<'_> { - fn method(&mut self, method: Method, dest: fn(Request) -> BoxResponse) -> &mut Self { + fn method(&mut self, method: &Method, dest: fn(Request) -> BoxResponse) -> &mut Self { self.router.add(&format!("/{}{}", method.as_str(), self.path), dest); self } /// Add an endpoint for `GET` requests pub fn get(&mut self, dest: fn(Request) -> BoxResponse) -> &mut Self { - self.method(Method::GET, dest) + self.method(&Method::GET, dest) } /// Add an endpoint for `POST` requests pub fn post(&mut self, dest: fn(Request) -> BoxResponse) -> &mut Self { - self.method(Method::POST, dest) + self.method(&Method::POST, dest) } } impl Server { pub fn new() -> Self { - Server { + Self { default_headers: HeaderMap::new(), router: Router::new(), } @@ -207,7 +207,7 @@ impl Server { } } - pub fn listen(self, addr: String) -> Boxed> { + pub fn listen(self, addr: &str) -> Boxed> { let make_svc = make_service_fn(move |_conn| { // For correct borrowing, these values need to be borrowed let router = self.router.clone(); @@ -231,7 +231,7 @@ impl Server { } // Match the visited path with an added route - match router.recognize(&format!("/{}{}", req.method().as_str(), path)) { + match router.recognize(&format!("/{}{path}", req.method().as_str())) { // If a route was configured for this path Ok(found) => { let mut parammed = req; @@ -243,7 +243,7 @@ impl Server { match func.await { Ok(mut res) => { res.headers_mut().extend(def_headers); - let _ = compress_response(req_headers, &mut res).await; + drop(compress_response(req_headers, &mut res).await); Ok(res) } @@ -260,7 +260,7 @@ impl Server { }); // Build SocketAddr from provided address - let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {} as address (example format: 0.0.0.0:8080)", addr)); + let address = &addr.parse().unwrap_or_else(|_| panic!("Cannot parse {addr} as address (example format: 0.0.0.0:8080)")); // Bind server to address specified above. Gracefully shut down if CTRL+C is pressed let server = HyperServer::bind(address).serve(make_svc).with_graceful_shutdown(async { @@ -282,7 +282,7 @@ async fn new_boilerplate( ) -> Result, String> { match Response::builder().status(status).body(body) { Ok(mut res) => { - let _ = compress_response(req_headers, &mut res).await; + drop(compress_response(req_headers, &mut res).await); res.headers_mut().extend(default_headers.clone()); Ok(res) @@ -445,10 +445,10 @@ fn determine_compressor(accept_encoding: &str) -> Option { }; } - if cur_candidate.q != f64::NEG_INFINITY { - Some(cur_candidate.alg) - } else { + if cur_candidate.q == f64::NEG_INFINITY { None + } else { + Some(cur_candidate.alg) } } @@ -460,16 +460,16 @@ fn determine_compressor(accept_encoding: &str) -> Option { /// conditions are met: /// /// 1. the HTTP client requests a compression encoding in the Content-Encoding -/// header (hence the need for the req_headers); +/// header (hence the need for the `req_headers`); /// /// 2. the content encoding corresponds to a compression algorithm we support; /// /// 3. the Media type in the Content-Type response header is text with any /// subtype (e.g. text/plain) or application/json. 
/// -/// compress_response returns Ok on successful compression, or if not all three +/// `compress_response` returns Ok on successful compression, or if not all three /// conditions above are met. It returns Err if there was a problem decoding -/// any header in either req_headers or res, but res will remain intact. +/// any header in either `req_headers` or res, but res will remain intact. /// /// This function logs errors to stderr, but only in debug mode. No information /// is logged in release builds. @@ -506,21 +506,18 @@ async fn compress_response(req_headers: HeaderMap, res: &mu // Quick and dirty closure for extracting a header from the request and // returning it as a &str. let get_req_header = |k: header::HeaderName| -> Option<&str> { - match req_headers.get(k) { - Some(hdr) => match from_utf8(hdr.as_bytes()) { - Ok(val) => Some(val), + req_headers.get(k).and_then(|hdr| match from_utf8(hdr.as_bytes()) { + Ok(val) => Some(val), - #[cfg(debug_assertions)] - Err(e) => { - dbg_msg!(e); - None - } + #[cfg(debug_assertions)] + Err(e) => { + dbg_msg!(e); + None + } - #[cfg(not(debug_assertions))] - Err(_) => None, - }, - None => None, - } + #[cfg(not(debug_assertions))] + Err(_) => None, + }) }; // Check to see which compressor is requested, and if we can use it. @@ -692,7 +689,7 @@ mod tests { // Perform the compression. if let Err(e) = block_on(compress_response(req_headers, &mut res)) { - panic!("compress_response(req_headers, &mut res) => Err(\"{}\")", e); + panic!("compress_response(req_headers, &mut res) => Err(\"{e}\")"); }; // If the content was compressed, we expect the Content-Encoding diff --git a/src/settings.rs b/src/settings.rs index 0fd2640..95da1eb 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -38,10 +38,10 @@ const PREFS: [&str; 11] = [ // Retrieve cookies from request "Cookie" header pub async fn get(req: Request) -> Result, String> { let url = req.uri().to_string(); - template(SettingsTemplate { - prefs: Preferences::new(req), + Ok(template(&SettingsTemplate { + prefs: Preferences::new(&req), url, - }) + })) } // Set cookies using response "Set-Cookie" header @@ -69,7 +69,7 @@ pub async fn set(req: Request) -> Result, String> { let form = url::form_urlencoded::parse(&body_bytes).collect::>(); - let mut response = redirect("/settings".to_string()); + let mut response = redirect("/settings"); for &name in &PREFS { match form.get(name) { @@ -103,12 +103,11 @@ fn set_cookies_method(req: Request, remove_cookies: bool) -> Response>(); - let path = match form.get("redirect") { - Some(value) => format!("/{}", value.replace("%26", "&").replace("%23", "#")), - None => "/".to_string(), - }; + let path = form + .get("redirect") + .map_or_else(|| "/".to_string(), |value| format!("/{}", value.replace("%26", "&").replace("%23", "#"))); - let mut response = redirect(path); + let mut response = redirect(&path); for name in [PREFS.to_vec(), vec!["subscriptions", "filters"]].concat() { match form.get(name) { diff --git a/src/subreddit.rs b/src/subreddit.rs index 75e271a..ab2f301 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -74,7 +74,7 @@ pub async fn community(req: Request) -> Result, String> { } if req.param("sub").is_some() && sub_name.starts_with("u_") { - return Ok(redirect(["/user/", &sub_name[2..]].concat())); + return Ok(redirect(&["/user/", &sub_name[2..]].concat())); } // Request subreddit metadata @@ -96,75 +96,73 @@ pub async fn community(req: Request) -> Result, String> { } }; - let path = format!("/r/{}/{}.json?{}&raw_json=1", sub_name.clone(), sort, 
req.uri().query().unwrap_or_default()); + let path = format!("/r/{}/{sort}.json?{}&raw_json=1", sub_name.clone(), req.uri().query().unwrap_or_default()); let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str())); let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26").replace('+', "%2B"); let filters = get_filters(&req); // If all requested subs are filtered, we don't need to fetch posts. if sub_name.split('+').all(|s| filters.contains(s)) { - template(SubredditTemplate { + Ok(template(&SubredditTemplate { sub, posts: Vec::new(), sort: (sort, param(&path, "t").unwrap_or_default()), - ends: (param(&path, "after").unwrap_or_default(), "".to_string()), - prefs: Preferences::new(req), + ends: (param(&path, "after").unwrap_or_default(), String::new()), + prefs: Preferences::new(&req), url, redirect_url, is_filtered: true, all_posts_filtered: false, all_posts_hidden_nsfw: false, - }) + })) } else { match Post::fetch(&path, quarantined).await { Ok((mut posts, after)) => { let all_posts_filtered = filter_posts(&mut posts, &filters); let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"; - template(SubredditTemplate { + Ok(template(&SubredditTemplate { sub, posts, sort: (sort, param(&path, "t").unwrap_or_default()), ends: (param(&path, "after").unwrap_or_default(), after), - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, redirect_url, is_filtered: false, all_posts_filtered, all_posts_hidden_nsfw, - }) + })) } Err(msg) => match msg.as_str() { - "quarantined" => quarantine(req, sub_name), - "private" => error(req, format!("r/{} is a private community", sub_name)).await, - "banned" => error(req, format!("r/{} has been banned from Reddit", sub_name)).await, + "quarantined" => Ok(quarantine(&req, sub_name)), + "private" => error(req, format!("r/{sub_name} is a private community")).await, + "banned" => error(req, format!("r/{sub_name} has been banned from Reddit")).await, _ => error(req, msg).await, }, } } } -pub fn quarantine(req: Request, sub: String) -> Result, String> { +pub fn quarantine(req: &Request, sub: String) -> Response { let wall = WallTemplate { - title: format!("r/{} is quarantined", sub), + title: format!("r/{sub} is quarantined"), msg: "Please click the button below to continue to this subreddit.".to_string(), url: req.uri().to_string(), sub, prefs: Preferences::new(req), }; - Ok( - Response::builder() - .status(403) - .header("content-type", "text/html") - .body(wall.render().unwrap_or_default().into()) - .unwrap_or_default(), - ) + Response::builder() + .status(403) + .header("content-type", "text/html") + .body(wall.render().unwrap_or_default().into()) + .unwrap_or_default() } pub async fn add_quarantine_exception(req: Request) -> Result, String> { let subreddit = req.param("sub").ok_or("Invalid URL")?; let redir = param(&format!("?{}", req.uri().query().unwrap_or_default()), "redir").ok_or("Invalid URL")?; - let mut response = redirect(redir); + let mut response = redirect(&redir); response.insert_cookie( Cookie::build(&format!("allow_quaran_{}", subreddit.to_lowercase()), "true") .path("/") @@ -189,19 +187,18 @@ pub async fn subscriptions_filters(req: Request) -> Result, if sub == "random" || sub == "randnsfw" { if action.contains(&"filter".to_string()) || action.contains(&"unfilter".to_string()) { return Err("Can't filter random subreddit!".to_string()); - } else { - return Err("Can't subscribe to random subreddit!".to_string()); } + return Err("Can't subscribe to random 
subreddit!".to_string()); } let query = req.uri().query().unwrap_or_default().to_string(); - let preferences = Preferences::new(req); + let preferences = Preferences::new(&req); let mut sub_list = preferences.subscriptions; let mut filters = preferences.filters; // Retrieve list of posts for these subreddits to extract display names - let posts = json(format!("/r/{}/hot.json?raw_json=1", sub), true).await?; + let posts = json(format!("/r/{sub}/hot.json?raw_json=1"), true).await?; let display_lookup: Vec<(String, &str)> = posts["data"]["children"] .as_array() .map(|list| { @@ -226,7 +223,7 @@ pub async fn subscriptions_filters(req: Request) -> Result, display } else { // This subreddit display name isn't known, retrieve it - let path: String = format!("/r/{}/about.json?raw_json=1", part); + let path: String = format!("/r/{part}/about.json?raw_json=1"); display = json(path, true).await?; display["data"]["display_name"].as_str().ok_or_else(|| "Failed to query subreddit name".to_string())? }; @@ -257,13 +254,9 @@ pub async fn subscriptions_filters(req: Request) -> Result, // Redirect back to subreddit // check for redirect parameter if unsubscribing/unfiltering from outside sidebar - let path = if let Some(redirect_path) = param(&format!("?{}", query), "redirect") { - format!("/{}", redirect_path) - } else { - format!("/r/{}", sub) - }; + let path = param(&format!("?{query}"), "redirect").map_or_else(|| format!("/r/{sub}"), |redirect_path| format!("/{redirect_path}")); - let mut response = redirect(path); + let mut response = redirect(&path); // Delete cookie if empty, else set if sub_list.is_empty() { @@ -301,20 +294,20 @@ pub async fn wiki(req: Request) -> Result, String> { } let page = req.param("page").unwrap_or_else(|| "index".to_string()); - let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page); + let path: String = format!("/r/{sub}/wiki/{page}.json?raw_json=1"); let url = req.uri().to_string(); match json(path, quarantined).await { - Ok(response) => template(WikiTemplate { + Ok(response) => Ok(template(&WikiTemplate { sub, wiki: rewrite_urls(response["data"]["content_html"].as_str().unwrap_or("
<h3>Wiki not found</h3>
")), page, - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, - }), + })), Err(msg) => { if msg == "quarantined" { - quarantine(req, sub) + Ok(quarantine(&req, sub)) } else { error(req, msg).await } @@ -332,13 +325,13 @@ pub async fn sidebar(req: Request) -> Result, String> { } // Build the Reddit JSON API url - let path: String = format!("/r/{}/about.json?raw_json=1", sub); + let path: String = format!("/r/{sub}/about.json?raw_json=1"); let url = req.uri().to_string(); // Send a request to the url match json(path, quarantined).await { // If success, receive JSON in response - Ok(response) => template(WikiTemplate { + Ok(response) => Ok(template(&WikiTemplate { wiki: rewrite_urls(&val(&response, "description_html")), // wiki: format!( // "{}
<hr><h2>Moderators</h2><br><ul>{}</ul>
", @@ -347,12 +340,12 @@ pub async fn sidebar(req: Request) -> Result, String> { // ), sub, page: "Sidebar".to_string(), - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, - }), + })), Err(msg) => { if msg == "quarantined" { - quarantine(req, sub) + Ok(quarantine(&req, sub)) } else { error(req, msg).await } @@ -397,7 +390,7 @@ pub async fn sidebar(req: Request) -> Result, String> { // SUBREDDIT async fn subreddit(sub: &str, quarantined: bool) -> Result { // Build the Reddit JSON API url - let path: String = format!("/r/{}/about.json?raw_json=1", sub); + let path: String = format!("/r/{sub}/about.json?raw_json=1"); // Send a request to the url let res = json(path, quarantined).await?; diff --git a/src/user.rs b/src/user.rs index 0d03f2e..a57e1a9 100644 --- a/src/user.rs +++ b/src/user.rs @@ -49,38 +49,38 @@ pub async fn profile(req: Request) -> Result, String> { let filters = get_filters(&req); if filters.contains(&["u_", &username].concat()) { - template(UserTemplate { + Ok(template(&UserTemplate { user, posts: Vec::new(), sort: (sort, param(&path, "t").unwrap_or_default()), - ends: (param(&path, "after").unwrap_or_default(), "".to_string()), + ends: (param(&path, "after").unwrap_or_default(), String::new()), listing, - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, redirect_url, is_filtered: true, all_posts_filtered: false, all_posts_hidden_nsfw: false, - }) + })) } else { // Request user posts/comments from Reddit match Post::fetch(&path, false).await { Ok((mut posts, after)) => { let all_posts_filtered = filter_posts(&mut posts, &filters); let all_posts_hidden_nsfw = posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"; - template(UserTemplate { + Ok(template(&UserTemplate { user, posts, sort: (sort, param(&path, "t").unwrap_or_default()), ends: (param(&path, "after").unwrap_or_default(), after), listing, - prefs: Preferences::new(req), + prefs: Preferences::new(&req), url, redirect_url, is_filtered: false, all_posts_filtered, all_posts_hidden_nsfw, - }) + })) } // If there is an error show error page Err(msg) => error(req, msg).await, @@ -91,7 +91,7 @@ pub async fn profile(req: Request) -> Result, String> { // USER async fn user(name: &str) -> Result { // Build the Reddit JSON API path - let path: String = format!("/user/{}/about.json?raw_json=1", name); + let path: String = format!("/user/{name}/about.json?raw_json=1"); // Send a request to the url json(path, false).await.map(|res| { diff --git a/src/utils.rs b/src/utils.rs index 0f15537..7a652a5 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -48,8 +48,8 @@ impl FlairPart { // Parse type of flair match flair_type { // If flair contains emojis and text - "richtext" => match rich_flair { - Some(rich) => rich + "richtext" => rich_flair.map_or_else(Vec::new, |rich| { + rich .iter() // For each part of the flair, extract text and emojis .map(|part| { @@ -63,17 +63,18 @@ impl FlairPart { }, } }) - .collect::>(), - None => Vec::new(), - }, + .collect::>() + }), // If flair contains only text - "text" => match text_flair { - Some(text) => vec![Self { - flair_part_type: "text".to_string(), - value: text.to_string(), - }], - None => Vec::new(), - }, + "text" => text_flair.map_or_else( + Vec::new, + |text| { + vec![Self { + flair_part_type: "text".to_string(), + value: text.to_string(), + }] + }, + ), _ => Vec::new(), } } @@ -299,7 +300,7 @@ impl Post { alt_url: String::new(), width: data["thumbnail_width"].as_i64().unwrap_or_default(), height: 
data["thumbnail_height"].as_i64().unwrap_or_default(), - poster: "".to_string(), + poster: String::new(), }, media, domain: val(post, "domain"), @@ -383,7 +384,7 @@ impl std::ops::Deref for Awards { impl std::fmt::Display for Awards { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - self.iter().fold(Ok(()), |result, award| result.and_then(|_| writeln!(f, "{}", award))) + self.iter().fold(Ok(()), |result, award| result.and_then(|_| writeln!(f, "{award}"))) } } @@ -479,29 +480,29 @@ pub struct ThemeAssets; impl Preferences { // Build preferences from cookies - pub fn new(req: Request) -> Self { + pub fn new(req: &Request) -> Self { // Read available theme names from embedded css files. // Always make the default "system" theme available. let mut themes = vec!["system".to_string()]; for file in ThemeAssets::iter() { let chunks: Vec<&str> = file.as_ref().split(".css").collect(); - themes.push(chunks[0].to_owned()) + themes.push(chunks[0].to_owned()); } Self { available_themes: themes, - theme: setting(&req, "theme"), - front_page: setting(&req, "front_page"), - layout: setting(&req, "layout"), - wide: setting(&req, "wide"), - show_nsfw: setting(&req, "show_nsfw"), - blur_nsfw: setting(&req, "blur_nsfw"), - use_hls: setting(&req, "use_hls"), - hide_hls_notification: setting(&req, "hide_hls_notification"), - autoplay_videos: setting(&req, "autoplay_videos"), - comment_sort: setting(&req, "comment_sort"), - post_sort: setting(&req, "post_sort"), - subscriptions: setting(&req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), - filters: setting(&req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), + theme: setting(req, "theme"), + front_page: setting(req, "front_page"), + layout: setting(req, "layout"), + wide: setting(req, "wide"), + show_nsfw: setting(req, "show_nsfw"), + blur_nsfw: setting(req, "blur_nsfw"), + use_hls: setting(req, "use_hls"), + hide_hls_notification: setting(req, "hide_hls_notification"), + autoplay_videos: setting(req, "autoplay_videos"), + comment_sort: setting(req, "comment_sort"), + post_sort: setting(req, "post_sort"), + subscriptions: setting(req, "subscriptions").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), + filters: setting(req, "filters").split('+').map(String::from).filter(|s| !s.is_empty()).collect(), } } } @@ -530,7 +531,7 @@ pub fn filter_posts(posts: &mut Vec, filters: &HashSet) -> bool { // Grab a query parameter from a url pub fn param(path: &str, value: &str) -> Option { Some( - Url::parse(format!("https://libredd.it/{}", path).as_str()) + Url::parse(format!("https://libredd.it/{path}").as_str()) .ok()? 
.query_pairs() .into_owned() @@ -547,11 +548,7 @@ pub fn setting(req: &Request, name: &str) -> String { .cookie(name) .unwrap_or_else(|| { // If there is no cookie for this setting, try receiving a default from an environment variable - if let Ok(default) = std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())) { - Cookie::new(name, default) - } else { - Cookie::named(name) - } + std::env::var(format!("LIBREDDIT_DEFAULT_{}", name.to_uppercase())).map_or_else(|_| Cookie::named(name), |default| Cookie::new(name, default)) }) .value() .to_string() @@ -560,11 +557,11 @@ pub fn setting(req: &Request, name: &str) -> String { // Detect and redirect in the event of a random subreddit pub async fn catch_random(sub: &str, additional: &str) -> Result, String> { if sub == "random" || sub == "randnsfw" { - let new_sub = json(format!("/r/{}/about.json?raw_json=1", sub), false).await?["data"]["display_name"] + let new_sub = json(format!("/r/{sub}/about.json?raw_json=1"), false).await?["data"]["display_name"] .as_str() .unwrap_or_default() .to_string(); - Ok(redirect(format!("/r/{}{}", new_sub, additional))) + Ok(redirect(&format!("/r/{new_sub}{additional}"))) } else { Err("No redirect needed".to_string()) } @@ -699,22 +696,22 @@ pub fn val(j: &Value, k: &str) -> String { // NETWORKING // -pub fn template(t: impl Template) -> Result, String> { - Ok( - Response::builder() - .status(200) - .header("content-type", "text/html") - .body(t.render().unwrap_or_default().into()) - .unwrap_or_default(), - ) +pub fn template(t: &impl Template) -> Response { + // Ok( + Response::builder() + .status(200) + .header("content-type", "text/html") + .body(t.render().unwrap_or_default().into()) + .unwrap_or_default() + // ) } -pub fn redirect(path: String) -> Response { +pub fn redirect(path: &str) -> Response { Response::builder() .status(302) .header("content-type", "text/html") - .header("Location", &path) - .body(format!("Redirecting to {0}...", path).into()) + .header("Location", path) + .body(format!("Redirecting to {path}...").into()) .unwrap_or_default() } @@ -723,7 +720,7 @@ pub async fn error(req: Request, msg: impl ToString) -> Result
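Note: the template() and redirect() signature changes above (taking &impl Template and &str rather than owned values) follow the borrow-don't-own pattern that clippy nudges toward with needless_pass_by_value. A minimal standalone sketch under that assumption; location_header() is a hypothetical helper, not part of this codebase:

fn location_header(path: &str) -> String {
	// Borrowing &str lets callers pass either a literal or an existing String
	// without cloning or giving up ownership.
	format!("Location: {path}")
}

fn main() {
	let stored = String::from("/settings");
	println!("{}", location_header(&stored)); // borrow an owned String
	println!("{}", location_header("/wiki")); // pass a literal directly
}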