mirror of https://github.com/spikecodes/libreddit

Optimize use of Result<>

commit 7e96bb3d80
parent 0adbb1556e
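The theme of the commit: a function that can never actually fail should not return Result, because the wrapper only forces every caller into a meaningless .unwrap(). A minimal sketch of that refactor, using a hypothetical Post stand-in rather than libreddit's actual struct:

use serde_json::Value;

struct Post {
    title: String,
}

// Before: the signature claims a fallibility the body never exercises,
// so every caller pays an unwrap() for an Err that cannot happen.
fn parse_before(raw: &Value) -> Result<Post, &'static str> {
    Ok(Post {
        title: raw["title"].as_str().unwrap_or_default().to_string(),
    })
}

// After: the infallible parser returns the value directly.
fn parse_after(raw: &Value) -> Post {
    Post {
        title: raw["title"].as_str().unwrap_or_default().to_string(),
    }
}

fn main() {
    let json = serde_json::json!({ "title": "hello" });
    let a = parse_before(&json).unwrap(); // before: ceremony at every call site
    let b = parse_after(&json);           // after: none
    println!("{} / {}", a.title, b.title);
}

parse_post and parse_comments below get exactly this treatment; the changes around media() and fetch_posts are the second half of the cleanup, replacing positional tuple access with destructuring.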
src/main.rs

@@ -34,8 +34,7 @@ async fn main() -> std::io::Result<()> {
     if args.len() > 1 {
         for arg in args {
             if arg.starts_with("--address=") || arg.starts_with("-a=") {
-                let split: Vec<&str> = arg.split('=').collect();
-                address = split[1].to_string();
+                address = arg.split('=').collect::<Vec<&str>>()[1].to_string();
             }
         }
     }

@@ -81,6 +80,9 @@ async fn main() -> std::io::Result<()> {
             .route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item))
     })
     .bind(&address)
+    .map(|x| {
+        x
+    })
     .unwrap_or_else(|_| panic!("Cannot bind to the address: {}", address))
     .run()
     .await
src/post.rs (24 changed lines)
@@ -1,6 +1,6 @@
 // CRATES
 use crate::utils::{cookie, error, format_num, format_url, media, param, request, rewrite_url, val, Comment, Flags, Flair, Post};
-use actix_web::{HttpRequest, HttpResponse, Result};
+use actix_web::{HttpRequest, HttpResponse};
 
 use async_recursion::async_recursion;
 

@@ -30,8 +30,8 @@ pub async fn item(req: HttpRequest) -> HttpResponse {
         // Otherwise, grab the JSON output from the request
         Ok(res) => {
             // Parse the JSON into Post and Comment structs
-            let post = parse_post(&res[0]).await.unwrap();
-            let comments = parse_comments(&res[1]).await.unwrap();
+            let post = parse_post(&res[0]).await;
+            let comments = parse_comments(&res[1]).await;
 
             // Use the Post and Comment structs to generate a website to show users
             let s = PostTemplate {

@@ -50,7 +50,7 @@ pub async fn item(req: HttpRequest) -> HttpResponse {
 }
 
 // POSTS
-async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
+async fn parse_post(json: &serde_json::Value) -> Post {
     // Retrieve post (as opposed to comments) from JSON
     let post: &serde_json::Value = &json["data"]["children"][0];
 

@@ -61,10 +61,10 @@ async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
     let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
 
     // Determine the type of media along with the media URL
-    let media = media(&post["data"]).await;
+    let (post_type, media) = media(&post["data"]).await;
 
     // Build a post using data parsed from Reddit post API
-    Ok(Post {
+    Post {
         id: val(post, "id"),
         title: val(post, "title"),
         community: val(post, "subreddit"),

@@ -78,7 +78,7 @@ async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
         permalink: val(post, "permalink"),
         score: format_num(score),
         upvote_ratio: ratio as i64,
-        post_type: media.0,
+        post_type,
         thumbnail: format_url(val(post, "thumbnail")),
         flair: Flair(
             val(post, "link_flair_text"),

@@ -93,14 +93,14 @@ async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
             nsfw: post["data"]["over_18"].as_bool().unwrap_or(false),
             stickied: post["data"]["stickied"].as_bool().unwrap_or(false),
         },
-        media: media.1,
+        media,
         time: OffsetDateTime::from_unix_timestamp(unix_time).format("%b %d %Y %H:%M UTC"),
-    })
+    }
 }
 
 // COMMENTS
 #[async_recursion]
-async fn parse_comments(json: &serde_json::Value) -> Result<Vec<Comment>, &'static str> {
+async fn parse_comments(json: &serde_json::Value) -> Vec<Comment> {
     // Separate the comment JSON into a Vector of comments
     let comment_data = json["data"]["children"].as_array().unwrap();
 

@@ -117,7 +117,7 @@ async fn parse_comments(json: &serde_json::Value) -> Result<Vec<Comment>, &'static str> {
         let body = rewrite_url(&val(comment, "body_html"));
 
         let replies: Vec<Comment> = if comment["data"]["replies"].is_object() {
-            parse_comments(&comment["data"]["replies"]).await.unwrap_or_default()
+            parse_comments(&comment["data"]["replies"]).await
         } else {
             Vec::new()
         };

@@ -137,5 +137,5 @@ async fn parse_comments(json: &serde_json::Value) -> Result<Vec<Comment>, &'static str> {
         });
     }
 
-    Ok(comments)
+    comments
 }
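The other recurring edit in this file trades positional access to media()'s tuple (media.0, media.1) for destructuring at the call site, which also unlocks field-init shorthand in the Post literal. A sketch of why, with a hypothetical media() that only mimics the return shape (and is synchronous for brevity, unlike the real async fn):

use serde_json::Value;

// Hypothetical stand-in for libreddit's media(); only the return shape matters.
fn media(_data: &Value) -> (String, String) {
    ("image".to_string(), "https://example.com/cat.png".to_string())
}

struct Post {
    post_type: String,
    media: String,
}

fn main() {
    let data = serde_json::json!({});

    // Before: the tuple is bound whole and read positionally.
    let m = media(&data);
    let before = Post { post_type: m.0, media: m.1 };

    // After: destructure once, and field-init shorthand does the rest.
    let (post_type, media) = media(&data);
    let after = Post { post_type, media };

    println!("{} {}", before.media, after.post_type);
}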
src/search.rs

@@ -33,16 +33,16 @@ pub async fn find(req: HttpRequest) -> HttpResponse {
     let sub = req.match_info().get("sub").unwrap_or("").to_string();
 
     match fetch_posts(&path, String::new()).await {
-        Ok(posts) => HttpResponse::Ok().content_type("text/html").body(
+        Ok((posts, after)) => HttpResponse::Ok().content_type("text/html").body(
             SearchTemplate {
-                posts: posts.0,
+                posts,
                 sub,
                 params: SearchParams {
                     q: param(&path, "q"),
                     sort,
                     t: param(&path, "t"),
                     before: param(&path, "after"),
-                    after: posts.1,
+                    after,
                     restrict_sr: param(&path, "restrict_sr"),
                 },
                 layout: cookie(req, "layout"),
src/subreddit.rs

@@ -36,12 +36,12 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
     };
 
     match fetch_posts(&path, String::new()).await {
-        Ok(items) => {
+        Ok((posts, after)) => {
             let s = SubredditTemplate {
                 sub: sub_result,
-                posts: items.0,
+                posts: posts,
                 sort: (sort, param(&path, "t")),
-                ends: (param(&path, "after"), items.1),
+                ends: (param(&path, "after"), after),
                 layout: cookie(req, "layout"),
             }
             .render()
src/user.rs

@@ -29,12 +29,12 @@ pub async fn profile(req: HttpRequest) -> HttpResponse {
     let posts = fetch_posts(&path, "Comment".to_string()).await;
 
     match posts {
-        Ok(items) => {
+        Ok((posts, after)) => {
             let s = UserTemplate {
                 user: user.unwrap(),
-                posts: items.0,
+                posts,
                 sort: (sort, param(&path, "t")),
-                ends: (param(&path, "after"), items.1),
+                ends: (param(&path, "after"), after),
                 layout: cookie(req, "layout"),
             }
             .render()
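All three handlers above (find, page, profile) get the same treatment: fetch_posts returns Result<(Vec<Post>, String), &'static str>, and matching on Ok((posts, after)) names both tuple elements directly in the pattern instead of binding the tuple and reading items.0 and items.1 later. A compact sketch with a hypothetical fetcher of the same shape:

// Hypothetical fetcher with the same return shape as fetch_posts.
fn fetch(ok: bool) -> Result<(Vec<String>, String), &'static str> {
    if ok {
        Ok((vec!["post1".into(), "post2".into()], "t3_abc".into()))
    } else {
        Err("Reddit API returned an error")
    }
}

fn main() {
    match fetch(true) {
        // The tuple is unpacked right in the Ok pattern; no .0 / .1 needed.
        Ok((posts, after)) => println!("{} posts, after={}", posts.len(), after),
        Err(msg) => eprintln!("{}", msg),
    }
}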
src/utils.rs (14 changed lines)
@@ -108,7 +108,7 @@ pub fn param(path: &str, value: &str) -> String {
     pairs.get(value).unwrap_or(&String::new()).to_owned()
 }
 
-// Cookie value from request
+// Parse Cookie value from request
 pub fn cookie(req: actix_web::HttpRequest, name: &str) -> String {
     actix_web::HttpMessage::cookie(&req, name).unwrap_or_else(|| Cookie::new(name, "")).value().to_string()
 }

@@ -172,7 +172,7 @@ pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
     String::from(j["data"][n][k].as_str().unwrap_or_default())
 }
 
-// Fetch posts of a user or subreddit
+// Fetch posts of a user or subreddit and return a vector of posts and the "after" value
 pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
     let res;
     let post_list;

@@ -180,9 +180,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
     // Send a request to the url
     match request(&path).await {
         // If success, receive JSON in response
-        Ok(response) => {
-            res = response;
-        }
+        Ok(response) => { res = response; }
         // If the Reddit API returns an error, exit this function
         Err(msg) => return Err(msg),
     }

@@ -203,7 +201,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
     let title = val(post, "title");
 
     // Determine the type of media along with the media URL
-    let media = media(&post["data"]).await;
+    let (post_type, media) = media(&post["data"]).await;
 
     posts.push(Post {
         id: val(post, "id"),

@@ -218,9 +216,9 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
         ),
         score: format_num(score),
         upvote_ratio: ratio as i64,
-        post_type: media.0,
+        post_type,
         thumbnail: format_url(val(post, "thumbnail")),
-        media: media.1,
+        media,
         flair: Flair(
             val(post, "link_flair_text"),
             val(post, "link_flair_background_color"),
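A side note on the @@ -180,9 +180,7 @@ hunk: the commit compresses the match arm onto one line but keeps the explicit early return. Since the error types already line up, the ? operator would be an equivalent alternative (though not what this commit chose), sketched here synchronously with a hypothetical request():

// Hypothetical request() with the same Result shape as the one in utils.rs.
fn request(path: &str) -> Result<String, &'static str> {
    if path.is_empty() {
        Err("empty path")
    } else {
        Ok(format!("response for {}", path))
    }
}

// Equivalent to: match request(path) { Ok(r) => res = r, Err(msg) => return Err(msg) }
fn fetch(path: &str) -> Result<String, &'static str> {
    let res = request(path)?; // propagates Err and unwraps Ok in one step
    Ok(res)
}

fn main() {
    println!("{:?}", fetch("r/rust"));
}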