Mirror of https://github.com/spikecodes/libreddit, synced 2024-12-23 17:15:13 +01:00
Optimize use of .unwrap()
parent 59ef30c76d
commit 2f2ed6169d
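
The change applies one pattern throughout the diff below: instead of checking a Result with .is_err() and then calling .unwrap() or .err().unwrap(), each call site matches on the Result once and handles both arms. A minimal standalone sketch of the before/after, not taken from the commit (fetch_number is a hypothetical stand-in for the request and fetch_posts helpers touched here):

// Illustrative sketch only: fetch_number is a made-up stand-in for the
// request/fetch_posts helpers changed by this commit.
fn fetch_number(input: &str) -> Result<i64, &'static str> {
	input.trim().parse::<i64>().map_err(|_| "Failed to parse input")
}

fn main() {
	// Before: check is_err(), then call .unwrap()/.err().unwrap(),
	// which panics if the check and the unwrap ever drift apart.
	let req = fetch_number("42");
	if req.is_err() {
		println!("{}", req.err().unwrap());
	} else {
		println!("got {}", req.unwrap());
	}

	// After: a single match covers both arms with no panic path.
	match fetch_number("not a number") {
		Ok(n) => println!("got {}", n),
		Err(msg) => println!("{}", msg),
	}
}

The match form removes the panic path that the check-then-unwrap sequence left open.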
src/post.rs (27 changed lines)
@@ -24,24 +24,21 @@ pub async fn item(req: HttpRequest) -> Result<HttpResponse> {
 	// Log the post ID being fetched in debug mode
 	#[cfg(debug_assertions)]
 	dbg!(&id);
 
 	// Send a request to the url, receive JSON in response
-	let req = request(&path).await;
-
-	// If the Reddit API returns an error, exit and send error page to user
-	if req.is_err() {
-		error(req.err().unwrap().to_string()).await
-	} else {
+	match request(&path).await {
 		// Otherwise, grab the JSON output from the request
-		let res = req.unwrap();
+		Ok(res) => {
+			// Parse the JSON into Post and Comment structs
+			let post = parse_post(&res[0]).await.unwrap();
+			let comments = parse_comments(&res[1]).await.unwrap();
 
-		// Parse the JSON into Post and Comment structs
-		let post = parse_post(&res[0]).await.unwrap();
-		let comments = parse_comments(&res[1]).await.unwrap();
-
-		// Use the Post and Comment structs to generate a website to show users
-		let s = PostTemplate { comments, post, sort }.render().unwrap();
-		Ok(HttpResponse::Ok().content_type("text/html").body(s))
+			// Use the Post and Comment structs to generate a website to show users
+			let s = PostTemplate { comments, post, sort }.render().unwrap();
+			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+		},
+		// If the Reddit API returns an error, exit and send error page to user
+		Err(msg) => error(msg.to_string()).await
 	}
 }
src/search.rs
@@ -26,22 +26,19 @@ pub async fn find(req: HttpRequest) -> Result<HttpResponse> {
 	};
 	let sub = req.match_info().get("sub").unwrap_or("").to_string();
 
-	let posts = fetch_posts(&path, String::new()).await;
-
-	if posts.is_err() {
-		error(posts.err().unwrap().to_string()).await
-	} else {
-		let items = posts.unwrap();
-
-		let s = SearchTemplate {
-			posts: items.0,
-			query: q,
-			sub,
-			sort: (sort, param(&path, "t")),
-			ends: (param(&path, "after"), items.1),
-		}
-		.render()
-		.unwrap();
-		Ok(HttpResponse::Ok().content_type("text/html").body(s))
+	match fetch_posts(&path, String::new()).await {
+		Ok(posts) => {
+			let s = SearchTemplate {
+				posts: posts.0,
+				query: q,
+				sub,
+				sort: (sort, param(&path, "t")),
+				ends: (param(&path, "after"), posts.1),
+			}
+			.render()
+			.unwrap();
+			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+		},
+		Err(msg) => error(msg.to_string()).await
 	}
 }
src/subreddit.rs
@@ -2,7 +2,6 @@
 use crate::utils::{error, fetch_posts, format_num, format_url, param, request, val, Post, Subreddit};
 use actix_web::{HttpRequest, HttpResponse, Result};
 use askama::Template;
-use std::convert::TryInto;
 
 // STRUCTS
 #[derive(Template)]
@@ -22,49 +21,45 @@ pub async fn page(req: HttpRequest) -> Result<HttpResponse> {
 	let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
 
 	let sub_result = if !&sub.contains('+') && sub != "popular" {
-		subreddit(&sub).await
+		subreddit(&sub).await.unwrap_or_default()
 	} else {
-		Ok(Subreddit::default())
+		Subreddit::default()
 	};
-	let posts = fetch_posts(&path, String::new()).await;
-
-	if posts.is_err() {
-		error(posts.err().unwrap().to_string()).await
-	} else {
-		let sub = sub_result.unwrap_or_default();
-		let items = posts.unwrap();
-
-		let s = SubredditTemplate {
-			sub,
-			posts: items.0,
-			sort: (sort, param(&path, "t")),
-			ends: (param(&path, "after"), items.1),
-		}
-		.render()
-		.unwrap();
-		Ok(HttpResponse::Ok().content_type("text/html").body(s))
+	match fetch_posts(&path, String::new()).await {
+		Ok(items) => {
+			let s = SubredditTemplate {
+				sub: sub_result,
+				posts: items.0,
+				sort: (sort, param(&path, "t")),
+				ends: (param(&path, "after"), items.1),
+			}
+			.render()
+			.unwrap();
+			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+		},
+		Err(msg) => error(msg.to_string()).await
 	}
 }
 
 // SUBREDDIT
 async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
 	// Build the Reddit JSON API url
-	let url: String = format!("r/{}/about.json?raw_json=1", sub);
+	let path: String = format!("r/{}/about.json?raw_json=1", sub);
 
 	// Send a request to the url, receive JSON in response
-	let req = request(&url).await;
+	let res;
 
-	// If the Reddit API returns an error, exit this function
-	if req.is_err() {
-		return Err(req.err().unwrap());
+	// Send a request to the url
+	match request(&path).await {
+		// If success, receive JSON in response
+		Ok(response) => { res = response; },
+		// If the Reddit API returns an error, exit this function
+		Err(msg) => return Err(msg)
 	}
 
-	// Otherwise, grab the JSON output from the request
-	let res = req.unwrap();
-
 	// Metadata regarding the subreddit
-	let members = res["data"]["subscribers"].as_u64().unwrap_or(0);
-	let active = res["data"]["accounts_active"].as_u64().unwrap_or(0);
+	let members: i64 = res["data"]["subscribers"].as_u64().unwrap_or_default() as i64;
+	let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
 
 	// Fetch subreddit icon either from the community_icon or icon_img value
 	let community_icon: &str = res["data"]["community_icon"].as_str().unwrap_or("").split('?').collect::<Vec<&str>>()[0];
@@ -76,8 +71,8 @@ async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
 		description: val(&res, "public_description"),
 		info: val(&res, "description_html").replace("\\", ""),
 		icon: format_url(icon).await,
-		members: format_num(members.try_into().unwrap_or(0)),
-		active: format_num(active.try_into().unwrap_or(0)),
+		members: format_num(members),
+		active: format_num(active),
 	};
 
 	Ok(sub)
src/user.rs (46 changed lines)
@@ -25,22 +25,21 @@ pub async fn profile(req: HttpRequest) -> Result<HttpResponse> {
 	// Request user profile data and user posts/comments from Reddit
 	let user = user(&username).await;
 	let posts = fetch_posts(&path, "Comment".to_string()).await;
 
-	// If there is an error show error page
-	if user.is_err() || posts.is_err() {
-		error(user.err().unwrap().to_string()).await
-	} else {
-		let posts_unwrapped = posts.unwrap();
-
-		let s = UserTemplate {
-			user: user.unwrap(),
-			posts: posts_unwrapped.0,
-			sort: (sort, param(&path, "t")),
-			ends: (param(&path, "after"), posts_unwrapped.1),
-		}
-		.render()
-		.unwrap();
-		Ok(HttpResponse::Ok().content_type("text/html").body(s))
+	match posts {
+		Ok(items) => {
+			let s = UserTemplate {
+				user: user.unwrap(),
+				posts: items.0,
+				sort: (sort, param(&path, "t")),
+				ends: (param(&path, "after"), items.1),
+			}
+			.render()
+			.unwrap();
+			Ok(HttpResponse::Ok().content_type("text/html").body(s))
+		},
+		// If there is an error show error page
+		Err(msg) => error(msg.to_string()).await
 	}
 }
@@ -49,17 +48,16 @@ async fn user(name: &str) -> Result<User, &'static str> {
 	// Build the Reddit JSON API path
 	let path: String = format!("user/{}/about.json", name);
 
 	// Send a request to the url, receive JSON in response
-	let req = request(&path).await;
+	let res;
 
-	// If the Reddit API returns an error, exit this function
-	if req.is_err() {
-		return Err(req.err().unwrap());
+	// Send a request to the url
+	match request(&path).await {
+		// If success, receive JSON in response
+		Ok(response) => { res = response; },
+		// If the Reddit API returns an error, exit this function
+		Err(msg) => return Err(msg)
 	}
 
-	// Otherwise, grab the JSON output from the request
-	let res = req.unwrap();
-
 	// Grab creation date as unix timestamp
 	let created: i64 = res["data"]["created"].as_f64().unwrap().round() as i64;
src/utils.rs (65 changed lines)
@@ -4,7 +4,7 @@
 use actix_web::{http::StatusCode, HttpResponse, Result};
 use askama::Template;
 use chrono::{TimeZone, Utc};
-use serde_json::{from_str, Value};
+use serde_json::{from_str};
 use url::Url;
 // use surf::{client, get, middleware::Redirect};
 
@@ -135,24 +135,27 @@ pub fn val(j: &serde_json::Value, k: &str) -> String {
 
 // nested_val() function used to parse JSON from Reddit APIs
 pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
-	String::from(j["data"][n][k].as_str().unwrap())
+	String::from(j["data"][n][k].as_str().unwrap_or_default())
 }
 
 // Fetch posts of a user or subreddit
 pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
 	// Send a request to the url, receive JSON in response
-	let req = request(path).await;
+	let res;
+	let post_list;
 
-	// If the Reddit API returns an error, exit this function
-	if req.is_err() {
-		return Err(req.err().unwrap());
+	// Send a request to the url
+	match request(&path).await {
+		// If success, receive JSON in response
+		Ok(response) => { res = response; },
+		// If the Reddit API returns an error, exit this function
+		Err(msg) => return Err(msg)
 	}
 
-	// Otherwise, grab the JSON output from the request
-	let res = req.unwrap();
-
 	// Fetch the list of posts from the JSON response
-	let post_list = res["data"]["children"].as_array().unwrap();
+	match res["data"]["children"].as_array() {
+		Some(list) => { post_list = list },
+		None => { return Err("No posts found") }
+	}
 
 	let mut posts: Vec<Post> = Vec::new();
 
@@ -162,8 +165,8 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 		} else {
 			String::new()
 		};
-		let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64;
-		let score = post["data"]["score"].as_i64().unwrap();
+		let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
+		let score = post["data"]["score"].as_i64().unwrap_or_default();
 		let title = val(post, "title");
 
 		posts.push(Post {
@@ -206,7 +209,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 
 pub async fn error(message: String) -> Result<HttpResponse> {
 	let msg = if message.is_empty() { "Page not found".to_string() } else { message };
-	let body = ErrorTemplate { message: msg }.render().unwrap();
+	let body = ErrorTemplate { message: msg }.render().unwrap_or_default();
 	Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body))
 }
 
@@ -236,22 +239,22 @@ pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
 	// --- reqwest ---
 	let res = reqwest::get(&url).await.unwrap();
-	// Read the status from the response
-	let success = res.status().is_success();
-	// Read the body of the response
-	let body = res.text().await.unwrap();
-
-	// Parse the response from Reddit as JSON
-	let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);
-
-	if !success {
-		#[cfg(debug_assertions)]
-		dbg!(format!("{} - Page not found", url));
-		Err("Page not found")
-	} else if json == Value::Null {
-		#[cfg(debug_assertions)]
-		dbg!(format!("{} - Failed to parse page JSON data", url));
-		Err("Failed to parse page JSON data")
-	} else {
-		Ok(json)
+	match res.status().is_success() {
+		true => {
+			// Parse the response from Reddit as JSON
+			match from_str(res.text().await.unwrap_or_default().as_str()) {
+				Ok(json) => Ok(json),
+				Err(_) => {
+					#[cfg(debug_assertions)]
+					dbg!(format!("{} - Failed to parse page JSON data", url));
+					Err("Failed to parse page JSON data")
+				}
+			}
+		},
+		false => {
+			#[cfg(debug_assertions)]
+			dbg!(format!("{} - Page not found", url));
+			Err("Page not found")
+		}
 	}
 }
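
Besides the control-flow rewrites above, the commit also replaces several bare .unwrap() calls on optional JSON fields with unwrap_or_default() or unwrap_or(0). A small std-only sketch of that fallback behaviour, not taken from the commit (the Option values stand in for serde_json accessors such as .as_u64()):

// Illustrative sketch only: the Option values stand in for serde_json
// accessors like res["data"]["subscribers"].as_u64().
fn main() {
	let present: Option<u64> = Some(1234);
	let missing: Option<u64> = None;

	// A bare .unwrap() on `missing` would panic the whole request handler;
	// unwrap_or_default() falls back to 0 (u64::default()) instead.
	let members = present.unwrap_or_default();
	let active = missing.unwrap_or_default();

	println!("members: {}, active: {}", members, active); // members: 1234, active: 0
}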