diff --git a/src/post.rs b/src/post.rs
index 9616bbc..de78253 100644
--- a/src/post.rs
+++ b/src/post.rs
@@ -46,16 +46,16 @@ async fn media(data: &serde_json::Value) -> (String, String) {
   let post_type: &str;
   let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
     post_type = "video";
-    format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string())
+    format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default().to_string())
   } else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
     post_type = "video";
-    format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string())
+    format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default().to_string())
   } else if data["post_hint"].as_str().unwrap_or("") == "image" {
     post_type = "image";
-    format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string())
+    format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap_or_default().to_string())
   } else {
     post_type = "link";
-    data["url"].as_str().unwrap().to_string()
+    data["url"].as_str().unwrap_or_default().to_string()
   };
 
   (post_type.to_string(), url)
@@ -67,9 +67,9 @@ async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
   let post: &serde_json::Value = &json["data"]["children"][0];
 
   // Grab UTC time as unix timestamp
-  let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64;
+  let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
   // Parse post score and upvote ratio
-  let score = post["data"]["score"].as_i64().unwrap();
+  let score = post["data"]["score"].as_i64().unwrap_or_default();
   let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
 
   // Determine the type of media along with the media URL
diff --git a/src/proxy.rs b/src/proxy.rs
index 3f16d28..786617f 100644
--- a/src/proxy.rs
+++ b/src/proxy.rs
@@ -21,7 +21,7 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> {
 
   match decode(b64) {
     Ok(bytes) => {
-      let media = String::from_utf8(bytes).unwrap();
+      let media = String::from_utf8(bytes).unwrap_or_default();
 
       match Url::parse(media.as_str()) {
         Ok(url) => {
diff --git a/src/subreddit.rs b/src/subreddit.rs
index 8ce317a..e306b73 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -58,7 +58,7 @@ pub async fn wiki(req: HttpRequest) -> HttpResponse {
     Ok(res) => {
       let s = WikiTemplate {
         sub: sub.to_string(),
-        wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap()),
+        wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
         page: page.to_string(),
       }
       .render()
diff --git a/src/user.rs b/src/user.rs
index eec3f00..3073bf3 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -62,14 +62,14 @@ async fn user(name: &str) -> Result<User, &'static str> {
   }
 
   // Grab creation date as unix timestamp
-  let created: i64 = res["data"]["created"].as_f64().unwrap().round() as i64;
+  let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
 
   // Parse the JSON output into a User struct
   Ok(User {
     name: name.to_string(),
     title: nested_val(&res, "subreddit", "title"),
     icon: format_url(nested_val(&res, "subreddit", "icon_img")),
-    karma: res["data"]["total_karma"].as_i64().unwrap(),
+    karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
     created: Utc.timestamp(created, 0).format("%b %e, %Y").to_string(),
     banner: nested_val(&res, "subreddit", "banner_img"),
     description: nested_val(&res, "subreddit", "public_description"),
diff --git a/src/utils.rs b/src/utils.rs
index df0e5bd..6b96b0d 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -266,24 +266,32 @@ pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
   // let body = res.body_string().await.unwrap();
 
   // --- reqwest ---
-  let res = reqwest::get(&url).await.unwrap();
-  // Read the status from the response
-  match res.status().is_success() {
-    true => {
-      // Parse the response from Reddit as JSON
-      match from_str(res.text().await.unwrap_or_default().as_str()) {
-        Ok(json) => Ok(json),
-        Err(_) => {
+  match reqwest::get(&url).await {
+    Ok(res) => {
+      // Read the status from the response
+      match res.status().is_success() {
+        true => {
+          // Parse the response from Reddit as JSON
+          match from_str(res.text().await.unwrap_or_default().as_str()) {
+            Ok(json) => Ok(json),
+            Err(_) => {
+              #[cfg(debug_assertions)]
+              dbg!(format!("{} - Failed to parse page JSON data", url));
+              Err("Failed to parse page JSON data")
+            }
+          }
+        }
+        false => {
           #[cfg(debug_assertions)]
-          dbg!(format!("{} - Failed to parse page JSON data", url));
-          Err("Failed to parse page JSON data")
+          dbg!(format!("{} - Page not found", url));
+          Err("Page not found")
        }
      }
-    }
-    false => {
+    },
+    Err(e) => {
+      #[cfg(debug_assertions)]
-      dbg!(format!("{} - Page not found", url));
-      Err("Page not found")
+      dbg!(format!("{} - {}", url, e));
+      Err("Couldn't send request to Reddit")
     }
   }
 }