1
0
mirror of https://github.com/spikecodes/libreddit synced 2024-11-10 20:29:35 +01:00

Handle Unwrapping Errors

This commit is contained in:
spikecodes 2021-01-03 21:31:21 -08:00
parent 5934e34ea0
commit c046d00060
5 changed files with 32 additions and 24 deletions

View File

@@ -46,16 +46,16 @@ async fn media(data: &serde_json::Value) -> (String, String) {
let post_type: &str; let post_type: &str;
let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() { let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
post_type = "video"; post_type = "video";
format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap().to_string()) format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default().to_string())
} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() { } else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
post_type = "video"; post_type = "video";
format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap().to_string()) format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default().to_string())
} else if data["post_hint"].as_str().unwrap_or("") == "image" { } else if data["post_hint"].as_str().unwrap_or("") == "image" {
post_type = "image"; post_type = "image";
format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap().to_string()) format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap_or_default().to_string())
} else { } else {
post_type = "link"; post_type = "link";
data["url"].as_str().unwrap().to_string() data["url"].as_str().unwrap_or_default().to_string()
}; };
(post_type.to_string(), url) (post_type.to_string(), url)
@@ -67,9 +67,9 @@ async fn parse_post(json: &serde_json::Value) -> Result<Post, &'static str> {
let post: &serde_json::Value = &json["data"]["children"][0]; let post: &serde_json::Value = &json["data"]["children"][0];
// Grab UTC time as unix timestamp // Grab UTC time as unix timestamp
let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap().round() as i64; let unix_time: i64 = post["data"]["created_utc"].as_f64().unwrap_or_default().round() as i64;
// Parse post score and upvote ratio // Parse post score and upvote ratio
let score = post["data"]["score"].as_i64().unwrap(); let score = post["data"]["score"].as_i64().unwrap_or_default();
let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0; let ratio: f64 = post["data"]["upvote_ratio"].as_f64().unwrap_or(1.0) * 100.0;
// Determine the type of media along with the media URL // Determine the type of media along with the media URL

View File

@@ -21,7 +21,7 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse>
match decode(b64) { match decode(b64) {
Ok(bytes) => { Ok(bytes) => {
let media = String::from_utf8(bytes).unwrap(); let media = String::from_utf8(bytes).unwrap_or_default();
match Url::parse(media.as_str()) { match Url::parse(media.as_str()) {
Ok(url) => { Ok(url) => {

View File

@@ -58,7 +58,7 @@ pub async fn wiki(req: HttpRequest) -> HttpResponse {
Ok(res) => { Ok(res) => {
let s = WikiTemplate { let s = WikiTemplate {
sub: sub.to_string(), sub: sub.to_string(),
wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap()), wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
page: page.to_string(), page: page.to_string(),
} }
.render() .render()

View File

@@ -62,14 +62,14 @@ async fn user(name: &str) -> Result<User, &'static str> {
} }
// Grab creation date as unix timestamp // Grab creation date as unix timestamp
let created: i64 = res["data"]["created"].as_f64().unwrap().round() as i64; let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
// Parse the JSON output into a User struct // Parse the JSON output into a User struct
Ok(User { Ok(User {
name: name.to_string(), name: name.to_string(),
title: nested_val(&res, "subreddit", "title"), title: nested_val(&res, "subreddit", "title"),
icon: format_url(nested_val(&res, "subreddit", "icon_img")), icon: format_url(nested_val(&res, "subreddit", "icon_img")),
karma: res["data"]["total_karma"].as_i64().unwrap(), karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
created: Utc.timestamp(created, 0).format("%b %e, %Y").to_string(), created: Utc.timestamp(created, 0).format("%b %e, %Y").to_string(),
banner: nested_val(&res, "subreddit", "banner_img"), banner: nested_val(&res, "subreddit", "banner_img"),
description: nested_val(&res, "subreddit", "public_description"), description: nested_val(&res, "subreddit", "public_description"),

View File

@@ -266,7 +266,8 @@ pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
// let body = res.body_string().await.unwrap(); // let body = res.body_string().await.unwrap();
// --- reqwest --- // --- reqwest ---
let res = reqwest::get(&url).await.unwrap(); match reqwest::get(&url).await {
Ok(res) => {
// Read the status from the response // Read the status from the response
match res.status().is_success() { match res.status().is_success() {
true => { true => {
@@ -286,4 +287,11 @@ pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
Err("Page not found") Err("Page not found")
} }
} }
},
Err(e) => {
#[cfg(debug_assertions)]
dbg!(format!("{} - {}", url, e));
Err("Couldn't send request to Reddit")
}
}
} }