From 541c741bde742146b59b210645d53658d28d300a Mon Sep 17 00:00:00 2001
From: spikecodes <19519553+spikecodes@users.noreply.github.com>
Date: Mon, 11 Jan 2021 17:47:14 -0800
Subject: [PATCH] Parse GIFs correctly

---
 src/post.rs      |  2 +-
 src/subreddit.rs |  2 +-
 src/user.rs      |  2 +-
 src/utils.rs     | 48 +++++++++++-------------------------------------
 4 files changed, 14 insertions(+), 40 deletions(-)

diff --git a/src/post.rs b/src/post.rs
index 294be41..d658db3 100644
--- a/src/post.rs
+++ b/src/post.rs
@@ -91,7 +91,7 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 		score: format_num(score),
 		upvote_ratio: ratio as i64,
 		post_type,
-		thumbnail: format_url(val(post, "thumbnail")),
+		thumbnail: format_url(val(post, "thumbnail").as_str()),
 		flair: Flair(
 			val(post, "link_flair_text"),
 			val(post, "link_flair_background_color"),
diff --git a/src/subreddit.rs b/src/subreddit.rs
index fe5d5e4..cda7404 100644
--- a/src/subreddit.rs
+++ b/src/subreddit.rs
@@ -105,7 +105,7 @@ async fn subreddit(sub: &str) -> Result {
 		title: val(&res, "title"),
 		description: val(&res, "public_description"),
 		info: rewrite_url(&val(&res, "description_html").replace("\\", "")),
-		icon: format_url(icon),
+		icon: format_url(icon.as_str()),
 		members: format_num(members),
 		active: format_num(active),
 		wiki: res["data"]["wiki_enabled"].as_bool().unwrap_or_default(),
diff --git a/src/user.rs b/src/user.rs
index 8672802..ea54f55 100644
--- a/src/user.rs
+++ b/src/user.rs
@@ -62,7 +62,7 @@ async fn user(name: &str) -> Result {
 	Ok(User {
 		name: name.to_string(),
 		title: nested_val(&res, "subreddit", "title"),
-		icon: format_url(nested_val(&res, "subreddit", "icon_img")),
+		icon: format_url(nested_val(&res, "subreddit", "icon_img").as_str()),
 		karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
 		created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
 		banner: nested_val(&res, "subreddit", "banner_img"),
diff --git a/src/utils.rs b/src/utils.rs
index e2ebb1e..e925bb7 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -134,7 +134,7 @@ pub fn cookie(req: &HttpRequest, name: &str) -> String {
 }
 
 // Direct urls to proxy if proxy is enabled
-pub fn format_url(url: String) -> String {
+pub fn format_url(url: &str) -> String {
 	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
 		String::new()
 	} else {
@@ -163,13 +163,17 @@ pub async fn media(data: &serde_json::Value) -> (String, String) {
 	let post_type: &str;
 	let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
 		post_type = "video";
-		format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default().to_string())
+		format_url(data["preview"]["reddit_video_preview"]["fallback_url"].as_str().unwrap_or_default())
 	} else if !data["secure_media"]["reddit_video"]["fallback_url"].is_null() {
 		post_type = "video";
-		format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default().to_string())
+		format_url(data["secure_media"]["reddit_video"]["fallback_url"].as_str().unwrap_or_default())
 	} else if data["post_hint"].as_str().unwrap_or("") == "image" {
+		let preview = data["preview"]["images"][0].clone();
 		post_type = "image";
-		format_url(data["preview"]["images"][0]["source"]["url"].as_str().unwrap_or_default().to_string())
+		match preview["variants"]["mp4"].as_object() {
+			Some(gif) => format_url(gif["source"]["url"].as_str().unwrap_or_default()),
+			None => format_url(preview["source"]["url"].as_str().unwrap_or_default())
+		}
 	} else if data["is_self"].as_bool().unwrap_or_default() {
 		post_type = "self";
 		data["permalink"].as_str().unwrap_or_default().to_string()
@@ -178,7 +182,7 @@ pub async fn media(data: &serde_json::Value) -> (String, String) {
 		data["url"].as_str().unwrap_or_default().to_string()
 	};
 
-	(post_type.to_string(), url)
+	(post_type.to_string(), url.to_string())
 }
 
 //
@@ -242,7 +246,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec HttpResponse {
 pub async fn request(path: &str) -> Result {
 	let url = format!("https://www.reddit.com{}", path);
 
-	// match reqwest::get(&url).await {
-	// 	Ok(res) => {
-	// 		// Read the status from the response
-	// 		match res.status().is_success() {
-	// 			true => {
-	// 				// Parse the response from Reddit as JSON
-	// 				match from_str(res.text().await.unwrap_or_default().as_str()) {
-	// 					Ok(json) => Ok(json),
-	// 					Err(_) => {
-	// 						#[cfg(debug_assertions)]
-	// 						dbg!(format!("{} - Failed to parse page JSON data", url));
-	// 						Err("Failed to parse page JSON data")
-	// 					}
-	// 				}
-	// 			}
-	// 			// If Reddit returns error, tell user Page Not Found
-	// 			false => {
-	// 				#[cfg(debug_assertions)]
-	// 				dbg!(format!("{} - Page not found", url));
-	// 				Err("Page not found")
-	// 			}
-	// 		}
-	// 	}
-	// 	// If can't send request to Reddit, return this to user
-	// 	Err(_e) => {
-	// 		#[cfg(debug_assertions)]
-	// 		dbg!(format!("{} - {}", url, _e));
-	// 		Err("Couldn't send request to Reddit")
-	// 	}
-	// }
-	// Send request using reqwest
+	// Send request using ureq
 	match ureq::get(&url).call() {
 		// If response is success
 		Ok(response) => {