From 2385fa33ec8052cd391629b55bceb5a48489eb42 Mon Sep 17 00:00:00 2001
From: spikecodes <19519553+spikecodes@users.noreply.github.com>
Date: Fri, 15 Jan 2021 21:26:51 -0800
Subject: [PATCH] Use ureq until AWC IO error is fixed

---
 Cargo.lock   |  57 +++++++++++++++++--
 Cargo.toml   |   1 +
 src/utils.rs | 152 +++++++++++++++++++++++++--------------------------
 3 files changed, 128 insertions(+), 82 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index a42651b..38a5fe5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -31,7 +31,7 @@ dependencies = [
  "futures-util",
  "http",
  "log",
- "rustls",
+ "rustls 0.18.1",
  "tokio-rustls",
  "trust-dns-proto",
  "trust-dns-resolver",
@@ -193,10 +193,10 @@ dependencies = [
  "actix-service",
  "actix-utils",
  "futures-util",
- "rustls",
+ "rustls 0.18.1",
  "tokio-rustls",
  "webpki",
- "webpki-roots",
+ "webpki-roots 0.20.0",
 ]
 
 [[package]]
@@ -249,7 +249,7 @@ dependencies = [
  "mime",
  "pin-project 1.0.4",
  "regex",
- "rustls",
+ "rustls 0.18.1",
  "serde",
  "serde_json",
  "serde_urlencoded",
@@ -393,7 +393,7 @@ dependencies = [
  "mime",
  "percent-encoding",
  "rand",
- "rustls",
+ "rustls 0.18.1",
  "serde",
  "serde_json",
  "serde_urlencoded",
@@ -529,6 +529,12 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "chunked_transfer"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7477065d45a8fe57167bf3cf8bcd3729b54cfcb81cca49bda2d038ea89ae82ca"
+
 [[package]]
 name = "const_fn"
 version = "0.4.5"
@@ -986,6 +992,7 @@ dependencies = [
  "serde",
  "serde_json",
  "time",
+ "ureq",
  "url",
 ]
 
@@ -1413,6 +1420,19 @@ dependencies = [
  "webpki",
 ]
 
+[[package]]
+name = "rustls"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "064fd21ff87c6e87ed4506e68beb42459caa4a0e2eb144932e6776768556980b"
+dependencies = [
+ "base64 0.13.0",
+ "log",
+ "ring",
+ "sct",
+ "webpki",
+]
+
 [[package]]
 name = "ryu"
 version = "1.0.5"
@@ -1749,7 +1769,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e12831b255bcfa39dc0436b01e19fea231a37db570686c06ee72c423479f889a"
 dependencies = [
  "futures-core",
- "rustls",
+ "rustls 0.18.1",
  "tokio",
  "webpki",
 ]
@@ -1890,6 +1910,22 @@ version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
 
+[[package]]
+name = "ureq"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96014ded8c85822677daee4f909d18acccca744810fd4f8ffc492c284f2324bc"
+dependencies = [
+ "base64 0.13.0",
+ "chunked_transfer",
+ "log",
+ "once_cell",
+ "rustls 0.19.0",
+ "url",
+ "webpki",
+ "webpki-roots 0.21.0",
+]
+
 [[package]]
 name = "url"
 version = "2.2.0"
@@ -1997,6 +2033,15 @@ dependencies = [
  "webpki",
 ]
 
+[[package]]
+name = "webpki-roots"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82015b7e0b8bad8185994674a13a93306bea76cf5a16c5a181382fd3a5ec2376"
+dependencies = [
+ "webpki",
+]
+
 [[package]]
 name = "widestring"
 version = "0.4.3"
diff --git a/Cargo.toml b/Cargo.toml
index e35d9e5..dce6390 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,6 +11,7 @@ edition = "2018"
 base64 = "0.13.0"
 actix-web = { version = "3.3.2", features = ["rustls"] }
 askama = "0.10.5"
+ureq = "2.0.1"
 serde = { version = "1.0.118", default_features = false, features = ["derive"] }
 serde_json = "1.0"
"1.0" async-recursion = "0.3.1" diff --git a/src/utils.rs b/src/utils.rs index c6e957b..7154224 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -363,85 +363,85 @@ pub async fn request(path: &str) -> Result { let url = format!("https://www.reddit.com{}", path); // Send request using awc - async fn send(url: &str) -> Result { - let client = actix_web::client::Client::default(); - let response = client.get(url).header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"))).send().await; + // async fn send(url: &str) -> Result { + // let client = actix_web::client::Client::default(); + // let response = client.get(url).header("User-Agent", format!("web:libreddit:{}", env!("CARGO_PKG_VERSION"))).send().await; - match response { - Ok(mut payload) => { - // Get first number of response HTTP status code - match payload.status().to_string().chars().next() { - // If success - Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap_or_default().to_vec()).unwrap_or_default()), - // If redirection - Some('3') => match payload.headers().get("location") { - Some(location) => Err((true, location.to_str().unwrap_or_default().to_string())), - None => Err((false, "Page not found".to_string())), - }, - // Otherwise - _ => Err((false, "Page not found".to_string())), - } - } - Err(e) => { dbg!(e); Err((false, "Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())) }, - } - } - - // Print error if debugging then return error based on error message - fn err(url: String, msg: String) -> Result { - // #[cfg(debug_assertions)] - dbg!(format!("{} - {}", url, msg)); - Err(msg) - }; - - // Parse JSON from body. If parsing fails, return error - fn json(url: String, body: String) -> Result { - match from_str(body.as_str()) { - Ok(json) => Ok(json), - Err(_) => err(url, "Failed to parse page JSON data".to_string()), - } - } - - // Make request to Reddit using send function - match send(&url).await { - // If success, parse and return body - Ok(body) => json(url, body), - // Follow any redirects - Err((true, location)) => match send(location.as_str()).await { - // If success, parse and return body - Ok(body) => json(url, body), - // Follow any redirects again - Err((true, location)) => err(url, location), - // Return errors if request fails - Err((_, msg)) => err(url, msg), - }, - // Return errors if request fails - Err((_, msg)) => err(url, msg), - } - - // Send request using ureq - // match ureq::get(&url).call() { - // // If response is success - // Ok(response) => { - // // Parse the response from Reddit as JSON - // match from_str(&response.into_string().unwrap()) { - // Ok(json) => Ok(json), - // Err(_) => { - // #[cfg(debug_assertions)] - // dbg!(format!("{} - Failed to parse page JSON data", url)); - // Err("Failed to parse page JSON data".to_string()) + // match response { + // Ok(mut payload) => { + // // Get first number of response HTTP status code + // match payload.status().to_string().chars().next() { + // // If success + // Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap_or_default().to_vec()).unwrap_or_default()), + // // If redirection + // Some('3') => match payload.headers().get("location") { + // Some(location) => Err((true, location.to_str().unwrap_or_default().to_string())), + // None => Err((false, "Page not found".to_string())), + // }, + // // Otherwise + // _ => Err((false, "Page not found".to_string())), // } // } - // } - // // If response is error - // Err(ureq::Error::Status(_, 
+	// 	match response {
+	// 		Ok(mut payload) => {
+	// 			// Get first number of response HTTP status code
+	// 			match payload.status().to_string().chars().next() {
+	// 				// If success
+	// 				Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap_or_default().to_vec()).unwrap_or_default()),
+	// 				// If redirection
+	// 				Some('3') => match payload.headers().get("location") {
+	// 					Some(location) => Err((true, location.to_str().unwrap_or_default().to_string())),
+	// 					None => Err((false, "Page not found".to_string())),
+	// 				},
+	// 				// Otherwise
+	// 				_ => Err((false, "Page not found".to_string())),
+	// 			}
+	// 		}
+	// 		Err(e) => { dbg!(e); Err((false, "Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())) },
+	// 	}
+	// }
+
+	// // Print error if debugging then return error based on error message
+	// fn err(url: String, msg: String) -> Result<Value, String> {
+	// 	// #[cfg(debug_assertions)]
+	// 	dbg!(format!("{} - {}", url, msg));
+	// 	Err(msg)
+	// };
+
+	// // Parse JSON from body. If parsing fails, return error
+	// fn json(url: String, body: String) -> Result<Value, String> {
+	// 	match from_str(body.as_str()) {
+	// 		Ok(json) => Ok(json),
+	// 		Err(_) => err(url, "Failed to parse page JSON data".to_string()),
+	// 	}
+	// }
+
+	// // Make request to Reddit using send function
+	// match send(&url).await {
+	// 	// If success, parse and return body
+	// 	Ok(body) => json(url, body),
+	// 	// Follow any redirects
+	// 	Err((true, location)) => match send(location.as_str()).await {
+	// 		// If success, parse and return body
+	// 		Ok(body) => json(url, body),
+	// 		// Follow any redirects again
+	// 		Err((true, location)) => err(url, location),
+	// 		// Return errors if request fails
+	// 		Err((_, msg)) => err(url, msg),
+	// 	},
+	// 	// Return errors if request fails
+	// 	Err((_, msg)) => err(url, msg),
+	// }
+
+	// Send request using ureq
+	match ureq::get(&url).call() {
+		// If response is success
+		Ok(response) => {
+			// Parse the response from Reddit as JSON
+			match from_str(&response.into_string().unwrap()) {
+				Ok(json) => Ok(json),
+				Err(_) => {
+					#[cfg(debug_assertions)]
+					dbg!(format!("{} - Failed to parse page JSON data", url));
+					Err("Failed to parse page JSON data".to_string())
+				}
+			}
+		}
+		// If response is error
+		Err(ureq::Error::Status(_, _)) => {
+			#[cfg(debug_assertions)]
+			dbg!(format!("{} - Page not found", url));
+			Err("Page not found".to_string())
+		}
+		// If failed to send request
+		Err(e) => {
+			dbg!(format!("{} - {}", url, e));
+			Err("Couldn't send request to Reddit, this instance may be being rate-limited. Try another.".to_string())
+		}
+	}
 }
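
A minimal standalone sketch of the ureq call pattern this patch switches to; ureq is a blocking client, unlike the awc client it replaces. This is illustration only and not part of the patch: the helper name, the example URL, and the ureq 2.x / serde_json 1.x dependencies are assumptions.

use serde_json::Value;

// Fetch a URL with ureq and parse the body as JSON, mapping every failure to a String error.
fn fetch_json(url: &str) -> Result<Value, String> {
	match ureq::get(url).call() {
		// 2xx response: read the body, then parse it as JSON
		Ok(response) => response
			.into_string()
			.map_err(|e| format!("Failed to read response body: {}", e))
			.and_then(|body| serde_json::from_str(&body).map_err(|e| format!("Failed to parse page JSON data: {}", e))),
		// Non-2xx status codes are surfaced as ureq::Error::Status(code, response)
		Err(ureq::Error::Status(code, _)) => Err(format!("Request failed with status {}", code)),
		// Transport-level failures (DNS, TLS, connection, ...)
		Err(e) => Err(format!("Couldn't send request: {}", e)),
	}
}

fn main() {
	// Hypothetical usage; any JSON endpoint works here
	match fetch_json("https://www.reddit.com/r/rust/about.json") {
		Ok(json) => println!("{}", json["data"]["subscribers"]),
		Err(msg) => eprintln!("{}", msg),
	}
}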