diff --git a/Cargo.lock b/Cargo.lock
index bc34ea7..9105b29 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1147,7 +1147,6 @@ dependencies = [
  "askama",
  "async-recursion",
  "async-std",
- "base64 0.13.0",
  "cached",
  "clap",
  "regex",
diff --git a/Cargo.toml b/Cargo.toml
index f6c62fb..91433ba 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,7 +11,6 @@ edition = "2018"
 tide = { version = "0.16.0", default-features = false, features = ["h1-server", "cookies"] }
 async-std = { version = "1.9.0", features = ["attributes"] }
 surf = { version = "2.1.0", default-features = false, features = ["curl-client", "encoding"] }
-base64 = "0.13.0"
 cached = "0.23.0"
 askama = { version = "0.10.5", default-features = false, features = ["config"] }
 serde = { version = "1.0.123", features = ["derive"] }
diff --git a/src/main.rs b/src/main.rs
index 53ae973..6ba2654 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -9,6 +9,7 @@ mod utils;
 
 // Import Crates
 use clap::{App, Arg};
+use proxy::handler;
 use tide::{
 	utils::{async_trait, After},
 	Middleware, Next, Request, Response,
@@ -171,13 +172,27 @@ async fn main() -> tide::Result<()> {
 	app.at("/apple-touch-icon.png/").get(iphone_logo);
 
 	// Proxy media through Libreddit
-	app.at("/proxy/*url/").get(proxy::handler);
-	app.at("/vid/:id/:size/").get(proxy::video);
-	app.at("/img/:id/").get(proxy::image);
-	app.at("/thumb/:point/:id/").get(proxy::thumbnail);
-	app.at("/emoji/:id/:name/").get(proxy::emoji);
-	app.at("/preview/:location/:id/:query/").get(proxy::preview);
-	app.at("/style/*path/").get(proxy::style);
+	app
+		.at("/vid/:id/:size/") /* */
+		.get(|req| handler(req, "https://v.redd.it/{}/DASH_{}", vec!["id", "size"]));
+	app
+		.at("/img/:id/") /* */
+		.get(|req| handler(req, "https://i.redd.it/{}", vec!["id"]));
+	app
+		.at("/thumb/:point/:id/") /* */
+		.get(|req| handler(req, "https://{}.thumbs.redditmedia.com/{}", vec!["point", "id"]));
+	app
+		.at("/emoji/:id/:name/") /* */
+		.get(|req| handler(req, "https://emoji.redditmedia.com/{}/{}", vec!["id", "name"]));
+	app
+		.at("/preview/:loc/:id/:query/")
+		.get(|req| handler(req, "https://{}preview.redd.it/{}?{}", vec!["loc", "id", "query"]));
+	app
+		.at("/style/*path/") /* */
+		.get(|req| handler(req, "https://styles.redditmedia.com/{}", vec!["path"]));
+	app
+		.at("/static/*path/") /* */
+		.get(|req| handler(req, "https://www.redditstatic.com/{}", vec!["path"]));
 
 	// Browse user profile
 	app.at("/u/:name/").get(user::profile);
diff --git a/src/proxy.rs b/src/proxy.rs
index 14f8c9f..5da6ddf 100644
--- a/src/proxy.rs
+++ b/src/proxy.rs
@@ -1,72 +1,14 @@
-use base64::decode;
-use surf::{Body, Url};
+use surf::Body;
 use tide::{Request, Response};
 
-pub async fn handler(req: Request<()>) -> tide::Result {
-	let domains = vec![
-		// ICONS
-		"styles.redditmedia.com",
-		"www.redditstatic.com",
-	];
+pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide::Result {
+	let mut url = format.to_string();
 
-	let decoded = decode(req.param("url").unwrap_or_default()).map(|bytes| String::from_utf8(bytes).unwrap_or_default());
-
-	match decoded {
-		Ok(media) => match Url::parse(media.as_str()) {
-			Ok(url) => {
-				if domains.contains(&url.domain().unwrap_or_default()) {
-					request(url.to_string()).await
-				} else {
-					Err(tide::Error::from_str(403, "Resource must be from Reddit"))
-				}
-			}
-			Err(_) => Err(tide::Error::from_str(400, "Can't parse base64 into URL")),
-		},
-		Err(_) => Err(tide::Error::from_str(400, "Can't decode base64")),
+	for name in params {
+		let param = req.param(name).unwrap_or_default();
+		url = url.replacen("{}", param, 1);
 	}
-}
 
-pub async fn video(req: Request<()>) -> tide::Result {
-	let id = req.param("id").unwrap_or_default();
-	let size = req.param("size").unwrap_or("720.mp4");
-	let url = format!("https://v.redd.it/{}/DASH_{}", id, size);
-	request(url).await
-}
-
-pub async fn image(req: Request<()>) -> tide::Result {
-	let id = req.param("id").unwrap_or_default();
-	let url = format!("https://i.redd.it/{}", id);
-	request(url).await
-}
-
-pub async fn thumbnail(req: Request<()>) -> tide::Result {
-	let id = req.param("id").unwrap_or_default();
-	let point = req.param("point").unwrap_or_default();
-	let url = format!("https://{}.thumbs.redditmedia.com/{}", point, id);
-	request(url).await
-}
-
-pub async fn emoji(req: Request<()>) -> tide::Result {
-	let id = req.param("id").unwrap_or_default();
-	let name = req.param("name").unwrap_or_default();
-	let url = format!("https://emoji.redditmedia.com/{}/{}", id, name);
-	request(url).await
-}
-
-pub async fn preview(req: Request<()>) -> tide::Result {
-	let id = req.param("id").unwrap_or_default();
-	let query = req.param("query").unwrap_or_default();
-	let prefix = match req.param("location").unwrap_or_default() {
-		"ext" => "external-",
-		_ => ""
-	};
-	let url = format!("https://{}preview.redd.it/{}?{}", prefix, id, query);
-	request(url).await
-}
-
-pub async fn style(req: Request<()>) -> tide::Result {
-	let path = req.param("path").unwrap_or_default();
-	let url = format!("https://styles.redditmedia.com/{}", path);
 	request(url).await
 }
diff --git a/src/utils.rs b/src/utils.rs
index bfb9e1e..8b58b34 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -2,7 +2,6 @@
 // CRATES
 //
 use askama::Template;
-use base64::encode;
 use cached::proc_macro::cached;
 use regex::Regex;
 use serde_json::{from_str, Value};
@@ -186,32 +185,37 @@ pub fn format_url(url: &str) -> String {
 	if url.is_empty() || url == "self" || url == "default" || url == "nsfw" || url == "spoiler" {
 		String::new()
 	} else {
-		let parsed = Url::parse(url).unwrap();
-		let domain = parsed.domain().unwrap_or_default();
+		match Url::parse(url) {
+			Ok(parsed) => {
+				let domain = parsed.domain().unwrap_or_default();
 
-		let capture = |regex: &str, format: &str, levels: i16| {
-			Regex::new(regex)
-				.map(|re| match re.captures(url) {
-					Some(caps) => match levels {
-						1 => [format, &caps[1], "/"].join(""),
-						2 => [format, &caps[1], "/", &caps[2], "/"].join(""),
-						_ => String::new(),
-					},
-					None => String::new(),
-				})
-				.unwrap_or_default()
-		};
+				let capture = |regex: &str, format: &str, levels: i16| {
+					Regex::new(regex)
+						.map(|re| match re.captures(url) {
+							Some(caps) => match levels {
+								1 => [format, &caps[1], "/"].join(""),
+								2 => [format, &caps[1], "/", &caps[2], "/"].join(""),
+								_ => String::new(),
+							},
+							None => String::new(),
+						})
+						.unwrap_or_default()
+				};
 
-		match domain {
-			"v.redd.it" => capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
-			"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
-			"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
-			"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
-			"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
-			"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)\?(.*)", "/preview/int/", 2),
-			"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)\?(.*)", "/preview/ext/", 2),
-			// "styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
-			_ => format!("/proxy/{}/", encode(url).as_str()),
+				match domain {
+					"v.redd.it" => capture(r"https://v\.redd\.it/(.*)/DASH_([0-9]{2,4}(\.mp4|$))", "/vid/", 2),
+					"i.redd.it" => capture(r"https://i\.redd\.it/(.*)", "/img/", 1),
+					"a.thumbs.redditmedia.com" => capture(r"https://a\.thumbs\.redditmedia\.com/(.*)", "/thumb/a/", 1),
+					"b.thumbs.redditmedia.com" => capture(r"https://b\.thumbs\.redditmedia\.com/(.*)", "/thumb/b/", 1),
+					"emoji.redditmedia.com" => capture(r"https://emoji\.redditmedia\.com/(.*)/(.*)", "/emoji/", 2),
+					"preview.redd.it" => capture(r"https://preview\.redd\.it/(.*)\?(.*)", "/preview//", 2),
+					"external-preview.redd.it" => capture(r"https://external\-preview\.redd\.it/(.*)\?(.*)", "/preview/external-/", 2),
+					"styles.redditmedia.com" => capture(r"https://styles\.redditmedia\.com/(.*)", "/style/", 1),
+					"www.redditstatic.com" => capture(r"https://www\.redditstatic\.com/(.*)", "/static/", 1),
+					_ => String::new(),
+				}
+			}
+			Err(_) => String::new(),
 		}
 	}
 }
@@ -510,20 +514,25 @@ pub async fn request(path: String) -> Result<Value, String> {
 	let res = client.send(req).await;
 
-	let body = res.unwrap().take_body().into_string().await;
-
-	match body {
-		// If response is success
-		Ok(response) => {
-			// Parse the response from Reddit as JSON
-			match from_str(&response) {
-				Ok(json) => Ok(json),
-				Err(e) => {
-					println!("{} - Failed to parse page JSON data: {}", url, e);
-					Err("Failed to parse page JSON data".to_string())
+	match res {
+		Ok(mut response) => match response.take_body().into_string().await {
+			// If response is success
+			Ok(body) => {
+				// Parse the response from Reddit as JSON
+				match from_str(&body) {
+					Ok(json) => Ok(json),
+					Err(e) => {
+						println!("{} - Failed to parse page JSON data: {}", url, e);
+						Err("Failed to parse page JSON data".to_string())
+					}
 				}
 			}
-		}
+			// Failed to parse body
+			Err(e) => {
+				println!("{} - Couldn't parse request body: {}", url, e);
+				Err("Couldn't parse request body".to_string())
+			}
+		},
 		// If failed to send request
 		Err(e) => {
 			println!("{} - Couldn't send request to Reddit: {}", url, e);