diff --git a/src/proxy.rs b/src/proxy.rs
index 0a62a0f..2f0e3fe 100644
--- a/src/proxy.rs
+++ b/src/proxy.rs
@@ -2,6 +2,7 @@
 use async_std::{io, net::TcpStream, prelude::*};
 use async_tls::TlsConnector;
 use tide::{http::url::Url, Request, Response};
 
+/// Handles tide proxy routes by parsing `params` from the `req`uest.
 pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide::Result {
 	let mut url = format.to_string();
@@ -13,6 +14,9 @@ pub async fn handler(req: Request<()>, format: &str, params: Vec<&str>) -> tide:
 	request(url).await
 }
 
+/// Sends a request to a Reddit media domain and proxies the response.
+///
+/// Relays the `Content-Length` and `Content-Type` headers.
 async fn request(url: String) -> tide::Result {
 	// Parse url into parts
 	let parts = Url::parse(&url).unwrap();
@@ -65,9 +69,11 @@ async fn request(url: String) -> tide::Result {
 			.unwrap_or_default()
 	};
 
+	// Parse Content-Length and Content-Type from headers
 	let content_length = header("Content-Length");
 	let content_type = header("Content-Type");
 
+	// Build response
 	Ok(
 		Response::builder(status)
 			.body(tide::http::Body::from_bytes(body))
diff --git a/src/utils.rs b/src/utils.rs
index 1b01305..b8cc495 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -411,10 +411,10 @@ pub fn format_url(url: &str) -> String {
 		Ok(parsed) => {
 			let domain = parsed.domain().unwrap_or_default();
 
-			let capture = |regex: &str, format: &str, levels: i16| {
+			let capture = |regex: &str, format: &str, segments: i16| {
 				Regex::new(regex)
 					.map(|re| match re.captures(url) {
-						Some(caps) => match levels {
+						Some(caps) => match segments {
							1 => [format, &caps[1], "/"].join(""),
							2 => [format, &caps[1], "/", &caps[2], "/"].join(""),
							_ => String::new(),
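
For illustration, a minimal standalone sketch of the renamed `capture` closure from `format_url`, showing what the `segments` argument controls. It assumes the `regex` crate is available; the example URL, `/vid/` route prefix, and pattern are hypothetical and not necessarily the exact ones used elsewhere in utils.rs.

use regex::Regex;

fn main() {
	// Hypothetical input; in utils.rs this is the `url` argument of format_url.
	let url = "https://v.redd.it/abc123/DASH_720.mp4";

	// `segments` is how many captured path segments get spliced into the
	// local proxy route: one for a bare id, two for an id plus filename.
	let capture = |regex: &str, format: &str, segments: i16| {
		Regex::new(regex)
			.map(|re| match re.captures(url) {
				Some(caps) => match segments {
					1 => [format, &caps[1], "/"].join(""),
					2 => [format, &caps[1], "/", &caps[2], "/"].join(""),
					_ => String::new(),
				},
				None => String::new(),
			})
			.unwrap_or_default()
	};

	// Two captured segments: the media id and the DASH filename.
	assert_eq!(
		capture(r"https://v\.redd\.it/(.+)/(DASH_.+)", "/vid/", 2),
		"/vid/abc123/DASH_720.mp4/"
	);
}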