From f33af752678e82b5f4b185d2f7b97fcf003ed19b Mon Sep 17 00:00:00 2001 From: spikecodes <19519553+spikecodes@users.noreply.github.com> Date: Mon, 30 Nov 2020 21:10:08 -0800 Subject: [PATCH] Proxy Thumbnails --- Cargo.toml | 2 +- src/post.rs | 13 +------------ src/proxy.rs | 2 -- src/subreddit.rs | 4 ++-- src/utils.rs | 21 ++++++++++++++++++--- 5 files changed, 22 insertions(+), 20 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 1532fd6..3b28464 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "libreddit" description = " Alternative private front-end to Reddit" license = "AGPL-3.0" repository = "https://github.com/spikecodes/libreddit" -version = "0.1.8" +version = "0.1.9" authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"] edition = "2018" diff --git a/src/post.rs b/src/post.rs index 7ce9ccb..8af93cb 100644 --- a/src/post.rs +++ b/src/post.rs @@ -1,13 +1,10 @@ // CRATES -use crate::utils::{request, val, Comment, ErrorTemplate, Flair, Params, Post}; +use crate::utils::{format_url, request, val, Comment, ErrorTemplate, Flair, Params, Post}; use actix_web::{get, http::StatusCode, web, HttpResponse, Result}; use askama::Template; use chrono::{TimeZone, Utc}; use pulldown_cmark::{html, Options, Parser}; -#[cfg(feature = "proxy")] -use base64::encode; - // STRUCTS #[derive(Template)] #[template(path = "post.html", escape = "none")] @@ -69,14 +66,6 @@ async fn page(web::Path((_sub, id)): web::Path<(String, String)>, params: web::Q } } -async fn format_url(url: &str) -> String { - #[cfg(feature = "proxy")] - return "/imageproxy/".to_string() + encode(url).as_str(); - - #[cfg(not(feature = "proxy"))] - return url.to_string(); -} - // UTILITIES async fn media(data: &serde_json::Value) -> (String, String) { let post_type: &str; diff --git a/src/proxy.rs b/src/proxy.rs index 4047454..a2341d7 100644 --- a/src/proxy.rs +++ b/src/proxy.rs @@ -17,8 +17,6 @@ async fn handler(web::Path(url): web::Path) -> Result { Err(_e) => return 
Ok(HttpResponse::Ok().body("")), }; - dbg!(&media); - let client = Client::default(); client .get(media.replace("&amp;", "&")) diff --git a/src/subreddit.rs b/src/subreddit.rs index c758433..10de48c 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -1,5 +1,5 @@ // CRATES -use crate::utils::{fetch_posts, request, val, ErrorTemplate, Params, Post, Subreddit}; +use crate::utils::{fetch_posts, format_url, request, val, ErrorTemplate, Params, Post, Subreddit}; use actix_web::{get, http::StatusCode, web, HttpResponse, Result}; use askama::Template; @@ -88,7 +88,7 @@ async fn subreddit(sub: &String) -> Result { name: val(&res, "display_name").await, title: val(&res, "title").await, description: val(&res, "public_description").await, - icon: val(&res, "icon_img").await, + icon: format_url(val(&res, "icon_img").await.as_str()).await, members: if members > 1000 { format!("{}k", members / 1000) } else { members.to_string() }, active: if active > 1000 { format!("{}k", active / 1000) } else { active.to_string() }, }; diff --git a/src/utils.rs b/src/utils.rs index 9a1a0ef..066d02c 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -5,6 +5,9 @@ use chrono::{TimeZone, Utc}; use serde_json::{from_str, Value}; // use surf::{client, get, middleware::Redirect}; +#[cfg(feature = "proxy")] +use base64::encode; + // // STRUCTS // @@ -72,6 +75,18 @@ pub struct ErrorTemplate { pub message: String, } +// +// URL HANDLING +// + +pub async fn format_url(url: &str) -> String { + #[cfg(feature = "proxy")] + return "/imageproxy/".to_string() + encode(url).as_str(); + + #[cfg(not(feature = "proxy"))] + return url.to_string(); +} + // // JSON PARSING // @@ -108,7 +123,7 @@ pub async fn fetch_posts(url: String, fallback_title: String) -> Result<(Vec Result { // let mut res = client.send(req).await.unwrap(); // let success = res.status().is_success(); // let body = res.body_string().await.unwrap(); - + // --- reqwest --- let res = reqwest::get(&url).await.unwrap(); // Read the status from the 
response let success = res.status().is_success(); // Read the body of the response let body = res.text().await.unwrap(); - + dbg!(url.clone()); // Parse the response from Reddit as JSON