diff --git a/Cargo.toml b/Cargo.toml index c5bec1e..d4f15df 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,13 +7,9 @@ version = "0.2.5" authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"] edition = "2018" -[features] -default = ["proxy"] -proxy = ["actix-web/rustls", "base64"] - [dependencies] -base64 = { version = "0.13.0", optional = true } -actix-web = "3.2.0" +base64 = "0.13.0" +actix-web = { version = "3.2.0", features = ["rustls"] } reqwest = { version = "0.10", default_features = false, features = ["rustls-tls"] } askama = "0.8.0" serde = "1.0.117" diff --git a/src/main.rs b/src/main.rs index b76d5c4..45a06fe 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,6 +5,7 @@ use actix_web::{get, middleware::NormalizePath, web, App, HttpResponse, HttpServ mod post; mod proxy; mod search; +// mod settings; mod subreddit; mod user; mod utils; @@ -50,6 +51,9 @@ async fn main() -> std::io::Result<()> { .route("/style.css/", web::get().to(style)) .route("/favicon.ico/", web::get().to(HttpResponse::Ok)) .route("/robots.txt/", web::get().to(robots)) + // SETTINGS SERVICE + // .route("/settings/", web::get().to(settings::get)) + // .route("/settings/save/", web::post().to(settings::set)) // PROXY SERVICE .route("/proxy/{url:.*}/", web::get().to(proxy::handler)) // SEARCH SERVICES diff --git a/src/post.rs b/src/post.rs index e3f35d5..92d92fd 100644 --- a/src/post.rs +++ b/src/post.rs @@ -16,7 +16,7 @@ struct PostTemplate { sort: String, } -pub async fn item(req: HttpRequest) -> Result<HttpResponse> { +pub async fn item(req: HttpRequest) -> HttpResponse { let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string()); let sort = param(&path, "sort"); let id = req.match_info().get("id").unwrap_or("").to_string(); @@ -35,7 +35,7 @@ pub async fn item(req: HttpRequest) -> Result<HttpResponse> { // Use the Post and Comment structs to generate a website to show users let s = PostTemplate { comments, post, sort }.render().unwrap(); - 
Ok(HttpResponse::Ok().content_type("text/html").body(s)) + HttpResponse::Ok().content_type("text/html").body(s) } // If the Reddit API returns an error, exit and send error page to user Err(msg) => error(msg.to_string()).await, diff --git a/src/proxy.rs b/src/proxy.rs index 71a1a5a..a1429b7 100644 --- a/src/proxy.rs +++ b/src/proxy.rs @@ -1,30 +1,40 @@ -use actix_web::{client::Client, web, Error, HttpResponse, Result}; +use actix_web::{client::Client, error, web, Error, HttpResponse, Result}; +use url::Url; -#[cfg(feature = "proxy")] use base64::decode; -pub async fn handler(web::Path(url): web::Path<String>) -> Result<HttpResponse> { - if cfg!(feature = "proxy") { - #[cfg(feature = "proxy")] - let media: String; +pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> { + let domains = vec![ + "a.thumbs.redditmedia.com", + "b.thumbs.redditmedia.com", + "preview.redd.it", + "external-preview.redd.it", + "i.redd.it", + "v.redd.it", + ]; - #[cfg(not(feature = "proxy"))] - let media = url; + match decode(b64) { + Ok(bytes) => { + let media = String::from_utf8(bytes).unwrap(); - #[cfg(feature = "proxy")] - match decode(url) { - Ok(bytes) => media = String::from_utf8(bytes).unwrap(), - Err(_e) => return Ok(HttpResponse::Ok().body("")), - }; + match Url::parse(media.as_str()) { + Ok(url) => { + let domain = url.domain().unwrap_or_default(); - let client = Client::default(); - client - .get(media.replace("&amp;", "&")) - .send() - .await - .map_err(Error::from) - .map(|res| HttpResponse::build(res.status()).streaming(res)) - } else { - Ok(HttpResponse::Ok().body("")) + if domains.contains(&domain) { + Client::default() + .get(media.replace("&amp;", "&")) + .send() + .await + .map_err(Error::from) + .map(|res| HttpResponse::build(res.status()).streaming(res)) + } else { + Err(error::ErrorForbidden("Resource must be from Reddit")) + } + } + Err(_) => Err(error::ErrorBadRequest("Can't parse encoded base64 URL")), + } + } + Err(_) => Err(error::ErrorBadRequest("Can't decode base64 URL")), } } diff --git 
a/src/search.rs b/src/search.rs index 34d8d0a..5ea99e3 100644 --- a/src/search.rs +++ b/src/search.rs @@ -1,11 +1,10 @@ // CRATES use crate::utils::{error, fetch_posts, param, Post}; -use actix_web::{HttpRequest, HttpResponse, Result}; +use actix_web::{HttpRequest, HttpResponse}; use askama::Template; // STRUCTS #[derive(Template)] -#[allow(dead_code)] #[template(path = "search.html", escape = "none")] struct SearchTemplate { posts: Vec<Post>, @@ -16,7 +15,7 @@ struct SearchTemplate { } // SERVICES -pub async fn find(req: HttpRequest) -> Result<HttpResponse> { +pub async fn find(req: HttpRequest) -> HttpResponse { let path = format!("{}.json?{}", req.path(), req.query_string()); let q = param(&path, "q"); let sort = if param(&path, "sort").is_empty() { @@ -27,8 +26,8 @@ pub async fn find(req: HttpRequest) -> Result<HttpResponse> { let sub = req.match_info().get("sub").unwrap_or("").to_string(); match fetch_posts(&path, String::new()).await { - Ok(posts) => { - let s = SearchTemplate { + Ok(posts) => HttpResponse::Ok().content_type("text/html").body( + SearchTemplate { posts: posts.0, query: q, sub, @@ -36,9 +35,8 @@ pub async fn find(req: HttpRequest) -> Result<HttpResponse> { ends: (param(&path, "after"), posts.1), } .render() - .unwrap(); - Ok(HttpResponse::Ok().content_type("text/html").body(s)) - } + .unwrap(), + ), Err(msg) => error(msg.to_string()).await, } } diff --git a/src/settings.rs b/src/settings.rs new file mode 100644 index 0000000..5fc903f --- /dev/null +++ b/src/settings.rs @@ -0,0 +1,48 @@ +// // CRATES +// use crate::utils::cookies; +// use actix_web::{cookie::Cookie, web::Form, HttpRequest, HttpResponse, Result}; // http::Method, +// use askama::Template; + +// // STRUCTS +// #[derive(Template)] +// #[template(path = "settings.html", escape = "none")] +// struct SettingsTemplate { +// pref_nsfw: String, +// } + +// #[derive(serde::Deserialize)] +// pub struct Preferences { +// pref_nsfw: Option<String>, +// } + +// // FUNCTIONS + +// // Retrieve cookies from request "Cookie" header +// pub async fn 
get(req: HttpRequest) -> Result<HttpResponse> { +// let cookies = cookies(req); + +// let pref_nsfw: String = cookies.get("pref_nsfw").unwrap_or(&String::new()).to_owned(); + +// let s = SettingsTemplate { pref_nsfw }.render().unwrap(); +// Ok(HttpResponse::Ok().content_type("text/html").body(s)) +// } + +// // Set cookies using response "Set-Cookie" header +// pub async fn set(form: Form<Preferences>) -> HttpResponse { +// let nsfw: Cookie = match &form.pref_nsfw { +// Some(value) => Cookie::build("pref_nsfw", value).path("/").secure(true).http_only(true).finish(), +// None => Cookie::build("pref_nsfw", "").finish(), +// }; + +// let body = SettingsTemplate { +// pref_nsfw: form.pref_nsfw.clone().unwrap_or_default(), +// } +// .render() +// .unwrap(); + +// HttpResponse::Found() +// .content_type("text/html") +// .set_header("Set-Cookie", nsfw.to_string()) +// .set_header("Location", "/settings") +// .body(body) +// } diff --git a/src/subreddit.rs b/src/subreddit.rs index 6e77e45..8ce317a 100644 --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -22,7 +22,7 @@ struct WikiTemplate { } // SERVICES -pub async fn page(req: HttpRequest) -> Result<HttpResponse> { +pub async fn page(req: HttpRequest) -> HttpResponse { let path = format!("{}.json?{}", req.path(), req.query_string()); let sub = req.match_info().get("sub").unwrap_or("popular").to_string(); let sort = req.match_info().get("sort").unwrap_or("hot").to_string(); @@ -43,13 +43,13 @@ pub async fn page(req: HttpRequest) -> Result<HttpResponse> { } .render() .unwrap(); - Ok(HttpResponse::Ok().content_type("text/html").body(s)) + HttpResponse::Ok().content_type("text/html").body(s) } Err(msg) => error(msg.to_string()).await, } } -pub async fn wiki(req: HttpRequest) -> Result<HttpResponse> { +pub async fn wiki(req: HttpRequest) -> HttpResponse { let sub = req.match_info().get("sub").unwrap_or("reddit.com"); let page = req.match_info().get("page").unwrap_or("index"); let path: String = format!("r/{}/wiki/{}.json?raw_json=1", sub, page); @@ -63,7 +63,7 @@ pub async fn wiki(req: 
HttpRequest) -> Result<HttpResponse> { } .render() .unwrap(); - Ok(HttpResponse::Ok().content_type("text/html").body(s)) + HttpResponse::Ok().content_type("text/html").body(s) } Err(msg) => error(msg.to_string()).await, } diff --git a/src/user.rs b/src/user.rs index 5183c8a..eec3f00 100644 --- a/src/user.rs +++ b/src/user.rs @@ -14,7 +14,8 @@ struct UserTemplate { ends: (String, String), } -pub async fn profile(req: HttpRequest) -> Result<HttpResponse> { +// FUNCTIONS +pub async fn profile(req: HttpRequest) -> HttpResponse { // Build the Reddit JSON API path let path = format!("{}.json?{}&raw_json=1", req.path(), req.query_string()); @@ -36,7 +37,7 @@ pub async fn profile(req: HttpRequest) -> Result<HttpResponse> { } .render() .unwrap(); - Ok(HttpResponse::Ok().content_type("text/html").body(s)) + HttpResponse::Ok().content_type("text/html").body(s) } // If there is an error show error page Err(msg) => error(msg.to_string()).await, diff --git a/src/utils.rs b/src/utils.rs index 6121bf2..e0d601e 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,17 +1,17 @@ +// use std::collections::HashMap; + // // CRATES // -use actix_web::{http::StatusCode, HttpResponse, Result}; +use actix_web::{HttpResponse, Result}; use askama::Template; +use base64::encode; use chrono::{TimeZone, Utc}; use regex::Regex; use serde_json::from_str; use url::Url; // use surf::{client, get, middleware::Redirect}; -#[cfg(feature = "proxy")] -use base64::encode; - // // STRUCTS // @@ -102,17 +102,31 @@ pub fn param(path: &str, value: &str) -> String { pairs.get(value).unwrap_or(&String::new()).to_owned() } +// Cookies from request +// pub fn cookies(req: HttpRequest) -> HashMap<String, String> { +// let mut result: HashMap<String, String> = HashMap::new(); + +// let cookies: Vec<Cookie> = req +// .headers() +// .get_all("Cookie") +// .map(|value| value.to_str().unwrap()) +// .map(|unparsed| Cookie::parse(unparsed).unwrap()) +// .collect(); + +// for cookie in cookies { +// result.insert(cookie.name().to_string(), cookie.value().to_string()); +// } + +// result +// } + +// Direct 
urls to proxy if proxy is enabled pub fn format_url(url: String) -> String { if url.is_empty() { return String::new(); }; - #[cfg(feature = "proxy")] - return "/proxy/".to_string() + encode(url).as_str(); - - #[cfg(not(feature = "proxy"))] - return url.to_string(); + format!("/proxy/{}", encode(url).as_str()) } // Rewrite Reddit links to Libreddit in body of text @@ -217,10 +231,10 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> { -pub async fn error(message: String) -> Result<HttpResponse> { +pub async fn error(message: String) -> HttpResponse { let msg = if message.is_empty() { "Page not found".to_string() } else { message }; let body = ErrorTemplate { message: msg }.render().unwrap_or_default(); - Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(body)) + HttpResponse::NotFound().content_type("text/html").body(body) } // Make a request to a Reddit API and parse the JSON response diff --git a/static/style.css b/static/style.css index 7782761..af50149 100644 --- a/static/style.css +++ b/static/style.css @@ -599,8 +599,15 @@ td, th { } @media screen and (max-width: 800px) { - main { flex-direction: column-reverse; } - nav { flex-direction: column; } + main { + flex-direction: column-reverse; + padding: 10px; + margin: 10px 0; + } + nav { + flex-direction: column; + margin: 10px; + } aside, #subreddit, #user { margin: 0; diff --git a/templates/base.html b/templates/base.html index ae37b97..670a9d1 100644 --- a/templates/base.html +++ b/templates/base.html @@ -4,8 +4,7 @@ {% block head %} {% block title %}Libreddit{% endblock %} - + diff --git a/templates/search.html b/templates/search.html index 18e8cd5..1ec5374 100644 --- a/templates/search.html +++ b/templates/search.html @@ -9,7 +9,7 @@ {% if sub != "" %}
- +
{% endif %} diff --git a/templates/settings.html b/templates/settings.html new file mode 100644 index 0000000..cf69691 --- /dev/null +++ b/templates/settings.html @@ -0,0 +1,18 @@ +{% extends "base.html" %} +{% import "utils.html" as utils %} + +{% block title %}Libreddit Settings{% endblock %} + +{% block search %} + {% call utils::search("".to_owned(), "", "") %} +{% endblock %} + +{% block body %} +
+
+ + + +
+
+{% endblock %} \ No newline at end of file diff --git a/templates/utils.html b/templates/utils.html index f3b2f51..b7e1f30 100644 --- a/templates/utils.html +++ b/templates/utils.html @@ -19,7 +19,7 @@ {% if root != "/r/" && !root.is_empty() %}
- +
{% endif %}