From b33d79ed9b9fac9ac2e65f1452ce44a7010c205d Mon Sep 17 00:00:00 2001
From: spikecodes <19519553+spikecodes@users.noreply.github.com>
Date: Wed, 6 Jan 2021 14:19:10 -0800
Subject: [PATCH] Cache robots.txt

---
 src/main.rs  |  4 +++-
 src/proxy.rs | 19 +++++++------------
 2 files changed, 10 insertions(+), 13 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 46eafe2..f029388 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -16,7 +16,9 @@ async fn style() -> HttpResponse {
 }
 
 async fn robots() -> HttpResponse {
-	HttpResponse::Ok().body(include_str!("../static/robots.txt"))
+	HttpResponse::Ok()
+		.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+		.body(include_str!("../static/robots.txt"))
 }
 
 #[get("/favicon.ico")]
diff --git a/src/proxy.rs b/src/proxy.rs
index c9502b0..0f9dc36 100644
--- a/src/proxy.rs
+++ b/src/proxy.rs
@@ -28,18 +28,13 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse>
 	let domain = url.domain().unwrap_or_default();
 
 	if domains.contains(&domain) {
-		Client::default()
-			.get(media.replace("&amp;", "&"))
-			.send()
-			.await
-			.map_err(Error::from)
-			.map(|res|
-				HttpResponse::build(res.status())
-					.header("Cache-Control", "public, max-age=1209600, s-maxage=86400, must-revalidate")
-					.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
-					.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
-					.streaming(res)
-			)
+		Client::default().get(media.replace("&amp;", "&")).send().await.map_err(Error::from).map(|res| {
+			HttpResponse::build(res.status())
+				.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+				.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
+				.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
+				.streaming(res)
+		})
 	} else {
 		Err(error::ErrorForbidden("Resource must be from Reddit"))
 	}