Cache robots.txt

spikecodes 2021-01-06 14:19:10 -08:00
parent 0f506fc41b
commit b33d79ed9b
2 changed files with 10 additions and 13 deletions

src/main.rs

@@ -16,7 +16,9 @@ async fn style() -> HttpResponse {
 }
 
 async fn robots() -> HttpResponse {
-    HttpResponse::Ok().body(include_str!("../static/robots.txt"))
+    HttpResponse::Ok()
+        .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+        .body(include_str!("../static/robots.txt"))
 }
 
 #[get("/favicon.ico")]

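In the new Cache-Control header, max-age=1209600 lets browsers reuse the cached robots.txt for up to 14 days (14 × 86400 s), while s-maxage=86400 caps shared caches, such as a CDN in front of an instance, at 1 day. A minimal standalone sketch of the same policy, assuming the actix-web 3.x builder API used in this diff; the #[get] route wiring and the inline body are illustrative stand-ins, not the project's actual setup:

use actix_web::{get, App, HttpResponse, HttpServer};

#[get("/robots.txt")]
async fn robots() -> HttpResponse {
    HttpResponse::Ok()
        // max-age=1209600: browsers may reuse this response for 14 days;
        // s-maxage=86400: shared caches (CDNs, proxies) expire it after 1 day.
        .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
        // Stand-in body; the real handler serves ../static/robots.txt.
        .body("User-agent: *\nDisallow:")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(robots))
        .bind("127.0.0.1:8080")?
        .run()
        .await
}

Since robots.txt rarely changes, the long browser TTL is cheap, while the shorter s-maxage keeps a shared cache from pinning one copy for two weeks.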
src/proxy.rs

@@ -28,18 +28,13 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse>
     let domain = url.domain().unwrap_or_default();
 
     if domains.contains(&domain) {
-        Client::default()
-            .get(media.replace("&amp;", "&"))
-            .send()
-            .await
-            .map_err(Error::from)
-            .map(|res|
-                HttpResponse::build(res.status())
-                    .header("Cache-Control", "public, max-age=1209600, s-maxage=86400, must-revalidate")
-                    .header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
-                    .header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
-                    .streaming(res)
-            )
+        Client::default().get(media.replace("&amp;", "&")).send().await.map_err(Error::from).map(|res| {
+            HttpResponse::build(res.status())
+                .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+                .header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
+                .header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
+                .streaming(res)
+        })
     } else {
         Err(error::ErrorForbidden("Resource must be from Reddit"))
     }
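Besides collapsing the request chain onto one line, the new proxy code drops must-revalidate from the Cache-Control value, and it keeps the unwrap() calls on Content-Length and Content-Type, which would panic the handler if the upstream response ever omitted either header. A hypothetical, more defensive variant of the same body, not part of this commit and assuming the same actix-web 3.x client API:

use actix_web::{client::Client, Error, HttpResponse};

// Hypothetical sketch: forward upstream headers only when present instead of
// unwrap()ing them, so a missing header cannot panic the handler.
async fn proxy(media: String) -> Result<HttpResponse, Error> {
    let res = Client::default()
        .get(media.replace("&amp;", "&"))
        .send()
        .await
        .map_err(Error::from)?;

    let mut builder = HttpResponse::build(res.status());
    builder.header("Cache-Control", "public, max-age=1209600, s-maxage=86400");
    for name in &["Content-Length", "Content-Type"] {
        // Copy the header from Reddit's response only if it was actually sent.
        if let Some(value) = res.headers().get(*name) {
            builder.header(*name, value.to_owned());
        }
    }
    Ok(builder.streaming(res))
}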