libreddit/src/proxy.rs

use actix_web::{client::Client, error, web, Error, HttpResponse, Result};
use url::Url;

use base64::decode;

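/// Decodes the base64-encoded media URL from the request path, verifies it points
/// at an allowed Reddit media domain, and streams the upstream response back to
/// the client with the upstream status code.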
pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse> {
    let domains = vec![
        // THUMBNAILS
        "a.thumbs.redditmedia.com",
        "b.thumbs.redditmedia.com",
        // ICONS
        "styles.redditmedia.com",
        "www.redditstatic.com",
        // PREVIEWS
        "preview.redd.it",
        "external-preview.redd.it",
        // MEDIA
        "i.redd.it",
        "v.redd.it",
    ];

    match decode(b64) {
        Ok(bytes) => {
            let media = String::from_utf8(bytes).unwrap_or_default();

            match Url::parse(media.as_str()) {
                Ok(url) => {
                    let domain = url.domain().unwrap_or_default();

                    if domains.contains(&domain) {
                        Client::default()
                            .get(media.replace("&amp;", "&"))
                            .send()
                            .await
                            .map_err(Error::from)
                            .map(|res| HttpResponse::build(res.status()).streaming(res))
                    } else {
                        Err(error::ErrorForbidden("Resource must be from Reddit"))
                    }
                }
                Err(_) => Err(error::ErrorBadRequest("Can't parse encoded base64 URL")),
            }
        }
        Err(_) => Err(error::ErrorBadRequest("Can't decode base64 URL")),
    }
}
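
// Usage sketch (an assumption, not part of this file): with the actix-web 3.x
// routing API, this handler would be registered in main.rs along these lines,
// where the `{url}` path segment carries the base64-encoded media URL received
// above as `web::Path(b64)`:
//
//     App::new().route("/proxy/{url}", web::get().to(proxy::handler))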