Cache robots.txt

spikecodes 2021-01-06 14:19:10 -08:00
parent 0f506fc41b
commit b33d79ed9b
2 changed files with 10 additions and 13 deletions

@@ -16,7 +16,9 @@ async fn style() -> HttpResponse {
 }
 
 async fn robots() -> HttpResponse {
-	HttpResponse::Ok().body(include_str!("../static/robots.txt"))
+	HttpResponse::Ok()
+		.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+		.body(include_str!("../static/robots.txt"))
 }
 
 #[get("/favicon.ico")]

@@ -28,18 +28,13 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse>
 	let domain = url.domain().unwrap_or_default();
 
 	if domains.contains(&domain) {
-		Client::default()
-			.get(media.replace("&amp;", "&"))
-			.send()
-			.await
-			.map_err(Error::from)
-			.map(|res|
-				HttpResponse::build(res.status())
-					.header("Cache-Control", "public, max-age=1209600, s-maxage=86400, must-revalidate")
-					.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
-					.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
-					.streaming(res)
-			)
+		Client::default().get(media.replace("&amp;", "&")).send().await.map_err(Error::from).map(|res| {
+			HttpResponse::build(res.status())
+				.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
+				.header("Content-Length", res.headers().get("Content-Length").unwrap().to_owned())
+				.header("Content-Type", res.headers().get("Content-Type").unwrap().to_owned())
+				.streaming(res)
+		})
 	} else {
 		Err(error::ErrorForbidden("Resource must be from Reddit"))
 	}
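The condensed closure keeps the .unwrap() calls on the upstream Content-Length and Content-Type headers, which panic if Reddit omits either one. A hedged variant that forwards those headers only when present, as a sketch rather than what the commit does; proxy and its media parameter are simplified stand-ins for the handler above, and the domain allow-list check is elided:

use actix_web::{client::Client, Error, HttpResponse, Result};

// Hypothetical simplified handler: `media` is the already-decoded URL.
async fn proxy(media: String) -> Result<HttpResponse> {
	let res = Client::default()
		.get(media.replace("&amp;", "&"))
		.send()
		.await
		.map_err(Error::from)?;

	let mut builder = HttpResponse::build(res.status());
	builder.header("Cache-Control", "public, max-age=1209600, s-maxage=86400");

	// Forward upstream headers only if Reddit actually sent them,
	// instead of panicking on a missing header.
	for name in &["Content-Length", "Content-Type"] {
		if let Some(value) = res.headers().get(*name) {
			builder.header(*name, value.to_owned());
		}
	}

	Ok(builder.streaming(res))
}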