author    | Jesse Morgan <jesse@jesterpm.net> | 2020-09-18 09:22:03 -0700
committer | Jesse Morgan <jesse@jesterpm.net> | 2020-09-18 09:22:03 -0700
commit    | 89734a74faed3c330bed569ac36b6480da8ed8d9 (patch)
tree      | 278b74f181e2866a439720f69c9d9b5699276c03 /src
parent    | 001d2d3dccafea7b368cd6545a5a0b000a84ee76 (diff)
First pass at a photo resizing endpoint.
This is currently fetching the images from the media_url, since I ripped
it out of another project. I still need to change it to fetch from S3
directly.
This is also using actix_web::client instead of reqwest. I should
probably switch the oauth module to do the same.
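A possible shape for the S3 follow-up mentioned above: pull the original photo bytes with rusoto's GetObjectRequest instead of proxying media_url. This is only a sketch — fetch_photo and its signature are hypothetical; only S3Client, the "photo/" key layout, and the actix error helpers come from the code in this commit.

// Hypothetical helper (not part of this commit): fetch the original photo for
// `filename` straight from S3. Keys follow the "photo/<id>.<ext>" layout used
// by handle_upload.
use actix_web::error::{ErrorInternalServerError, ErrorNotFound};
use actix_web::Error;
use futures::TryStreamExt;
use rusoto_s3::{GetObjectRequest, S3Client, S3};

async fn fetch_photo(
    s3_client: &S3Client,
    bucket: &str,
    filename: &str,
) -> Result<(Vec<u8>, Option<String>), Error> {
    let request = GetObjectRequest {
        bucket: bucket.to_owned(),
        key: format!("photo/{}", filename),
        ..Default::default()
    };

    let output = s3_client
        .get_object(request)
        .await
        .map_err(ErrorNotFound)?;

    // Collect the streaming body into memory so it can be handed to `image`.
    let body = output
        .body
        .ok_or_else(|| ErrorInternalServerError("empty S3 body"))?;
    let data = body
        .map_ok(|chunk| chunk.to_vec())
        .try_concat()
        .await
        .map_err(ErrorInternalServerError)?;

    Ok((data, output.content_type))
}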
Diffstat (limited to 'src')
-rw-r--r-- | src/main.rs     | 172
-rw-r--r-- | src/media.rs    | 161
-rw-r--r-- | src/micropub.rs | 168
3 files changed, 339 insertions, 162 deletions
diff --git a/src/main.rs b/src/main.rs
index d5da4ac..2c81ccd 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,171 +1,15 @@
-use actix_multipart::Multipart;
-use actix_web::http::header;
-use actix_web::{middleware, web, App, HttpRequest, HttpResponse, HttpServer};
-
-use chrono::Utc;
-
-use futures::{StreamExt, TryStreamExt};
-
-use rand::distributions::Alphanumeric;
-use rand::{thread_rng, Rng};
+use actix_web::client::Client;
+use actix_web::{middleware, web, App, HttpServer};
 
 use rusoto_core::Region;
-use rusoto_s3::{PutObjectRequest, S3Client, S3};
+use rusoto_s3::S3Client;
 
 use serde::{Deserialize, Serialize};
 
-use std::collections::HashMap;
-use std::fmt::Display;
-use std::iter;
-
+mod media;
+mod micropub;
 mod oauth;
 
-// To make the timepart shorter, we'll offset it with a custom epoch.
-const EPOCH: i64 = 631152000;
-
-#[derive(Serialize, Deserialize)]
-struct MicropubError {
-    error: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    error_description: Option<String>,
-}
-
-impl MicropubError {
-    pub fn new<S>(err: S) -> Self
-    where
-        S: Into<String>,
-    {
-        MicropubError {
-            error: err.into(),
-            error_description: None,
-        }
-    }
-
-    pub fn with_description<S, D>(err: S, description: D) -> Self
-    where
-        S: Into<String>,
-        D: Display,
-    {
-        MicropubError {
-            error: err.into(),
-            error_description: Some(format!("{}", description)),
-        }
-    }
-}
-
-fn random_id() -> String {
-    let now = Utc::now();
-
-    // Generate the time part
-    let ts = now.timestamp() - EPOCH;
-    let offset = (ts.leading_zeros() / 8) as usize;
-    let time_part = base32::encode(
-        base32::Alphabet::RFC4648 { padding: false },
-        &ts.to_be_bytes()[offset..],
-    );
-
-    // Generate the random part
-    let mut rng = thread_rng();
-    let random_part: String = iter::repeat(())
-        .map(|()| rng.sample(Alphanumeric))
-        .take(7)
-        .collect();
-
-    format!("{}-{}", time_part, random_part)
-}
-
-async fn handle_upload(req: HttpRequest, mut payload: Multipart) -> HttpResponse {
-    let site = req
-        .app_data::<web::Data<SiteConfig>>()
-        .expect("Missing SiteConfig?");
-    let s3_client = req
-        .app_data::<web::Data<S3Client>>()
-        .expect("Missing S3Client?");
-    let verification_service = req
-        .app_data::<web::Data<oauth::VerificationService>>()
-        .expect("Missing VerificationService?");
-
-    let auth_header = match req
-        .headers()
-        .get(header::AUTHORIZATION)
-        .and_then(|s| s.to_str().ok())
-    {
-        Some(auth_header) => auth_header,
-        None => return HttpResponse::Unauthorized().json(MicropubError::new("unauthorized")),
-    };
-
-    let access_token = match verification_service.validate(auth_header).await {
-        Ok(token) => token,
-        Err(e) => {
-            return HttpResponse::Unauthorized()
-                .json(MicropubError::with_description("unauthorized", e))
-        }
-    };
-
-    if !access_token.scopes().any(|s| s == "media") {
-        return HttpResponse::Unauthorized().json(MicropubError::new("unauthorized"));
-    }
-
-    // iterate over multipart stream
-    if let Ok(Some(field)) = payload.try_next().await {
-        let content_disp = field.content_disposition().unwrap();
-        let content_type = field.content_type().clone();
-        let filename = content_disp.get_filename();
-        let ext = filename.and_then(|f| f.rsplit('.').next());
-        let (classification, sep, suffix) = match content_type.type_() {
-            mime::IMAGE => ("photo", '.', ext),
-            mime::AUDIO => ("audio", '.', ext),
-            mime::VIDEO => ("video", '.', ext),
-            _ => ("file", '/', filename),
-        };
-
-        // This will be the key in S3.
-        let key = match suffix {
-            Some(ext) => format!("{}/{}{}{}", classification, random_id(), sep, ext),
-            None => format!("{}/{}", classification, random_id()),
-        };
-
-        // This will be the publicly accessible URL for the file.
-        let url = format!("{}/{}", site.media_url, key);
-
-        let mut metadata: HashMap<String, String> = HashMap::new();
-        metadata.insert(
-            "client-id".to_string(),
-            access_token.client_id().to_string(),
-        );
-        metadata.insert("author".to_string(), access_token.me().to_string());
-        if let Some(f) = filename {
-            metadata.insert("filename".to_string(), f.to_string());
-        }
-
-        let body = field
-            .map(|b| b.map(|b| b.to_vec()))
-            .try_concat()
-            .await
-            .unwrap();
-
-        let put_request = PutObjectRequest {
-            bucket: site.s3_bucket().to_owned(),
-            key,
-            body: Some(body.into()),
-            metadata: Some(metadata),
-            content_type: Some(content_type.to_string()),
-            ..Default::default()
-        };
-
-        match s3_client.put_object(put_request).await {
-            Ok(_) => {
-                return HttpResponse::Created()
-                    .header(header::LOCATION, url)
-                    .finish()
-            }
-            Err(e) => return HttpResponse::InternalServerError().body(format!("{}", e)),
-        };
-    }
-
-    HttpResponse::BadRequest().finish()
-}
-
 #[derive(Serialize, Deserialize, Clone)]
 #[serde(rename_all = "PascalCase")]
 pub struct SiteConfig {
@@ -216,10 +60,14 @@ async fn main() -> std::io::Result<()> {
     HttpServer::new(move || {
         App::new()
             .wrap(middleware::Logger::default())
+            .data(Client::new())
             .data(site_config.clone())
            .data(s3_client.clone())
             .data(oauth::VerificationService::new(token_endpoint.clone()))
-            .service(web::resource("/micropub/media").route(web::post().to(handle_upload)))
+            .service(
+                web::resource("/micropub/media").route(web::post().to(micropub::handle_upload)),
+            )
+            .configure(media::configure)
     })
     .bind(bind)?
     .run()
diff --git a/src/media.rs b/src/media.rs
new file mode 100644
index 0000000..2723816
--- /dev/null
+++ b/src/media.rs
@@ -0,0 +1,161 @@
+use actix_web::client::{Client, ClientResponse};
+use actix_web::error::{ErrorBadRequest, ErrorInternalServerError};
+use actix_web::http::header;
+use actix_web::{web, Error, HttpRequest, HttpResponse};
+
+use image::imageops::FilterType;
+use image::GenericImageView;
+use image::ImageFormat;
+
+use crate::SiteConfig;
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(
+        web::resource("/media/photo/{width:\\d+}x{height:\\d+}/{filename}")
+            .route(web::get().to(serve_photo)),
+    );
+    cfg.service(
+        web::resource("/media/{type}/{filename:.+}")
+            .route(web::get().to(serve_file))
+            .route(web::head().to(serve_file)),
+    );
+}
+
+async fn serve_photo(
+    req: HttpRequest,
+    config: web::Data<SiteConfig>,
+    client: web::Data<Client>,
+) -> Result<HttpResponse, Error> {
+    let width = req
+        .match_info()
+        .get("width")
+        .ok_or(ErrorBadRequest("Bad URI"))
+        .and_then(|v| v.parse().map_err(|_| ErrorBadRequest("Bad URI")))?;
+    let height = req
+        .match_info()
+        .get("height")
+        .ok_or(ErrorBadRequest("Bad URI"))
+        .and_then(|v| v.parse().map_err(|_| ErrorBadRequest("Bad URI")))?;
+    let filename = req
+        .match_info()
+        .get("filename")
+        .ok_or(ErrorBadRequest("Bad URI"))?;
+
+    let new_url = format!("{}/photo/{}", config.media_url(), filename);
+
+    let forwarded_req = client.request_from(new_url, req.head());
+    let forwarded_req = if let Some(addr) = req.head().peer_addr {
+        forwarded_req.header("x-forwarded-for", format!("{}", addr.ip()))
+    } else {
+        forwarded_req
+    };
+
+    let mut res = forwarded_req.send().await.map_err(Error::from)?;
+
+    // Check response code
+    if !res.status().is_success() {
+        return forward_response(res).await;
+    }
+
+    // Get the payload, at most 20 MB of it...
+    let data = res.body().limit(20971520).await?;
+
+    // Determine the image format
+    let fmt = image::guess_format(data.as_ref()).map_err(|e| ErrorInternalServerError(e))?;
+
+    // Parse the image
+    let img = image::load_from_memory_with_format(data.as_ref(), fmt)
+        .map_err(|e| ErrorInternalServerError(e))?;
+
+    let (orig_width, orig_height) = img.dimensions();
+
+    let scaled = if width < orig_width && height < orig_height {
+        // Take the largest size that maintains the aspect ratio
+        let ratio = orig_width as f64 / orig_height as f64;
+        let (new_width, new_height) = if width > height {
+            (width, (width as f64 / ratio) as u32)
+        } else {
+            ((height as f64 * ratio) as u32, height)
+        };
+        img.resize(new_width, new_height, FilterType::CatmullRom)
+    } else {
+        // We're not going to scale up images.
+ img + }; + + let mut new_data = Vec::new(); + scaled + .write_to(&mut new_data, fmt) // ImageOutputFormat::Jpeg(128)) + .map_err(|e| ErrorInternalServerError(e))?; + + let mut client_resp = HttpResponse::build(res.status()); + client_resp.set(header::CacheControl(vec![header::CacheDirective::MaxAge( + 86400u32, + )])); + client_resp.set_header(header::CONTENT_TYPE, mime_for_image(fmt)); + + Ok(client_resp.body(new_data)) +} + +async fn serve_file( + req: HttpRequest, + config: web::Data<SiteConfig>, + client: web::Data<Client>, +) -> Result<HttpResponse, Error> { + let media_type = req + .match_info() + .get("type") + .ok_or(ErrorBadRequest("Bad URI"))?; + let filename = req + .match_info() + .get("filename") + .ok_or(ErrorBadRequest("Bad URI"))?; + + let new_url = format!("{}/{}/{}", config.media_url(), media_type, filename); + + let forwarded_req = client.request_from(new_url, req.head()).no_decompress(); + + let forwarded_req = if let Some(addr) = req.head().peer_addr { + forwarded_req.header("x-forwarded-for", format!("{}", addr.ip())) + } else { + forwarded_req + }; + + let res = forwarded_req.send().await.map_err(Error::from)?; + + forward_response(res).await +} + +async fn forward_response<S>(mut res: ClientResponse<S>) -> Result<HttpResponse, Error> +where + S: futures::Stream<Item = std::result::Result<bytes::Bytes, actix_web::error::PayloadError>> + + std::marker::Unpin, +{ + let mut client_resp = HttpResponse::build(res.status()); + + // Remove `Connection` as per + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Connection#Directives + for (header_name, header_value) in res.headers().iter().filter(|(h, _)| *h != "connection") { + client_resp.header(header_name.clone(), header_value.clone()); + } + + Ok(client_resp.body(res.body().limit(2147483648).await?)) +} + +fn mime_for_image(fmt: ImageFormat) -> &'static str { + match fmt { + ImageFormat::Png => "image/png", + ImageFormat::Jpeg => "image/jpeg", + ImageFormat::Gif => "image/gif", + ImageFormat::Tiff => "image/tiff", + ImageFormat::Ico => "image/vnd.microsoft.icon", + ImageFormat::WebP => "image/webp", + ImageFormat::Bmp => "image/bmp", + ImageFormat::Pnm => "image/x-portable-anymap", + ImageFormat::Tga => "image/x-tga", + ImageFormat::Dds => "image/vnd.ms-dds", + ImageFormat::Hdr => "image/vnd.radiance", + ImageFormat::Farbfeld => "image/farbfeld", + _ => "", + } +} diff --git a/src/micropub.rs b/src/micropub.rs new file mode 100644 index 0000000..1afe7ba --- /dev/null +++ b/src/micropub.rs @@ -0,0 +1,168 @@ +use actix_multipart::Multipart; +use actix_web::http::header; +use actix_web::{web, HttpRequest, HttpResponse}; + +use chrono::Utc; + +use futures::{StreamExt, TryStreamExt}; + +use rand::distributions::Alphanumeric; +use rand::{thread_rng, Rng}; + +use rusoto_s3::{PutObjectRequest, S3Client, S3}; + +use serde::{Deserialize, Serialize}; + +use std::collections::HashMap; +use std::fmt::Display; +use std::iter; + +use crate::oauth; +use crate::SiteConfig; + +// To make the timepart shorter, we'll offset it with a custom epoch. 
+const EPOCH: i64 = 631152000;
+
+#[derive(Serialize, Deserialize)]
+struct MicropubError {
+    error: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    error_description: Option<String>,
+}
+
+impl MicropubError {
+    pub fn new<S>(err: S) -> Self
+    where
+        S: Into<String>,
+    {
+        MicropubError {
+            error: err.into(),
+            error_description: None,
+        }
+    }
+
+    pub fn with_description<S, D>(err: S, description: D) -> Self
+    where
+        S: Into<String>,
+        D: Display,
+    {
+        MicropubError {
+            error: err.into(),
+            error_description: Some(format!("{}", description)),
+        }
+    }
+}
+
+fn random_id() -> String {
+    let now = Utc::now();
+
+    // Generate the time part
+    let ts = now.timestamp() - EPOCH;
+    let offset = (ts.leading_zeros() / 8) as usize;
+    let time_part = base32::encode(
+        base32::Alphabet::RFC4648 { padding: false },
+        &ts.to_be_bytes()[offset..],
+    );
+
+    // Generate the random part
+    let mut rng = thread_rng();
+    let random_part: String = iter::repeat(())
+        .map(|()| rng.sample(Alphanumeric))
+        .take(7)
+        .collect();
+
+    format!("{}-{}", time_part, random_part)
+}
+
+pub async fn handle_upload(req: HttpRequest, mut payload: Multipart) -> HttpResponse {
+    let site = req
+        .app_data::<web::Data<SiteConfig>>()
+        .expect("Missing SiteConfig?");
+    let s3_client = req
+        .app_data::<web::Data<S3Client>>()
+        .expect("Missing S3Client?");
+    let verification_service = req
+        .app_data::<web::Data<oauth::VerificationService>>()
+        .expect("Missing VerificationService?");
+
+    let auth_header = match req
+        .headers()
+        .get(header::AUTHORIZATION)
+        .and_then(|s| s.to_str().ok())
+    {
+        Some(auth_header) => auth_header,
+        None => return HttpResponse::Unauthorized().json(MicropubError::new("unauthorized")),
+    };
+
+    let access_token = match verification_service.validate(auth_header).await {
+        Ok(token) => token,
+        Err(e) => {
+            return HttpResponse::Unauthorized()
+                .json(MicropubError::with_description("unauthorized", e))
+        }
+    };
+
+    if !access_token.scopes().any(|s| s == "media") {
+        return HttpResponse::Unauthorized().json(MicropubError::new("unauthorized"));
+    }
+
+    // iterate over multipart stream
+    if let Ok(Some(field)) = payload.try_next().await {
+        let content_disp = field.content_disposition().unwrap();
+        let content_type = field.content_type().clone();
+        let filename = content_disp.get_filename();
+        let ext = filename.and_then(|f| f.rsplit('.').next());
+        let (classification, sep, suffix) = match content_type.type_() {
+            mime::IMAGE => ("photo", '.', ext),
+            mime::AUDIO => ("audio", '.', ext),
+            mime::VIDEO => ("video", '.', ext),
+            _ => ("file", '/', filename),
+        };
+
+        // This will be the key in S3.
+        let key = match suffix {
+            Some(ext) => format!("{}/{}{}{}", classification, random_id(), sep, ext),
+            None => format!("{}/{}", classification, random_id()),
+        };
+
+        // This will be the publicly accessible URL for the file.
+        let url = format!("{}/{}", site.media_url, key);
+
+        let mut metadata: HashMap<String, String> = HashMap::new();
+        metadata.insert(
+            "client-id".to_string(),
+            access_token.client_id().to_string(),
+        );
+        metadata.insert("author".to_string(), access_token.me().to_string());
+        if let Some(f) = filename {
+            metadata.insert("filename".to_string(), f.to_string());
+        }
+
+        let body = field
+            .map(|b| b.map(|b| b.to_vec()))
+            .try_concat()
+            .await
+            .unwrap();
+
+        let put_request = PutObjectRequest {
+            bucket: site.s3_bucket().to_owned(),
+            key,
+            body: Some(body.into()),
+            metadata: Some(metadata),
+            content_type: Some(content_type.to_string()),
+            ..Default::default()
+        };
+
+        match s3_client.put_object(put_request).await {
+            Ok(_) => {
+                return HttpResponse::Created()
+                    // Note: header must have a big L
+                    .header("Location", url)
+                    .finish();
+            }
+            Err(e) => return HttpResponse::InternalServerError().body(format!("{}", e)),
+        };
+    }
+
+    HttpResponse::BadRequest().finish()
+}