diff --git a/tower-http/Cargo.toml b/tower-http/Cargo.toml index d153d18f..e31ed622 100644 --- a/tower-http/Cargo.toml +++ b/tower-http/Cargo.toml @@ -79,7 +79,7 @@ auth = ["base64"] catch-panic = ["tracing", "futures-util/std"] cors = [] follow-redirect = ["iri-string", "tower/util"] -fs = ["tokio/fs", "tokio-util/io", "tokio/io-util", "mime_guess", "mime", "percent-encoding", "httpdate", "set-status"] +fs = ["tokio/fs", "tokio-util/io", "tokio/io-util", "mime_guess", "mime", "percent-encoding", "httpdate", "set-status", "futures-util/alloc"] map-request-body = [] map-response-body = [] metrics = ["tokio/time"] diff --git a/tower-http/src/services/fs/mod.rs b/tower-http/src/services/fs/mod.rs index b05526b4..ce6ef463 100644 --- a/tower-http/src/services/fs/mod.rs +++ b/tower-http/src/services/fs/mod.rs @@ -2,139 +2,31 @@ use bytes::Bytes; use futures_util::Stream; -use http::{HeaderMap, StatusCode}; +use http::HeaderMap; use http_body::Body; -use httpdate::HttpDate; use pin_project_lite::pin_project; -use std::fs::Metadata; -use std::{ffi::OsStr, path::PathBuf}; use std::{ io, pin::Pin, task::{Context, Poll}, - time::SystemTime, }; -use tokio::fs::File; use tokio::io::{AsyncRead, AsyncReadExt, Take}; use tokio_util::io::ReaderStream; mod serve_dir; mod serve_file; -// default capacity 64KiB -const DEFAULT_CAPACITY: usize = 65536; - -use crate::content_encoding::{Encoding, QValue, SupportedEncodings}; - pub use self::{ serve_dir::{ + future::ResponseFuture as ServeFileSystemResponseFuture, DefaultServeDirFallback, // The response body and future are used for both ServeDir and ServeFile ResponseBody as ServeFileSystemResponseBody, - ResponseFuture as ServeFileSystemResponseFuture, ServeDir, }, serve_file::ServeFile, }; -#[derive(Clone, Copy, Debug, Default)] -struct PrecompressedVariants { - gzip: bool, - deflate: bool, - br: bool, -} - -impl SupportedEncodings for PrecompressedVariants { - fn gzip(&self) -> bool { - self.gzip - } - - fn deflate(&self) -> bool { - self.deflate - } - - fn br(&self) -> bool { - self.br - } -} - -// Returns the preferred_encoding encoding and modifies the path extension -// to the corresponding file extension for the encoding. -fn preferred_encoding( - path: &mut PathBuf, - negotiated_encoding: &[(Encoding, QValue)], -) -> Option { - let preferred_encoding = Encoding::preferred_encoding(negotiated_encoding); - if let Some(file_extension) = - preferred_encoding.and_then(|encoding| encoding.to_file_extension()) - { - let new_extension = path - .extension() - .map(|extension| { - let mut os_string = extension.to_os_string(); - os_string.push(file_extension); - os_string - }) - .unwrap_or_else(|| file_extension.to_os_string()); - path.set_extension(new_extension); - } - preferred_encoding -} - -// Attempts to open the file with any of the possible negotiated_encodings in the -// preferred order. If none of the negotiated_encodings have a corresponding precompressed -// file the uncompressed file is used as a fallback. -async fn open_file_with_fallback( - mut path: PathBuf, - mut negotiated_encoding: Vec<(Encoding, QValue)>, -) -> io::Result<(File, Option)> { - let (file, encoding) = loop { - // Get the preferred encoding among the negotiated ones. 
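The removed `preferred_encoding` helper relies on a `std::path` subtlety: `Path::set_extension` replaces the current extension rather than appending to it, so the compressed suffix has to be spliced in by hand. A standalone sketch of that trick (the function name is illustrative, not part of the crate):

```rust
use std::ffi::OsString;
use std::path::PathBuf;

// `set_extension("gz")` would turn `foo.txt` into `foo.gz`, so the existing
// extension is copied into an `OsString` and the compressed suffix is pushed
// onto it, producing `foo.txt.gz` instead.
fn append_encoding_suffix(path: &mut PathBuf, suffix: &str) {
    let new_extension = path
        .extension()
        .map(|ext| {
            let mut ext = ext.to_os_string();
            ext.push(".");
            ext.push(suffix);
            ext
        })
        .unwrap_or_else(|| OsString::from(suffix));
    path.set_extension(new_extension);
}

fn main() {
    let mut path = PathBuf::from("assets/foo.txt");
    append_encoding_suffix(&mut path, "gz");
    assert_eq!(path, PathBuf::from("assets/foo.txt.gz"));
}
```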
- let encoding = preferred_encoding(&mut path, &negotiated_encoding); - match (File::open(&path).await, encoding) { - (Ok(file), maybe_encoding) => break (file, maybe_encoding), - (Err(err), Some(encoding)) if err.kind() == io::ErrorKind::NotFound => { - // Remove the extension corresponding to a precompressed file (.gz, .br, .zz) - // to reset the path before the next iteration. - path.set_extension(OsStr::new("")); - // Remove the encoding from the negotiated_encodings since the file doesn't exist - negotiated_encoding - .retain(|(negotiated_encoding, _)| *negotiated_encoding != encoding); - continue; - } - (Err(err), _) => return Err(err), - }; - }; - Ok((file, encoding)) -} - -// Attempts to get the file metadata with any of the possible negotiated_encodings in the -// preferred order. If none of the negotiated_encodings have a corresponding precompressed -// file the uncompressed file is used as a fallback. -async fn file_metadata_with_fallback( - mut path: PathBuf, - mut negotiated_encoding: Vec<(Encoding, QValue)>, -) -> io::Result<(Metadata, Option)> { - let (file, encoding) = loop { - // Get the preferred encoding among the negotiated ones. - let encoding = preferred_encoding(&mut path, &negotiated_encoding); - match (tokio::fs::metadata(&path).await, encoding) { - (Ok(file), maybe_encoding) => break (file, maybe_encoding), - (Err(err), Some(encoding)) if err.kind() == io::ErrorKind::NotFound => { - // Remove the extension corresponding to a precompressed file (.gz, .br, .zz) - // to reset the path before the next iteration. - path.set_extension(OsStr::new("")); - // Remove the encoding from the negotiated_encodings since the file doesn't exist - negotiated_encoding - .retain(|(negotiated_encoding, _)| *negotiated_encoding != encoding); - continue; - } - (Err(err), _) => return Err(err), - }; - }; - Ok((file, encoding)) -} - pin_project! { // NOTE: This could potentially be upstreamed to `http-body`. /// Adapter that turns an `impl AsyncRead` to an `impl Body`. @@ -189,74 +81,3 @@ where Poll::Ready(Ok(None)) } } - -struct LastModified(HttpDate); - -impl From for LastModified { - fn from(time: SystemTime) -> Self { - LastModified(time.into()) - } -} - -struct IfUnmodifiedSince(HttpDate); -struct IfModifiedSince(HttpDate); - -impl IfModifiedSince { - /// Check if the supplied time means the resource has been modified. - fn is_modified(&self, last_modified: &LastModified) -> bool { - self.0 < last_modified.0 - } - - /// convert a header value into a IfModifiedSince, invalid values are silentely ignored - fn from_header_value(value: &http::header::HeaderValue) -> Option { - std::str::from_utf8(value.as_bytes()) - .ok() - .and_then(|value| httpdate::parse_http_date(&value).ok()) - .map(|time| IfModifiedSince(time.into())) - } -} - -impl IfUnmodifiedSince { - /// Check if the supplied time passes the precondtion. 
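`open_file_with_fallback` and `file_metadata_with_fallback` share the retry strategy removed here. A simplified, synchronous, std-only sketch of it; the encoding list is plain strings and its order stands in for the q-value negotiation the real code performs:

```rust
use std::{fs::File, io};

// Try the preferred encodings in order; if a precompressed variant is missing
// on disk, stop considering that encoding and retry, ending with the
// uncompressed file once the list is exhausted.
fn open_with_fallback(
    base: &str,
    mut encodings: Vec<&'static str>,
) -> io::Result<(File, Option<&'static str>)> {
    loop {
        let (path, encoding) = match encodings.first().copied() {
            Some(ext) => (format!("{}.{}", base, ext), Some(ext)),
            None => (base.to_owned(), None),
        };
        match (File::open(&path), encoding) {
            (Ok(file), maybe_encoding) => return Ok((file, maybe_encoding)),
            (Err(err), Some(ext)) if err.kind() == io::ErrorKind::NotFound => {
                // This variant doesn't exist; drop it and try the next one.
                encodings.retain(|e| *e != ext);
            }
            (Err(err), _) => return Err(err),
        }
    }
}
```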
- fn precondition_passes(&self, last_modified: &LastModified) -> bool { - self.0 >= last_modified.0 - } - - /// convert a header value into a IfModifiedSince, invalid values are silentely ignored - fn from_header_value(value: &http::header::HeaderValue) -> Option { - std::str::from_utf8(value.as_bytes()) - .ok() - .and_then(|value| httpdate::parse_http_date(&value).ok()) - .map(|time| IfUnmodifiedSince(time.into())) - } -} - -fn check_modified_headers( - modified: Option<&LastModified>, - if_unmodified_since: Option, - if_modified_since: Option, -) -> Option { - if let Some(since) = if_unmodified_since { - let precondition = modified - .as_ref() - .map(|time| since.precondition_passes(time)) - .unwrap_or(false); - - if !precondition { - return Some(StatusCode::PRECONDITION_FAILED); - } - } - - if let Some(since) = if_modified_since { - let unmodified = modified - .as_ref() - .map(|time| !since.is_modified(&time)) - // no last_modified means its always modified - .unwrap_or(false); - if unmodified { - return Some(StatusCode::NOT_MODIFIED); - } - } - - None -} diff --git a/tower-http/src/services/fs/serve_dir.rs b/tower-http/src/services/fs/serve_dir.rs deleted file mode 100644 index bc869587..00000000 --- a/tower-http/src/services/fs/serve_dir.rs +++ /dev/null @@ -1,1465 +0,0 @@ -use super::{ - check_modified_headers, open_file_with_fallback, AsyncReadBody, IfModifiedSince, - IfUnmodifiedSince, LastModified, PrecompressedVariants, -}; -use crate::{ - content_encoding::{encodings, Encoding}, - services::fs::{file_metadata_with_fallback, DEFAULT_CAPACITY}, - set_status::SetStatus, - BoxError, -}; -use bytes::Bytes; -use futures_util::ready; -use http::response::Builder; -use http::{header, HeaderValue, Method, Request, Response, StatusCode, Uri}; -use http_body::{combinators::UnsyncBoxBody, Body, Empty, Full}; -use http_range_header::RangeUnsatisfiableError; -use percent_encoding::percent_decode; -use pin_project_lite::pin_project; -use std::{ - convert::Infallible, - fs::Metadata, - future::Future, - io, - io::SeekFrom, - ops::RangeInclusive, - path::{Component, Path, PathBuf}, - pin::Pin, - task::{Context, Poll}, -}; -use tokio::{fs::File, io::AsyncSeekExt}; -use tower_service::Service; - -/// Service that serves files from a given directory and all its sub directories. -/// -/// The `Content-Type` will be guessed from the file extension. 
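The conditional-request logic from `check_modified_headers` (surfaced later in this patch as `OpenFileOutput::PreconditionFailed` and `OpenFileOutput::NotModified`) reduces to two comparisons. A dependency-free restatement with status codes as plain integers:

```rust
use std::time::SystemTime;

// `If-Unmodified-Since` is checked first and can produce `412 Precondition
// Failed`; `If-Modified-Since` can produce `304 Not Modified`. A missing
// `Last-Modified` date means the resource counts as always modified.
fn check_conditional(
    last_modified: Option<SystemTime>,
    if_unmodified_since: Option<SystemTime>,
    if_modified_since: Option<SystemTime>,
) -> Option<u16> {
    if let Some(since) = if_unmodified_since {
        let precondition_passes = last_modified.map(|lm| lm <= since).unwrap_or(false);
        if !precondition_passes {
            return Some(412); // PRECONDITION_FAILED
        }
    }
    if let Some(since) = if_modified_since {
        let unmodified = last_modified.map(|lm| lm <= since).unwrap_or(false);
        if unmodified {
            return Some(304); // NOT_MODIFIED
        }
    }
    None
}
```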
-/// -/// An empty response with status `404 Not Found` will be returned if: -/// -/// - The file doesn't exist -/// - Any segment of the path contains `..` -/// - Any segment of the path contains a backslash -/// - We don't have necessary permissions to read the file -/// -/// # Example -/// -/// ``` -/// use tower_http::services::ServeDir; -/// -/// // This will serve files in the "assets" directory and -/// // its subdirectories -/// let service = ServeDir::new("assets"); -/// -/// # async { -/// // Run our service using `hyper` -/// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 3000)); -/// hyper::Server::bind(&addr) -/// .serve(tower::make::Shared::new(service)) -/// .await -/// .expect("server error"); -/// # }; -/// ``` -#[derive(Clone, Debug)] -pub struct ServeDir { - base: PathBuf, - buf_chunk_size: usize, - precompressed_variants: Option, - // This is used to specialise implementation for - // single files - variant: ServeVariant, - fallback: Option, -} - -// Allow the ServeDir service to be used in the ServeFile service -// with almost no overhead -#[derive(Clone, Debug)] -enum ServeVariant { - Directory { - append_index_html_on_directories: bool, - }, - SingleFile { - mime: HeaderValue, - }, -} - -impl ServeVariant { - fn build_and_validate_path(&self, base_path: &Path, requested_path: &str) -> Option { - match self { - ServeVariant::Directory { - append_index_html_on_directories: _, - } => { - let path = requested_path.trim_start_matches('/'); - - let path_decoded = percent_decode(path.as_ref()).decode_utf8().ok()?; - let path_decoded = Path::new(&*path_decoded); - - let mut full_path = base_path.to_path_buf(); - for component in path_decoded.components() { - match component { - Component::Normal(comp) => { - // protect against paths like `/foo/c:/bar/baz` (#204) - if Path::new(&comp) - .components() - .all(|c| matches!(c, Component::Normal(_))) - { - full_path.push(comp) - } else { - return None; - } - } - Component::CurDir => {} - Component::Prefix(_) | Component::RootDir | Component::ParentDir => { - return None; - } - } - } - Some(full_path) - } - ServeVariant::SingleFile { mime: _ } => Some(base_path.to_path_buf()), - } - } -} - -impl ServeDir { - /// Create a new [`ServeDir`]. - pub fn new>(path: P) -> Self { - let mut base = PathBuf::from("."); - base.push(path.as_ref()); - - Self { - base, - buf_chunk_size: DEFAULT_CAPACITY, - precompressed_variants: None, - variant: ServeVariant::Directory { - append_index_html_on_directories: true, - }, - fallback: None, - } - } - - pub(crate) fn new_single_file>(path: P, mime: HeaderValue) -> Self { - Self { - base: path.as_ref().to_owned(), - buf_chunk_size: DEFAULT_CAPACITY, - precompressed_variants: None, - variant: ServeVariant::SingleFile { mime }, - fallback: None, - } - } -} - -impl ServeDir { - /// If the requested path is a directory append `index.html`. - /// - /// This is useful for static sites. - /// - /// Defaults to `true`. - pub fn append_index_html_on_directories(mut self, append: bool) -> Self { - match &mut self.variant { - ServeVariant::Directory { - append_index_html_on_directories, - } => { - *append_index_html_on_directories = append; - self - } - ServeVariant::SingleFile { mime: _ } => self, - } - } - - /// Set a specific read buffer chunk size. - /// - /// The default capacity is 64kb. 
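`build_and_validate_path` is the traversal guard for the whole service. A minimal std-only sketch of the same idea; it omits the extra per-component check against Windows prefixes such as `c:` that the real code also performs:

```rust
use std::path::{Component, Path, PathBuf};

// Only `Normal` components from the (already percent-decoded) request path are
// appended to the base directory; `..`, absolute paths and prefixes are rejected.
fn sanitize(base: &Path, requested: &str) -> Option<PathBuf> {
    let mut full_path = base.to_path_buf();
    for component in Path::new(requested.trim_start_matches('/')).components() {
        match component {
            Component::Normal(comp) => full_path.push(comp),
            Component::CurDir => {}
            Component::Prefix(_) | Component::RootDir | Component::ParentDir => return None,
        }
    }
    Some(full_path)
}

fn main() {
    assert_eq!(
        sanitize(Path::new("assets"), "/css/site.css"),
        Some(PathBuf::from("assets/css/site.css"))
    );
    assert_eq!(sanitize(Path::new("assets"), "/../secret.txt"), None);
}
```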
- pub fn with_buf_chunk_size(mut self, chunk_size: usize) -> Self { - self.buf_chunk_size = chunk_size; - self - } - - /// Informs the service that it should also look for a precompressed gzip - /// version of _any_ file in the directory. - /// - /// Assuming the `dir` directory is being served and `dir/foo.txt` is requested, - /// a client with an `Accept-Encoding` header that allows the gzip encoding - /// will receive the file `dir/foo.txt.gz` instead of `dir/foo.txt`. - /// If the precompressed file is not available, or the client doesn't support it, - /// the uncompressed version will be served instead. - /// Both the precompressed version and the uncompressed version are expected - /// to be present in the directory. Different precompressed variants can be combined. - pub fn precompressed_gzip(mut self) -> Self { - self.precompressed_variants - .get_or_insert(Default::default()) - .gzip = true; - self - } - - /// Informs the service that it should also look for a precompressed brotli - /// version of _any_ file in the directory. - /// - /// Assuming the `dir` directory is being served and `dir/foo.txt` is requested, - /// a client with an `Accept-Encoding` header that allows the brotli encoding - /// will receive the file `dir/foo.txt.br` instead of `dir/foo.txt`. - /// If the precompressed file is not available, or the client doesn't support it, - /// the uncompressed version will be served instead. - /// Both the precompressed version and the uncompressed version are expected - /// to be present in the directory. Different precompressed variants can be combined. - pub fn precompressed_br(mut self) -> Self { - self.precompressed_variants - .get_or_insert(Default::default()) - .br = true; - self - } - - /// Informs the service that it should also look for a precompressed deflate - /// version of _any_ file in the directory. - /// - /// Assuming the `dir` directory is being served and `dir/foo.txt` is requested, - /// a client with an `Accept-Encoding` header that allows the deflate encoding - /// will receive the file `dir/foo.txt.zz` instead of `dir/foo.txt`. - /// If the precompressed file is not available, or the client doesn't support it, - /// the uncompressed version will be served instead. - /// Both the precompressed version and the uncompressed version are expected - /// to be present in the directory. Different precompressed variants can be combined. - pub fn precompressed_deflate(mut self) -> Self { - self.precompressed_variants - .get_or_insert(Default::default()) - .deflate = true; - self - } - - /// Set the fallback service. - /// - /// This service will be called if there is no file at the path of the request. - /// - /// The status code returned by the fallback will not be altered. Use - /// [`ServeDir::not_found_service`] to set a fallback and always respond with `404 Not Found`. 
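Since the three `precompressed_*` builders can be combined, here is a short usage sketch; the directory and file names are hypothetical:

```rust
use tower_http::services::ServeDir;

fn main() {
    // If `assets/app.js.gz` and `assets/app.js.br` exist next to `assets/app.js`,
    // the service picks one based on the request's `Accept-Encoding` header and
    // falls back to the uncompressed file otherwise.
    let _service = ServeDir::new("assets")
        .precompressed_gzip()
        .precompressed_br();
}
```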
- /// - /// # Example - /// - /// This can be used to respond with a different file: - /// - /// ```rust - /// use tower_http::services::{ServeDir, ServeFile}; - /// - /// let service = ServeDir::new("assets") - /// // respond with `not_found.html` for missing files - /// .fallback(ServeFile::new("assets/not_found.html")); - /// - /// # async { - /// // Run our service using `hyper` - /// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 3000)); - /// hyper::Server::bind(&addr) - /// .serve(tower::make::Shared::new(service)) - /// .await - /// .expect("server error"); - /// # }; - /// ``` - pub fn fallback(self, new_fallback: F2) -> ServeDir { - ServeDir { - base: self.base, - buf_chunk_size: self.buf_chunk_size, - precompressed_variants: self.precompressed_variants, - variant: self.variant, - fallback: Some(new_fallback), - } - } - - /// Set the fallback service and override the fallback's status code to `404 Not Found`. - /// - /// This service will be called if there is no file at the path of the request. - /// - /// # Example - /// - /// This can be used to respond with a different file: - /// - /// ```rust - /// use tower_http::services::{ServeDir, ServeFile}; - /// - /// let service = ServeDir::new("assets") - /// // respond with `404 Not Found` and the contents of `not_found.html` for missing files - /// .not_found_service(ServeFile::new("assets/not_found.html")); - /// - /// # async { - /// // Run our service using `hyper` - /// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 3000)); - /// hyper::Server::bind(&addr) - /// .serve(tower::make::Shared::new(service)) - /// .await - /// .expect("server error"); - /// # }; - /// ``` - /// - /// Setups like this are often found in single page applications. - pub fn not_found_service(self, new_fallback: F2) -> ServeDir> { - self.fallback(SetStatus::new(new_fallback, StatusCode::NOT_FOUND)) - } -} - -async fn maybe_redirect_or_append_path( - full_path: &mut PathBuf, - uri: Uri, - append_index_html_on_directories: bool, -) -> Option { - if !uri.path().ends_with('/') { - if is_dir(full_path).await { - let location = HeaderValue::from_str(&append_slash_on_path(uri).to_string()).unwrap(); - return Some(Output::Redirect(location)); - } else { - return None; - } - } else if is_dir(full_path).await { - if append_index_html_on_directories { - full_path.push("index.html"); - return None; - } else { - return Some(Output::StatusCode(StatusCode::NOT_FOUND)); - } - } - None -} - -impl Service> for ServeDir -where - F: Service, Response = Response> + Clone, - F::Error: Into, - F::Future: Send + 'static, - FResBody: http_body::Body + Send + 'static, - FResBody::Error: Into>, -{ - type Response = Response; - type Error = io::Error; - type Future = ResponseFuture; - - #[inline] - fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { - if let Some(fallback) = &mut self.fallback { - fallback.poll_ready(cx).map_err(Into::into) - } else { - Poll::Ready(Ok(())) - } - } - - fn call(&mut self, req: Request) -> Self::Future { - if req.method() != Method::GET && req.method() != Method::HEAD { - return ResponseFuture { - inner: ResponseFutureInner::MethodNotAllowed, - }; - } - - // `ServeDir` doesn't care about the request body but the fallback might. 
So move out the - // body and pass it to the fallback, leaving an empty body in its place - // - // this is necessary because we cannot clone bodies - let (mut parts, body) = req.into_parts(); - // same goes for extensions - let extensions = std::mem::take(&mut parts.extensions); - let req = Request::from_parts(parts, Empty::::new()); - - let mut full_path = match self - .variant - .build_and_validate_path(&self.base, req.uri().path()) - { - Some(full_path) => full_path, - None => { - return ResponseFuture { - inner: ResponseFutureInner::InvalidPath, - }; - } - }; - - let fallback_and_request = self.fallback.as_mut().map(|fallback| { - let mut req = Request::new(body); - *req.method_mut() = req.method().clone(); - *req.uri_mut() = req.uri().clone(); - *req.headers_mut() = req.headers().clone(); - *req.extensions_mut() = extensions; - - // get the ready fallback and leave a non-ready clone in its place - let clone = fallback.clone(); - let fallback = std::mem::replace(fallback, clone); - - (fallback, req) - }); - - let buf_chunk_size = self.buf_chunk_size; - let uri = req.uri().clone(); - let range_header = req - .headers() - .get(header::RANGE) - .and_then(|value| value.to_str().ok().map(|s| s.to_owned())); - - // The negotiated encodings based on the Accept-Encoding header and - // precompressed variants - let negotiated_encodings = encodings( - req.headers(), - self.precompressed_variants.unwrap_or_default(), - ); - - let if_unmodified_since = req - .headers() - .get(header::IF_UNMODIFIED_SINCE) - .and_then(IfUnmodifiedSince::from_header_value); - let if_modified_since = req - .headers() - .get(header::IF_MODIFIED_SINCE) - .and_then(IfModifiedSince::from_header_value); - - let request_method = req.method().clone(); - let variant = self.variant.clone(); - - let open_file_future = Box::pin(async move { - let mime = match variant { - ServeVariant::Directory { - append_index_html_on_directories, - } => { - // Might already at this point know a redirect or not found result should be - // returned which corresponds to a Some(output). Otherwise the path might be - // modified and proceed to the open file/metadata future. 
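The decision made by `maybe_redirect_or_append_path` is easier to follow without the async metadata lookup. A simplified sketch where the directory check is passed in as a flag:

```rust
use std::path::PathBuf;

enum DirOutcome {
    /// Redirect to the same URI with a trailing slash appended.
    RedirectWithSlash,
    /// Directory index is disabled, so answer 404.
    NotFound,
    /// Keep going and open `full_path` as a file.
    Proceed,
}

// `is_dir` stands in for the `tokio::fs::metadata` call in the real code.
fn dir_outcome(full_path: &mut PathBuf, uri_path: &str, append_index: bool, is_dir: bool) -> DirOutcome {
    if !is_dir {
        return DirOutcome::Proceed;
    }
    if !uri_path.ends_with('/') {
        DirOutcome::RedirectWithSlash
    } else if append_index {
        full_path.push("index.html");
        DirOutcome::Proceed
    } else {
        DirOutcome::NotFound
    }
}
```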
- if let Some(output) = maybe_redirect_or_append_path( - &mut full_path, - uri, - append_index_html_on_directories, - ) - .await - { - return Ok(output); - } - let guess = mime_guess::from_path(&full_path); - guess - .first_raw() - .map(HeaderValue::from_static) - .unwrap_or_else(|| { - HeaderValue::from_str(mime::APPLICATION_OCTET_STREAM.as_ref()).unwrap() - }) - } - ServeVariant::SingleFile { mime } => mime, - }; - - match request_method { - Method::HEAD => { - let (meta, maybe_encoding) = - file_metadata_with_fallback(full_path, negotiated_encodings).await?; - - let last_modified = meta.modified().ok().map(LastModified::from); - if let Some(status_code) = check_modified_headers( - last_modified.as_ref(), - if_unmodified_since, - if_modified_since, - ) { - return Ok(Output::StatusCode(status_code)); - } - - let maybe_range = try_parse_range(range_header.as_ref(), meta.len()); - Ok(Output::File(FileRequest { - extent: FileRequestExtent::Head(meta), - chunk_size: buf_chunk_size, - mime_header_value: mime, - maybe_encoding, - maybe_range, - last_modified, - })) - } - _ => { - let (mut file, maybe_encoding) = - open_file_with_fallback(full_path, negotiated_encodings).await?; - let meta = file.metadata().await?; - let last_modified = meta.modified().ok().map(LastModified::from); - if let Some(status_code) = check_modified_headers( - last_modified.as_ref(), - if_unmodified_since, - if_modified_since, - ) { - return Ok(Output::StatusCode(status_code)); - } - - let maybe_range = try_parse_range(range_header.as_ref(), meta.len()); - if let Some(Ok(ranges)) = maybe_range.as_ref() { - // If there is any other amount of ranges than 1 we'll return an unsatisfiable later as there isn't yet support for multipart ranges - if ranges.len() == 1 { - file.seek(SeekFrom::Start(*ranges[0].start())).await?; - } - } - Ok(Output::File(FileRequest { - extent: FileRequestExtent::Full(file, meta), - chunk_size: buf_chunk_size, - mime_header_value: mime, - maybe_encoding, - maybe_range, - last_modified, - })) - } - } - }); - - ResponseFuture { - inner: ResponseFutureInner::OpenFileFuture { - future: open_file_future, - fallback_and_request, - }, - } - } -} - -fn try_parse_range( - maybe_range_ref: Option<&String>, - file_size: u64, -) -> Option>, RangeUnsatisfiableError>> { - maybe_range_ref.map(|header_value| { - http_range_header::parse_range_header(header_value) - .and_then(|first_pass| first_pass.validate(file_size)) - }) -} - -async fn is_dir(full_path: &Path) -> bool { - tokio::fs::metadata(full_path) - .await - .map(|m| m.is_dir()) - .unwrap_or(false) -} - -fn append_slash_on_path(uri: Uri) -> Uri { - let http::uri::Parts { - scheme, - authority, - path_and_query, - .. 
- } = uri.into_parts(); - - let mut builder = Uri::builder(); - if let Some(scheme) = scheme { - builder = builder.scheme(scheme); - } - if let Some(authority) = authority { - builder = builder.authority(authority); - } - if let Some(path_and_query) = path_and_query { - if let Some(query) = path_and_query.query() { - builder = builder.path_and_query(format!("{}/?{}", path_and_query.path(), query)); - } else { - builder = builder.path_and_query(format!("{}/", path_and_query.path())); - } - } else { - builder = builder.path_and_query("/"); - } - - builder.build().unwrap() -} - -#[allow(clippy::large_enum_variant)] -enum Output { - File(FileRequest), - Redirect(HeaderValue), - StatusCode(StatusCode), -} - -struct FileRequest { - extent: FileRequestExtent, - chunk_size: usize, - mime_header_value: HeaderValue, - maybe_encoding: Option, - maybe_range: Option>, RangeUnsatisfiableError>>, - last_modified: Option, -} - -enum FileRequestExtent { - Full(File, Metadata), - Head(Metadata), -} - -type BoxFuture = Pin + Send + 'static>>; - -pin_project! { - /// Response future of [`ServeDir`]. - pub struct ResponseFuture { - #[pin] - inner: ResponseFutureInner, - } -} - -pin_project! { - #[project = ResponseFutureInnerProj] - enum ResponseFutureInner { - OpenFileFuture { - #[pin] - future: BoxFuture>, - fallback_and_request: Option<(F, Request)>, - }, - FallbackFuture { - future: BoxFuture>>, - }, - InvalidPath, - MethodNotAllowed, - } -} - -impl Future for ResponseFuture -where - F: Service, Response = Response> + Clone, - F::Error: Into, - F::Future: Send + 'static, - ResBody: http_body::Body + Send + 'static, - ResBody::Error: Into>, -{ - type Output = io::Result>; - - fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - loop { - let mut this = self.as_mut().project(); - - let new_state = match this.inner.as_mut().project() { - ResponseFutureInnerProj::OpenFileFuture { - future: open_file_future, - fallback_and_request, - } => match ready!(open_file_future.poll(cx)) { - Ok(Output::File(file_request)) => { - let (maybe_file, size) = match file_request.extent { - FileRequestExtent::Full(file, meta) => (Some(file), meta.len()), - FileRequestExtent::Head(meta) => (None, meta.len()), - }; - let mut builder = Response::builder() - .header(header::CONTENT_TYPE, file_request.mime_header_value) - .header(header::ACCEPT_RANGES, "bytes"); - if let Some(encoding) = file_request.maybe_encoding { - builder = builder - .header(header::CONTENT_ENCODING, encoding.into_header_value()); - } - if let Some(last_modified) = file_request.last_modified { - builder = - builder.header(header::LAST_MODIFIED, last_modified.0.to_string()); - } - - let res = handle_file_request( - builder, - maybe_file, - file_request.maybe_range, - file_request.chunk_size, - size, - ); - return Poll::Ready(Ok(res.unwrap())); - } - - Ok(Output::Redirect(location)) => { - let res = Response::builder() - .header(http::header::LOCATION, location) - .status(StatusCode::TEMPORARY_REDIRECT) - .body(empty_body()) - .unwrap(); - return Poll::Ready(Ok(res)); - } - - Ok(Output::StatusCode(code)) => { - let res = Response::builder().status(code).body(empty_body()).unwrap(); - - return Poll::Ready(Ok(res)); - } - - Err(err) => match err.kind() { - io::ErrorKind::NotFound | io::ErrorKind::PermissionDenied => { - if let Some((mut fallback, request)) = fallback_and_request.take() { - call_fallback(&mut fallback, request) - } else { - return Poll::Ready(not_found()); - } - } - _ => return Poll::Ready(Err(err)), - }, - }, - - 
ResponseFutureInnerProj::FallbackFuture { future } => { - return Pin::new(future).poll(cx) - } - - ResponseFutureInnerProj::InvalidPath => { - return Poll::Ready(not_found()); - } - - ResponseFutureInnerProj::MethodNotAllowed => { - return Poll::Ready(method_not_allowed()); - } - }; - - this.inner.set(new_state); - } - } -} - -fn handle_file_request( - builder: Builder, - maybe_file: Option, - maybe_range: Option>, RangeUnsatisfiableError>>, - chunk_size: usize, - size: u64, -) -> Result, http::Error> { - match maybe_range { - Some(Ok(ranges)) => { - if let Some(range) = ranges.first() { - if ranges.len() > 1 { - builder - .header(header::CONTENT_RANGE, format!("bytes */{}", size)) - .status(StatusCode::RANGE_NOT_SATISFIABLE) - .body(body_from_bytes(Bytes::from( - "Cannot serve multipart range requests", - ))) - } else { - let range_size = range.end() - range.start() + 1; - let body = if let Some(file) = maybe_file { - let body = - AsyncReadBody::with_capacity_limited(file, chunk_size, range_size) - .boxed_unsync(); - ResponseBody::new(body) - } else { - empty_body() - }; - builder - .header( - header::CONTENT_RANGE, - format!("bytes {}-{}/{}", range.start(), range.end(), size), - ) - .header(header::CONTENT_LENGTH, range.end() - range.start() + 1) - .status(StatusCode::PARTIAL_CONTENT) - .body(body) - } - } else { - builder - .header(header::CONTENT_RANGE, format!("bytes */{}", size)) - .status(StatusCode::RANGE_NOT_SATISFIABLE) - .body(body_from_bytes(Bytes::from( - "No range found after parsing range header, please file an issue", - ))) - } - } - Some(Err(_)) => builder - .header(header::CONTENT_RANGE, format!("bytes */{}", size)) - .status(StatusCode::RANGE_NOT_SATISFIABLE) - .body(empty_body()), - // Not a range request - None => { - let body = if let Some(file) = maybe_file { - let box_body = AsyncReadBody::with_capacity(file, chunk_size).boxed_unsync(); - ResponseBody::new(box_body) - } else { - empty_body() - }; - builder - .header(header::CONTENT_LENGTH, size.to_string()) - .body(body) - } - } -} - -fn empty_body() -> ResponseBody { - let body = Empty::new().map_err(|err| match err {}).boxed_unsync(); - ResponseBody::new(body) -} - -fn body_from_bytes(bytes: Bytes) -> ResponseBody { - let body = Full::from(bytes).map_err(|err| match err {}).boxed_unsync(); - ResponseBody::new(body) -} - -opaque_body! { - /// Response body for [`ServeDir`] and [`ServeFile`]. - pub type ResponseBody = UnsyncBoxBody; -} - -/// The default fallback service used with [`ServeDir`]. 
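`handle_file_request` maps the parsed `Range` header onto three response shapes. The header arithmetic on its own, stripped of the body plumbing (plain integers stand in for `StatusCode`):

```rust
// A single satisfiable byte range becomes `206 Partial Content` with
// `Content-Range: bytes start-end/size` and a body of `end - start + 1` bytes;
// multiple ranges or an unsatisfiable/unparsable header become
// `416 Range Not Satisfiable` with `Content-Range: bytes */size`.
fn range_headers(range: Option<(u64, u64)>, size: u64) -> (u16, String, u64) {
    match range {
        Some((start, end)) if start <= end && end < size => {
            (206, format!("bytes {}-{}/{}", start, end, size), end - start + 1)
        }
        _ => (416, format!("bytes */{}", size), 0),
    }
}

fn main() {
    assert_eq!(
        range_headers(Some((9, 1023)), 4096),
        (206, "bytes 9-1023/4096".to_string(), 1015)
    );
    assert_eq!(range_headers(Some((0, 9_999_999)), 4096).0, 416);
}
```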
-#[derive(Debug, Clone, Copy)] -pub struct DefaultServeDirFallback(Infallible); - -impl Service> for DefaultServeDirFallback -where - ReqBody: Send + 'static, -{ - type Response = Response; - type Error = io::Error; - type Future = ResponseFuture; - - fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { - match self.0 {} - } - - fn call(&mut self, _req: Request) -> Self::Future { - match self.0 {} - } -} - -fn not_found() -> io::Result> { - let res = Response::builder() - .status(StatusCode::NOT_FOUND) - .body(empty_body()) - .unwrap(); - Ok(res) -} - -fn method_not_allowed() -> io::Result> { - let res = Response::builder() - .status(StatusCode::METHOD_NOT_ALLOWED) - .body(empty_body()) - .unwrap(); - Ok(res) -} - -fn call_fallback(fallback: &mut F, req: Request) -> ResponseFutureInner -where - F: Service, Response = Response> + Clone, - F::Error: Into, - F::Future: Send + 'static, - FResBody: http_body::Body + Send + 'static, - FResBody::Error: Into, -{ - let future = fallback.call(req); - let future = async move { - let response = future.await.map_err(Into::into)?; - let response = response - .map(|body| { - body.map_err(|err| match err.into().downcast::() { - Ok(err) => *err, - Err(err) => io::Error::new(io::ErrorKind::Other, err), - }) - .boxed_unsync() - }) - .map(ResponseBody::new); - Ok(response) - }; - let future = Box::pin(future); - ResponseFutureInner::FallbackFuture { future } -} - -#[cfg(test)] -mod tests { - use crate::services::ServeFile; - - use super::*; - use brotli::BrotliDecompress; - use flate2::bufread::{DeflateDecoder, GzDecoder}; - use http::{Request, StatusCode}; - use http_body::Body as HttpBody; - use hyper::Body; - use std::io::Read; - use tower::ServiceExt; - - #[tokio::test] - async fn basic() { - let svc = ServeDir::new(".."); - - let req = Request::builder() - .uri("/README.md") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()["content-type"], "text/markdown"); - - let body = body_into_text(res.into_body()).await; - - let contents = std::fs::read_to_string("../README.md").unwrap(); - assert_eq!(body, contents); - } - - #[tokio::test] - async fn basic_with_index() { - let svc = ServeDir::new("../test-files"); - - let req = Request::new(Body::empty()); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()[header::CONTENT_TYPE], "text/html"); - - let body = body_into_text(res.into_body()).await; - assert_eq!(body, "HTML!\n"); - } - - #[tokio::test] - async fn head_request() { - let svc = ServeDir::new("../test-files"); - - let req = Request::builder() - .uri("/precompressed.txt") - .method(Method::HEAD) - .body(Body::empty()) - .unwrap(); - - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-length"], "23"); - - let body = res.into_body().data().await; - assert!(body.is_none()); - } - - #[tokio::test] - async fn precompresed_head_request() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let req = Request::builder() - .uri("/precompressed.txt") - .header("Accept-Encoding", "gzip") - .method(Method::HEAD) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "gzip"); - assert_eq!(res.headers()["content-length"], "59"); - - let body = 
res.into_body().data().await; - assert!(body.is_none()); - } - - #[tokio::test] - async fn with_custom_chunk_size() { - let svc = ServeDir::new("..").with_buf_chunk_size(1024 * 32); - - let req = Request::builder() - .uri("/README.md") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()["content-type"], "text/markdown"); - - let body = body_into_text(res.into_body()).await; - - let contents = std::fs::read_to_string("../README.md").unwrap(); - assert_eq!(body, contents); - } - - #[tokio::test] - async fn precompressed_gzip() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let req = Request::builder() - .uri("/precompressed.txt") - .header("Accept-Encoding", "gzip") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "gzip"); - - let body = res.into_body().data().await.unwrap().unwrap(); - let mut decoder = GzDecoder::new(&body[..]); - let mut decompressed = String::new(); - decoder.read_to_string(&mut decompressed).unwrap(); - assert!(decompressed.starts_with("\"This is a test file!\"")); - } - - #[tokio::test] - async fn precompressed_br() { - let svc = ServeDir::new("../test-files").precompressed_br(); - - let req = Request::builder() - .uri("/precompressed.txt") - .header("Accept-Encoding", "br") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "br"); - - let body = res.into_body().data().await.unwrap().unwrap(); - let mut decompressed = Vec::new(); - BrotliDecompress(&mut &body[..], &mut decompressed).unwrap(); - let decompressed = String::from_utf8(decompressed.to_vec()).unwrap(); - assert!(decompressed.starts_with("\"This is a test file!\"")); - } - - #[tokio::test] - async fn precompressed_deflate() { - let svc = ServeDir::new("../test-files").precompressed_deflate(); - let request = Request::builder() - .uri("/precompressed.txt") - .header("Accept-Encoding", "deflate,br") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(request).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "deflate"); - - let body = res.into_body().data().await.unwrap().unwrap(); - let mut decoder = DeflateDecoder::new(&body[..]); - let mut decompressed = String::new(); - decoder.read_to_string(&mut decompressed).unwrap(); - assert!(decompressed.starts_with("\"This is a test file!\"")); - } - - #[tokio::test] - async fn unsupported_precompression_alogrithm_fallbacks_to_uncompressed() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let request = Request::builder() - .uri("/precompressed.txt") - .header("Accept-Encoding", "br") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(request).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert!(res.headers().get("content-encoding").is_none()); - - let body = res.into_body().data().await.unwrap().unwrap(); - let body = String::from_utf8(body.to_vec()).unwrap(); - assert!(body.starts_with("\"This is a test file!\"")); - } - - #[tokio::test] - async fn only_precompressed_variant_existing() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let request = Request::builder() - .uri("/only_gzipped.txt") - 
.body(Body::empty()) - .unwrap(); - let res = svc.clone().oneshot(request).await.unwrap(); - - assert_eq!(res.status(), StatusCode::NOT_FOUND); - - // Should reply with gzipped file if client supports it - let request = Request::builder() - .uri("/only_gzipped.txt") - .header("Accept-Encoding", "gzip") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(request).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "gzip"); - - let body = res.into_body().data().await.unwrap().unwrap(); - let mut decoder = GzDecoder::new(&body[..]); - let mut decompressed = String::new(); - decoder.read_to_string(&mut decompressed).unwrap(); - assert!(decompressed.starts_with("\"This is a test file\"")); - } - - #[tokio::test] - async fn missing_precompressed_variant_fallbacks_to_uncompressed() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let request = Request::builder() - .uri("/missing_precompressed.txt") - .header("Accept-Encoding", "gzip") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(request).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - // Uncompressed file is served because compressed version is missing - assert!(res.headers().get("content-encoding").is_none()); - - let body = res.into_body().data().await.unwrap().unwrap(); - let body = String::from_utf8(body.to_vec()).unwrap(); - assert!(body.starts_with("Test file!")); - } - - #[tokio::test] - async fn missing_precompressed_variant_fallbacks_to_uncompressed_for_head_request() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let request = Request::builder() - .uri("/missing_precompressed.txt") - .header("Accept-Encoding", "gzip") - .method(Method::HEAD) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(request).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-length"], "11"); - // Uncompressed file is served because compressed version is missing - assert!(res.headers().get("content-encoding").is_none()); - - assert!(res.into_body().data().await.is_none()); - } - - #[tokio::test] - async fn access_to_sub_dirs() { - let svc = ServeDir::new(".."); - - let req = Request::builder() - .uri("/tower-http/Cargo.toml") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()["content-type"], "text/x-toml"); - - let body = body_into_text(res.into_body()).await; - - let contents = std::fs::read_to_string("Cargo.toml").unwrap(); - assert_eq!(body, contents); - } - - #[tokio::test] - async fn not_found() { - let svc = ServeDir::new(".."); - - let req = Request::builder() - .uri("/not-found") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::NOT_FOUND); - assert!(res.headers().get(header::CONTENT_TYPE).is_none()); - - let body = body_into_text(res.into_body()).await; - assert!(body.is_empty()); - } - - #[tokio::test] - async fn not_found_precompressed() { - let svc = ServeDir::new("../test-files").precompressed_gzip(); - - let req = Request::builder() - .uri("/not-found") - .header("Accept-Encoding", "gzip") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::NOT_FOUND); - assert!(res.headers().get(header::CONTENT_TYPE).is_none()); - - let body = body_into_text(res.into_body()).await; - 
assert!(body.is_empty()); - } - - #[tokio::test] - async fn fallbacks_to_different_precompressed_variant_if_not_found_for_head_request() { - let svc = ServeDir::new("../test-files") - .precompressed_gzip() - .precompressed_br(); - - let req = Request::builder() - .uri("/precompressed_br.txt") - .header("Accept-Encoding", "gzip,br,deflate") - .method(Method::HEAD) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "br"); - assert_eq!(res.headers()["content-length"], "15"); - - assert!(res.into_body().data().await.is_none()); - } - - #[tokio::test] - async fn fallbacks_to_different_precompressed_variant_if_not_found() { - let svc = ServeDir::new("../test-files") - .precompressed_gzip() - .precompressed_br(); - - let req = Request::builder() - .uri("/precompressed_br.txt") - .header("Accept-Encoding", "gzip,br,deflate") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.headers()["content-type"], "text/plain"); - assert_eq!(res.headers()["content-encoding"], "br"); - - let body = res.into_body().data().await.unwrap().unwrap(); - let mut decompressed = Vec::new(); - BrotliDecompress(&mut &body[..], &mut decompressed).unwrap(); - let decompressed = String::from_utf8(decompressed.to_vec()).unwrap(); - assert!(decompressed.starts_with("Test file")); - } - - #[tokio::test] - async fn redirect_to_trailing_slash_on_dir() { - let svc = ServeDir::new("."); - - let req = Request::builder().uri("/src").body(Body::empty()).unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::TEMPORARY_REDIRECT); - - let location = &res.headers()[http::header::LOCATION]; - assert_eq!(location, "/src/"); - } - - #[tokio::test] - async fn empty_directory_without_index() { - let svc = ServeDir::new(".").append_index_html_on_directories(false); - - let req = Request::new(Body::empty()); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::NOT_FOUND); - assert!(res.headers().get(header::CONTENT_TYPE).is_none()); - - let body = body_into_text(res.into_body()).await; - assert!(body.is_empty()); - } - - async fn body_into_text(body: B) -> String - where - B: HttpBody + Unpin, - B::Error: std::fmt::Debug, - { - let bytes = hyper::body::to_bytes(body).await.unwrap(); - String::from_utf8(bytes.to_vec()).unwrap() - } - - #[tokio::test] - async fn access_cjk_percent_encoded_uri_path() { - // percent encoding present of 你好世界.txt - let cjk_filename_encoded = "%E4%BD%A0%E5%A5%BD%E4%B8%96%E7%95%8C.txt"; - - let svc = ServeDir::new("../test-files"); - - let req = Request::builder() - .uri(format!("/{}", cjk_filename_encoded)) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()["content-type"], "text/plain"); - } - - #[tokio::test] - async fn access_space_percent_encoded_uri_path() { - let encoded_filename = "filename%20with%20space.txt"; - - let svc = ServeDir::new("../test-files"); - - let req = Request::builder() - .uri(format!("/{}", encoded_filename)) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()["content-type"], "text/plain"); - } - - #[tokio::test] - async fn read_partial_in_bounds() { - let svc = ServeDir::new(".."); - let bytes_start_incl = 9; - let bytes_end_incl = 
1023; - - let req = Request::builder() - .uri("/README.md") - .header( - "Range", - format!("bytes={}-{}", bytes_start_incl, bytes_end_incl), - ) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - let file_contents = std::fs::read("../README.md").unwrap(); - assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT); - assert_eq!( - res.headers()["content-length"], - (bytes_end_incl - bytes_start_incl + 1).to_string() - ); - assert!(res.headers()["content-range"] - .to_str() - .unwrap() - .starts_with(&format!( - "bytes {}-{}/{}", - bytes_start_incl, - bytes_end_incl, - file_contents.len() - ))); - assert_eq!(res.headers()["content-type"], "text/markdown"); - - let body = hyper::body::to_bytes(res.into_body()).await.ok().unwrap(); - let source = Bytes::from(file_contents[bytes_start_incl..=bytes_end_incl].to_vec()); - assert_eq!(body, source); - } - - #[tokio::test] - async fn read_partial_rejects_out_of_bounds_range() { - let svc = ServeDir::new(".."); - let bytes_start_incl = 0; - let bytes_end_excl = 9999999; - let requested_len = bytes_end_excl - bytes_start_incl; - - let req = Request::builder() - .uri("/README.md") - .header( - "Range", - format!("bytes={}-{}", bytes_start_incl, requested_len - 1), - ) - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::RANGE_NOT_SATISFIABLE); - let file_contents = std::fs::read("../README.md").unwrap(); - assert_eq!( - res.headers()["content-range"], - &format!("bytes */{}", file_contents.len()) - ) - } - - #[tokio::test] - async fn read_partial_errs_on_garbage_header() { - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .header("Range", "bad_format") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - assert_eq!(res.status(), StatusCode::RANGE_NOT_SATISFIABLE); - let file_contents = std::fs::read("../README.md").unwrap(); - assert_eq!( - res.headers()["content-range"], - &format!("bytes */{}", file_contents.len()) - ) - } - - #[tokio::test] - async fn read_partial_errs_on_bad_range() { - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .header("Range", "bytes=-1-15") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - assert_eq!(res.status(), StatusCode::RANGE_NOT_SATISFIABLE); - let file_contents = std::fs::read("../README.md").unwrap(); - assert_eq!( - res.headers()["content-range"], - &format!("bytes */{}", file_contents.len()) - ) - } - #[tokio::test] - async fn last_modified() { - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - assert_eq!(res.status(), StatusCode::OK); - - let last_modified = res - .headers() - .get(header::LAST_MODIFIED) - .expect("Missing last modified header!"); - - // -- If-Modified-Since - - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .header(header::IF_MODIFIED_SINCE, last_modified) - .body(Body::empty()) - .unwrap(); - - let res = svc.oneshot(req).await.unwrap(); - assert_eq!(res.status(), StatusCode::NOT_MODIFIED); - let body = res.into_body().data().await; - assert!(body.is_none()); - - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .header(header::IF_MODIFIED_SINCE, "Fri, 09 Aug 1996 14:21:40 GMT") - .body(Body::empty()) - .unwrap(); - - let res = svc.oneshot(req).await.unwrap(); - 
assert_eq!(res.status(), StatusCode::OK); - let readme_bytes = include_bytes!("../../../../README.md"); - let body = res.into_body().data().await.unwrap().unwrap(); - assert_eq!(body.as_ref(), readme_bytes); - - // -- If-Unmodified-Since - - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .header(header::IF_UNMODIFIED_SINCE, last_modified) - .body(Body::empty()) - .unwrap(); - - let res = svc.oneshot(req).await.unwrap(); - assert_eq!(res.status(), StatusCode::OK); - let body = res.into_body().data().await.unwrap().unwrap(); - assert_eq!(body.as_ref(), readme_bytes); - - let svc = ServeDir::new(".."); - let req = Request::builder() - .uri("/README.md") - .header(header::IF_UNMODIFIED_SINCE, "Fri, 09 Aug 1996 14:21:40 GMT") - .body(Body::empty()) - .unwrap(); - - let res = svc.oneshot(req).await.unwrap(); - assert_eq!(res.status(), StatusCode::PRECONDITION_FAILED); - let body = res.into_body().data().await; - assert!(body.is_none()); - } - - #[tokio::test] - async fn with_fallback_svc() { - async fn fallback(_: Request) -> io::Result> { - Ok(Response::new(Body::from("from fallback"))) - } - - let svc = ServeDir::new("..").fallback(tower::service_fn(fallback)); - - let req = Request::builder() - .uri("/doesnt-exist") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - - let body = body_into_text(res.into_body()).await; - assert_eq!(body, "from fallback"); - } - - #[tokio::test] - async fn with_fallback_serve_file() { - let svc = ServeDir::new("..").fallback(ServeFile::new("../README.md")); - - let req = Request::builder() - .uri("/doesnt-exist") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::OK); - assert_eq!(res.headers()["content-type"], "text/markdown"); - - let body = body_into_text(res.into_body()).await; - - let contents = std::fs::read_to_string("../README.md").unwrap(); - assert_eq!(body, contents); - } - - #[tokio::test] - async fn method_not_allowed() { - let svc = ServeDir::new(".."); - - let req = Request::builder() - .method(Method::POST) - .uri("/README.md") - .body(Body::empty()) - .unwrap(); - let res = svc.oneshot(req).await.unwrap(); - - assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED); - } -} diff --git a/tower-http/src/services/fs/serve_dir/future.rs b/tower-http/src/services/fs/serve_dir/future.rs new file mode 100644 index 00000000..024f2e60 --- /dev/null +++ b/tower-http/src/services/fs/serve_dir/future.rs @@ -0,0 +1,283 @@ +use super::{ + open_file::{FileOpened, FileRequestExtent, OpenFileOutput}, + DefaultServeDirFallback, ResponseBody, +}; +use crate::{services::fs::AsyncReadBody, BoxError}; +use bytes::Bytes; +use futures_util::{ + future::{BoxFuture, FutureExt, TryFutureExt}, + ready, +}; +use http::{header, Request, Response, StatusCode}; +use http_body::{Body, Empty, Full}; +use pin_project_lite::pin_project; +use std::{ + future::Future, + io, + pin::Pin, + task::{Context, Poll}, +}; +use tower_service::Service; + +pin_project! { + /// Response future of [`ServeDir`]. 
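The new imports appear to be the reason the `fs` feature now enables `futures-util/alloc` in `Cargo.toml`: `BoxFuture` and `FutureExt::boxed` live behind futures-util's `alloc` support. A minimal, standalone example of the combinator used throughout this module:

```rust
use futures_util::future::{BoxFuture, FutureExt};

// `BoxFuture<'static, T>` is shorthand for
// `Pin<Box<dyn Future<Output = T> + Send + 'static>>`; `.boxed()` is how the
// open-file and fallback futures in this module are type-erased.
fn make_future(value: u32) -> BoxFuture<'static, u32> {
    async move { value + 1 }.boxed()
}

fn main() {
    let _fut: BoxFuture<'static, u32> = make_future(41);
}
```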
+ pub struct ResponseFuture { + #[pin] + inner: ResponseFutureInner, + } +} + +impl ResponseFuture { + pub(super) fn open_file_future( + future: BoxFuture<'static, io::Result>, + fallback_and_request: Option<(F, Request)>, + ) -> Self { + Self { + inner: ResponseFutureInner::OpenFileFuture { + future, + fallback_and_request, + }, + } + } + + pub(super) fn invalid_path() -> Self { + Self { + inner: ResponseFutureInner::InvalidPath, + } + } + + pub(super) fn method_not_allowed() -> Self { + Self { + inner: ResponseFutureInner::MethodNotAllowed, + } + } +} + +pin_project! { + #[project = ResponseFutureInnerProj] + enum ResponseFutureInner { + OpenFileFuture { + #[pin] + future: BoxFuture<'static, io::Result>, + fallback_and_request: Option<(F, Request)>, + }, + FallbackFuture { + future: BoxFuture<'static, io::Result>>, + }, + InvalidPath, + MethodNotAllowed, + } +} + +impl Future for ResponseFuture +where + F: Service, Response = Response> + Clone, + F::Error: Into, + F::Future: Send + 'static, + ResBody: http_body::Body + Send + 'static, + ResBody::Error: Into>, +{ + type Output = io::Result>; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + loop { + let mut this = self.as_mut().project(); + + let new_state = match this.inner.as_mut().project() { + ResponseFutureInnerProj::OpenFileFuture { + future: open_file_future, + fallback_and_request, + } => match ready!(open_file_future.poll(cx)) { + Ok(OpenFileOutput::FileOpened(file_output)) => { + break Poll::Ready(Ok(build_response(*file_output))); + } + + Ok(OpenFileOutput::Redirect { location }) => { + let mut res = response_with_status(StatusCode::TEMPORARY_REDIRECT); + res.headers_mut().insert(http::header::LOCATION, location); + break Poll::Ready(Ok(res)); + } + + Ok(OpenFileOutput::NotFound) => { + break Poll::Ready(Ok(not_found())); + } + + Ok(OpenFileOutput::PreconditionFailed) => { + break Poll::Ready(Ok(response_with_status( + StatusCode::PRECONDITION_FAILED, + ))); + } + + Ok(OpenFileOutput::NotModified) => { + break Poll::Ready(Ok(response_with_status(StatusCode::NOT_MODIFIED))); + } + + Err(err) => { + if let io::ErrorKind::NotFound | io::ErrorKind::PermissionDenied = + err.kind() + { + if let Some((mut fallback, request)) = fallback_and_request.take() { + call_fallback(&mut fallback, request) + } else { + break Poll::Ready(Ok(not_found())); + } + } else { + break Poll::Ready(Err(err)); + } + } + }, + + ResponseFutureInnerProj::FallbackFuture { future } => { + break Pin::new(future).poll(cx) + } + + ResponseFutureInnerProj::InvalidPath => { + break Poll::Ready(Ok(not_found())); + } + + ResponseFutureInnerProj::MethodNotAllowed => { + break Poll::Ready(Ok(response_with_status(StatusCode::METHOD_NOT_ALLOWED))); + } + }; + + this.inner.set(new_state); + } + } +} + +fn response_with_status(status: StatusCode) -> Response { + Response::builder() + .status(status) + .body(empty_body()) + .unwrap() +} + +fn not_found() -> Response { + response_with_status(StatusCode::NOT_FOUND) +} + +fn call_fallback(fallback: &mut F, req: Request) -> ResponseFutureInner +where + F: Service, Response = Response> + Clone, + F::Error: Into, + F::Future: Send + 'static, + FResBody: http_body::Body + Send + 'static, + FResBody::Error: Into, +{ + let future = fallback + .call(req) + .err_into() + .map_ok(|response| { + response + .map(|body| { + body.map_err(|err| match err.into().downcast::() { + Ok(err) => *err, + Err(err) => io::Error::new(io::ErrorKind::Other, err), + }) + .boxed_unsync() + }) + .map(ResponseBody::new) + }) + .boxed(); 
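`call_fallback` converts the fallback's boxed body errors into the `io::Error` this service reports. The downcast-or-wrap step in isolation, with a local copy of the crate's `BoxError` alias:

```rust
use std::io;

// Mirrors the crate's `BoxError` alias.
type BoxError = Box<dyn std::error::Error + Send + Sync>;

// Try to recover an `io::Error` from the erased error; wrap anything else.
fn into_io_error(err: BoxError) -> io::Error {
    match err.downcast::<io::Error>() {
        Ok(err) => *err,
        Err(err) => io::Error::new(io::ErrorKind::Other, err),
    }
}
```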
+ + ResponseFutureInner::FallbackFuture { future } +} + +fn build_response(output: FileOpened) -> Response { + let (maybe_file, size) = match output.extent { + FileRequestExtent::Full(file, meta) => (Some(file), meta.len()), + FileRequestExtent::Head(meta) => (None, meta.len()), + }; + + let mut builder = Response::builder() + .header(header::CONTENT_TYPE, output.mime_header_value) + .header(header::ACCEPT_RANGES, "bytes"); + + if let Some(encoding) = output.maybe_encoding { + builder = builder.header(header::CONTENT_ENCODING, encoding.into_header_value()); + } + + if let Some(last_modified) = output.last_modified { + builder = builder.header(header::LAST_MODIFIED, last_modified.0.to_string()); + } + + match output.maybe_range { + Some(Ok(ranges)) => { + if let Some(range) = ranges.first() { + if ranges.len() > 1 { + builder + .header(header::CONTENT_RANGE, format!("bytes */{}", size)) + .status(StatusCode::RANGE_NOT_SATISFIABLE) + .body(body_from_bytes(Bytes::from( + "Cannot serve multipart range requests", + ))) + .unwrap() + } else { + let body = if let Some(file) = maybe_file { + let range_size = range.end() - range.start() + 1; + ResponseBody::new( + AsyncReadBody::with_capacity_limited( + file, + output.chunk_size, + range_size, + ) + .boxed_unsync(), + ) + } else { + empty_body() + }; + + builder + .header( + header::CONTENT_RANGE, + format!("bytes {}-{}/{}", range.start(), range.end(), size), + ) + .header(header::CONTENT_LENGTH, range.end() - range.start() + 1) + .status(StatusCode::PARTIAL_CONTENT) + .body(body) + .unwrap() + } + } else { + builder + .header(header::CONTENT_RANGE, format!("bytes */{}", size)) + .status(StatusCode::RANGE_NOT_SATISFIABLE) + .body(body_from_bytes(Bytes::from( + "No range found after parsing range header, please file an issue", + ))) + .unwrap() + } + } + + Some(Err(_)) => builder + .header(header::CONTENT_RANGE, format!("bytes */{}", size)) + .status(StatusCode::RANGE_NOT_SATISFIABLE) + .body(empty_body()) + .unwrap(), + + // Not a range request + None => { + let body = if let Some(file) = maybe_file { + ResponseBody::new( + AsyncReadBody::with_capacity(file, output.chunk_size).boxed_unsync(), + ) + } else { + empty_body() + }; + + builder + .header(header::CONTENT_LENGTH, size.to_string()) + .body(body) + .unwrap() + } + } +} + +fn body_from_bytes(bytes: Bytes) -> ResponseBody { + let body = Full::from(bytes).map_err(|err| match err {}).boxed_unsync(); + ResponseBody::new(body) +} + +fn empty_body() -> ResponseBody { + let body = Empty::new().map_err(|err| match err {}).boxed_unsync(); + ResponseBody::new(body) +} diff --git a/tower-http/src/services/fs/serve_dir/headers.rs b/tower-http/src/services/fs/serve_dir/headers.rs new file mode 100644 index 00000000..e9e80907 --- /dev/null +++ b/tower-http/src/services/fs/serve_dir/headers.rs @@ -0,0 +1,45 @@ +use http::header::HeaderValue; +use httpdate::HttpDate; +use std::time::SystemTime; + +pub(super) struct LastModified(pub(super) HttpDate); + +impl From for LastModified { + fn from(time: SystemTime) -> Self { + LastModified(time.into()) + } +} + +pub(super) struct IfModifiedSince(HttpDate); + +impl IfModifiedSince { + /// Check if the supplied time means the resource has been modified. 
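The types in the new `headers` module compare `HttpDate`s, which carry one-second resolution. A quick round-trip through the `httpdate` crate (already a dependency of this module) shows what the parsed values look like:

```rust
use httpdate::{fmt_http_date, parse_http_date};
use std::time::SystemTime;

fn main() {
    let now = SystemTime::now();
    // e.g. "Fri, 15 May 2015 15:34:21 GMT"
    let header_value = fmt_http_date(now);
    let parsed = parse_http_date(&header_value).unwrap();
    // Sub-second precision is lost in the round-trip, so the difference is
    // strictly less than one second.
    assert_eq!(now.duration_since(parsed).unwrap().as_secs(), 0);
}
```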
+    pub(super) fn is_modified(&self, last_modified: &LastModified) -> bool {
+        self.0 < last_modified.0
+    }
+
+    /// Convert a header value into an `IfModifiedSince`; invalid values are silently ignored.
+    pub(super) fn from_header_value(value: &HeaderValue) -> Option<IfModifiedSince> {
+        std::str::from_utf8(value.as_bytes())
+            .ok()
+            .and_then(|value| httpdate::parse_http_date(value).ok())
+            .map(|time| IfModifiedSince(time.into()))
+    }
+}
+
+pub(super) struct IfUnmodifiedSince(HttpDate);
+
+impl IfUnmodifiedSince {
+    /// Check if the supplied time passes the precondition.
+    pub(super) fn precondition_passes(&self, last_modified: &LastModified) -> bool {
+        self.0 >= last_modified.0
+    }
+
+    /// Convert a header value into an `IfUnmodifiedSince`; invalid values are silently ignored.
+    pub(super) fn from_header_value(value: &HeaderValue) -> Option<IfUnmodifiedSince> {
+        std::str::from_utf8(value.as_bytes())
+            .ok()
+            .and_then(|value| httpdate::parse_http_date(value).ok())
+            .map(|time| IfUnmodifiedSince(time.into()))
+    }
+}
diff --git a/tower-http/src/services/fs/serve_dir/mod.rs b/tower-http/src/services/fs/serve_dir/mod.rs
new file mode 100644
index 00000000..ef570299
--- /dev/null
+++ b/tower-http/src/services/fs/serve_dir/mod.rs
@@ -0,0 +1,428 @@
+use self::future::ResponseFuture;
+use crate::{
+    content_encoding::{encodings, SupportedEncodings},
+    set_status::SetStatus,
+};
+use bytes::Bytes;
+use http::{header, HeaderValue, Method, Request, Response, StatusCode};
+use http_body::{combinators::UnsyncBoxBody, Empty};
+use percent_encoding::percent_decode;
+use std::{
+    convert::Infallible,
+    io,
+    path::{Component, Path, PathBuf},
+    task::{Context, Poll},
+};
+use tower_service::Service;
+
+pub(crate) mod future;
+mod headers;
+mod open_file;
+
+#[cfg(test)]
+mod tests;
+
+// default capacity 64KiB
+const DEFAULT_CAPACITY: usize = 65536;
+
+/// Service that serves files from a given directory and all its subdirectories.
+///
+/// The `Content-Type` will be guessed from the file extension.
+///
+/// An empty response with status `404 Not Found` will be returned if:
+///
+/// - The file doesn't exist
+/// - Any segment of the path contains `..`
+/// - Any segment of the path contains a backslash
+/// - We don't have the necessary permissions to read the file
+///
+/// # Example
+///
+/// ```
+/// use tower_http::services::ServeDir;
+///
+/// // This will serve files in the "assets" directory and
+/// // its subdirectories
+/// let service = ServeDir::new("assets");
+///
+/// # async {
+/// // Run our service using `hyper`
+/// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 3000));
+/// hyper::Server::bind(&addr)
+///     .serve(tower::make::Shared::new(service))
+///     .await
+///     .expect("server error");
+/// # };
+/// ```
+#[derive(Clone, Debug)]
+pub struct ServeDir<F = DefaultServeDirFallback> {
+    base: PathBuf,
+    buf_chunk_size: usize,
+    precompressed_variants: Option<PrecompressedVariants>,
+    // This is used to specialise the implementation for
+    // single files
+    variant: ServeVariant,
+    fallback: Option<F>,
+}
+
+impl ServeDir<DefaultServeDirFallback> {
+    /// Create a new [`ServeDir`].
+    pub fn new<P>(path: P) -> Self
+    where
+        P: AsRef<Path>,
+    {
+        let mut base = PathBuf::from(".");
+        base.push(path.as_ref());
+
+        Self {
+            base,
+            buf_chunk_size: DEFAULT_CAPACITY,
+            precompressed_variants: None,
+            variant: ServeVariant::Directory {
+                append_index_html_on_directories: true,
+            },
+            fallback: None,
+        }
+    }
+
+    pub(crate) fn new_single_file<P>
(path: P, mime: HeaderValue) -> Self + where + P: AsRef, + { + Self { + base: path.as_ref().to_owned(), + buf_chunk_size: DEFAULT_CAPACITY, + precompressed_variants: None, + variant: ServeVariant::SingleFile { mime }, + fallback: None, + } + } +} + +impl ServeDir { + /// If the requested path is a directory append `index.html`. + /// + /// This is useful for static sites. + /// + /// Defaults to `true`. + pub fn append_index_html_on_directories(mut self, append: bool) -> Self { + match &mut self.variant { + ServeVariant::Directory { + append_index_html_on_directories, + } => { + *append_index_html_on_directories = append; + self + } + ServeVariant::SingleFile { mime: _ } => self, + } + } + + /// Set a specific read buffer chunk size. + /// + /// The default capacity is 64kb. + pub fn with_buf_chunk_size(mut self, chunk_size: usize) -> Self { + self.buf_chunk_size = chunk_size; + self + } + + /// Informs the service that it should also look for a precompressed gzip + /// version of _any_ file in the directory. + /// + /// Assuming the `dir` directory is being served and `dir/foo.txt` is requested, + /// a client with an `Accept-Encoding` header that allows the gzip encoding + /// will receive the file `dir/foo.txt.gz` instead of `dir/foo.txt`. + /// If the precompressed file is not available, or the client doesn't support it, + /// the uncompressed version will be served instead. + /// Both the precompressed version and the uncompressed version are expected + /// to be present in the directory. Different precompressed variants can be combined. + pub fn precompressed_gzip(mut self) -> Self { + self.precompressed_variants + .get_or_insert(Default::default()) + .gzip = true; + self + } + + /// Informs the service that it should also look for a precompressed brotli + /// version of _any_ file in the directory. + /// + /// Assuming the `dir` directory is being served and `dir/foo.txt` is requested, + /// a client with an `Accept-Encoding` header that allows the brotli encoding + /// will receive the file `dir/foo.txt.br` instead of `dir/foo.txt`. + /// If the precompressed file is not available, or the client doesn't support it, + /// the uncompressed version will be served instead. + /// Both the precompressed version and the uncompressed version are expected + /// to be present in the directory. Different precompressed variants can be combined. + pub fn precompressed_br(mut self) -> Self { + self.precompressed_variants + .get_or_insert(Default::default()) + .br = true; + self + } + + /// Informs the service that it should also look for a precompressed deflate + /// version of _any_ file in the directory. + /// + /// Assuming the `dir` directory is being served and `dir/foo.txt` is requested, + /// a client with an `Accept-Encoding` header that allows the deflate encoding + /// will receive the file `dir/foo.txt.zz` instead of `dir/foo.txt`. + /// If the precompressed file is not available, or the client doesn't support it, + /// the uncompressed version will be served instead. + /// Both the precompressed version and the uncompressed version are expected + /// to be present in the directory. Different precompressed variants can be combined. + pub fn precompressed_deflate(mut self) -> Self { + self.precompressed_variants + .get_or_insert(Default::default()) + .deflate = true; + self + } + + /// Set the fallback service. + /// + /// This service will be called if there is no file at the path of the request. + /// + /// The status code returned by the fallback will not be altered. 
+    /// Use [`ServeDir::not_found_service`] to set a fallback and always respond with `404 Not Found`.
+    ///
+    /// # Example
+    ///
+    /// This can be used to respond with a different file:
+    ///
+    /// ```rust
+    /// use tower_http::services::{ServeDir, ServeFile};
+    ///
+    /// let service = ServeDir::new("assets")
+    ///     // respond with `not_found.html` for missing files
+    ///     .fallback(ServeFile::new("assets/not_found.html"));
+    ///
+    /// # async {
+    /// // Run our service using `hyper`
+    /// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 3000));
+    /// hyper::Server::bind(&addr)
+    ///     .serve(tower::make::Shared::new(service))
+    ///     .await
+    ///     .expect("server error");
+    /// # };
+    /// ```
+    pub fn fallback<F2>(self, new_fallback: F2) -> ServeDir<F2> {
+        ServeDir {
+            base: self.base,
+            buf_chunk_size: self.buf_chunk_size,
+            precompressed_variants: self.precompressed_variants,
+            variant: self.variant,
+            fallback: Some(new_fallback),
+        }
+    }
+
+    /// Set the fallback service and override the fallback's status code to `404 Not Found`.
+    ///
+    /// This service will be called if there is no file at the path of the request.
+    ///
+    /// # Example
+    ///
+    /// This can be used to respond with a different file:
+    ///
+    /// ```rust
+    /// use tower_http::services::{ServeDir, ServeFile};
+    ///
+    /// let service = ServeDir::new("assets")
+    ///     // respond with `404 Not Found` and the contents of `not_found.html` for missing files
+    ///     .not_found_service(ServeFile::new("assets/not_found.html"));
+    ///
+    /// # async {
+    /// // Run our service using `hyper`
+    /// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 3000));
+    /// hyper::Server::bind(&addr)
+    ///     .serve(tower::make::Shared::new(service))
+    ///     .await
+    ///     .expect("server error");
+    /// # };
+    /// ```
+    ///
+    /// Setups like this are often found in single-page applications.
+    pub fn not_found_service<F2>(self, new_fallback: F2) -> ServeDir<SetStatus<F2>> {
+        self.fallback(SetStatus::new(new_fallback, StatusCode::NOT_FOUND))
+    }
+}
+
+impl<ReqBody, F, FResBody> Service<Request<ReqBody>> for ServeDir<F>
+where
+    F: Service<Request<ReqBody>, Response = Response<FResBody>> + Clone,
+    F::Error: Into<io::Error>,
+    F::Future: Send + 'static,
+    FResBody: http_body::Body<Data = Bytes> + Send + 'static,
+    FResBody::Error: Into<Box<dyn std::error::Error + Send + Sync>>,
+{
+    type Response = Response<ResponseBody>;
+    type Error = io::Error;
+    type Future = ResponseFuture<ReqBody, F>;
+
+    #[inline]
+    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
+        if let Some(fallback) = &mut self.fallback {
+            fallback.poll_ready(cx).map_err(Into::into)
+        } else {
+            Poll::Ready(Ok(()))
+        }
+    }
+
+    fn call(&mut self, req: Request<ReqBody>) -> Self::Future {
+        if req.method() != Method::GET && req.method() != Method::HEAD {
+            return ResponseFuture::method_not_allowed();
+        }
+
+        // `ServeDir` doesn't care about the request body but the fallback might. So move out the
+        // body and pass it to the fallback, leaving an empty body in its place
+        //
+        // this is necessary because we cannot clone bodies
+        let (mut parts, body) = req.into_parts();
+        // same goes for extensions
+        let extensions = std::mem::take(&mut parts.extensions);
+        let req = Request::from_parts(parts, Empty::<Bytes>::new());
+
+        let path_to_file = match self
+            .variant
+            .build_and_validate_path(&self.base, req.uri().path())
+        {
+            Some(path_to_file) => path_to_file,
+            None => {
+                return ResponseFuture::invalid_path();
+            }
+        };
+
+        let fallback_and_request = self.fallback.as_mut().map(|fallback| {
+            // Copy the original request's metadata onto the request passed to the fallback
+            let mut fallback_req = Request::new(body);
+            *fallback_req.method_mut() = req.method().clone();
+            *fallback_req.uri_mut() = req.uri().clone();
+            *fallback_req.headers_mut() = req.headers().clone();
+            *fallback_req.extensions_mut() = extensions;
+
+            // get the ready fallback and leave a non-ready clone in its place
+            let clone = fallback.clone();
+            let fallback = std::mem::replace(fallback, clone);
+
+            (fallback, fallback_req)
+        });
+
+        let buf_chunk_size = self.buf_chunk_size;
+        let range_header = req
+            .headers()
+            .get(header::RANGE)
+            .and_then(|value| value.to_str().ok())
+            .map(|s| s.to_owned());
+
+        let negotiated_encodings = encodings(
+            req.headers(),
+            self.precompressed_variants.unwrap_or_default(),
+        );
+
+        let variant = self.variant.clone();
+
+        let open_file_future = Box::pin(open_file::open_file(
+            variant,
+            path_to_file,
+            req,
+            negotiated_encodings,
+            range_header,
+            buf_chunk_size,
+        ));
+
+        ResponseFuture::open_file_future(open_file_future, fallback_and_request)
+    }
+}
+
+// Allow the ServeDir service to be used in the ServeFile service
+// with almost no overhead
+#[derive(Clone, Debug)]
+enum ServeVariant {
+    Directory {
+        append_index_html_on_directories: bool,
+    },
+    SingleFile {
+        mime: HeaderValue,
+    },
+}
+
+impl ServeVariant {
+    fn build_and_validate_path(&self, base_path: &Path, requested_path: &str) -> Option<PathBuf> {
+        match self {
+            ServeVariant::Directory {
+                append_index_html_on_directories: _,
+            } => {
+                let path = requested_path.trim_start_matches('/');
+
+                let path_decoded = percent_decode(path.as_ref()).decode_utf8().ok()?;
+                let path_decoded = Path::new(&*path_decoded);
+
+                let mut path_to_file = base_path.to_path_buf();
+                for component in path_decoded.components() {
+                    match component {
+                        Component::Normal(comp) => {
+                            // protect against paths like `/foo/c:/bar/baz` (#204)
+                            if Path::new(&comp)
+                                .components()
+                                .all(|c| matches!(c, Component::Normal(_)))
+                            {
+                                path_to_file.push(comp)
+                            } else {
+                                return None;
+                            }
+                        }
+                        Component::CurDir => {}
+                        Component::Prefix(_) | Component::RootDir | Component::ParentDir => {
+                            return None;
+                        }
+                    }
+                }
+                Some(path_to_file)
+            }
+            ServeVariant::SingleFile { mime: _ } => Some(base_path.to_path_buf()),
+        }
+    }
+}
+
+opaque_body! {
+    /// Response body for [`ServeDir`] and [`ServeFile`].
+    pub type ResponseBody = UnsyncBoxBody<Bytes, io::Error>;
+}
+
+/// The default fallback service used with [`ServeDir`].
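+///
+/// This type is uninhabited: it wraps [`Infallible`], so it can never be
+/// constructed and its service methods can never actually be called. It only
+/// exists so `ServeDir` has a default fallback type when none is configured.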
+#[derive(Debug, Clone, Copy)] +pub struct DefaultServeDirFallback(Infallible); + +impl Service> for DefaultServeDirFallback +where + ReqBody: Send + 'static, +{ + type Response = Response; + type Error = io::Error; + type Future = ResponseFuture; + + fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + match self.0 {} + } + + fn call(&mut self, _req: Request) -> Self::Future { + match self.0 {} + } +} + +#[derive(Clone, Copy, Debug, Default)] +struct PrecompressedVariants { + gzip: bool, + deflate: bool, + br: bool, +} + +impl SupportedEncodings for PrecompressedVariants { + fn gzip(&self) -> bool { + self.gzip + } + + fn deflate(&self) -> bool { + self.deflate + } + + fn br(&self) -> bool { + self.br + } +} diff --git a/tower-http/src/services/fs/serve_dir/open_file.rs b/tower-http/src/services/fs/serve_dir/open_file.rs new file mode 100644 index 00000000..6e375cc6 --- /dev/null +++ b/tower-http/src/services/fs/serve_dir/open_file.rs @@ -0,0 +1,323 @@ +use super::{ + headers::{IfModifiedSince, IfUnmodifiedSince, LastModified}, + ServeVariant, +}; +use crate::content_encoding::{Encoding, QValue}; +use bytes::Bytes; +use http::{header, HeaderValue, Method, Request, Uri}; +use http_body::Empty; +use http_range_header::RangeUnsatisfiableError; +use std::{ + ffi::OsStr, + fs::Metadata, + io::{self, SeekFrom}, + ops::RangeInclusive, + path::{Path, PathBuf}, +}; +use tokio::{fs::File, io::AsyncSeekExt}; + +pub(super) enum OpenFileOutput { + FileOpened(Box), + Redirect { location: HeaderValue }, + NotFound, + PreconditionFailed, + NotModified, +} + +pub(super) struct FileOpened { + pub(super) extent: FileRequestExtent, + pub(super) chunk_size: usize, + pub(super) mime_header_value: HeaderValue, + pub(super) maybe_encoding: Option, + pub(super) maybe_range: Option>, RangeUnsatisfiableError>>, + pub(super) last_modified: Option, +} + +pub(super) enum FileRequestExtent { + Full(File, Metadata), + Head(Metadata), +} + +pub(super) async fn open_file( + variant: ServeVariant, + mut path_to_file: PathBuf, + req: Request>, + negotiated_encodings: Vec<(Encoding, QValue)>, + range_header: Option, + buf_chunk_size: usize, +) -> io::Result { + let if_unmodified_since = req + .headers() + .get(header::IF_UNMODIFIED_SINCE) + .and_then(IfUnmodifiedSince::from_header_value); + + let if_modified_since = req + .headers() + .get(header::IF_MODIFIED_SINCE) + .and_then(IfModifiedSince::from_header_value); + + let mime = match variant { + ServeVariant::Directory { + append_index_html_on_directories, + } => { + // Might already at this point know a redirect or not found result should be + // returned which corresponds to a Some(output). Otherwise the path might be + // modified and proceed to the open file/metadata future. 
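+            // For example: a request for `/docs` where `docs` is a directory on disk
+            // yields a `307 Temporary Redirect` to `/docs/`, while a request for
+            // `/docs/` itself gets `index.html` appended to the path (when enabled).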
+ if let Some(output) = maybe_redirect_or_append_path( + &mut path_to_file, + req.uri(), + append_index_html_on_directories, + ) + .await + { + return Ok(output); + } + + mime_guess::from_path(&path_to_file) + .first_raw() + .map(HeaderValue::from_static) + .unwrap_or_else(|| { + HeaderValue::from_str(mime::APPLICATION_OCTET_STREAM.as_ref()).unwrap() + }) + } + + ServeVariant::SingleFile { mime } => mime, + }; + + if req.method() == Method::HEAD { + let (meta, maybe_encoding) = + file_metadata_with_fallback(path_to_file, negotiated_encodings).await?; + + let last_modified = meta.modified().ok().map(LastModified::from); + if let Some(output) = check_modified_headers( + last_modified.as_ref(), + if_unmodified_since, + if_modified_since, + ) { + return Ok(output); + } + + let maybe_range = try_parse_range(range_header.as_deref(), meta.len()); + + Ok(OpenFileOutput::FileOpened(Box::new(FileOpened { + extent: FileRequestExtent::Head(meta), + chunk_size: buf_chunk_size, + mime_header_value: mime, + maybe_encoding, + maybe_range, + last_modified, + }))) + } else { + let (mut file, maybe_encoding) = + open_file_with_fallback(path_to_file, negotiated_encodings).await?; + let meta = file.metadata().await?; + let last_modified = meta.modified().ok().map(LastModified::from); + if let Some(output) = check_modified_headers( + last_modified.as_ref(), + if_unmodified_since, + if_modified_since, + ) { + return Ok(output); + } + + let maybe_range = try_parse_range(range_header.as_deref(), meta.len()); + if let Some(Ok(ranges)) = maybe_range.as_ref() { + // if there is any other amount of ranges than 1 we'll return an + // unsatisfiable later as there isn't yet support for multipart ranges + if ranges.len() == 1 { + file.seek(SeekFrom::Start(*ranges[0].start())).await?; + } + } + + Ok(OpenFileOutput::FileOpened(Box::new(FileOpened { + extent: FileRequestExtent::Full(file, meta), + chunk_size: buf_chunk_size, + mime_header_value: mime, + maybe_encoding, + maybe_range, + last_modified, + }))) + } +} + +fn check_modified_headers( + modified: Option<&LastModified>, + if_unmodified_since: Option, + if_modified_since: Option, +) -> Option { + if let Some(since) = if_unmodified_since { + let precondition = modified + .as_ref() + .map(|time| since.precondition_passes(time)) + .unwrap_or(false); + + if !precondition { + return Some(OpenFileOutput::PreconditionFailed); + } + } + + if let Some(since) = if_modified_since { + let unmodified = modified + .as_ref() + .map(|time| !since.is_modified(time)) + // no last_modified means its always modified + .unwrap_or(false); + if unmodified { + return Some(OpenFileOutput::NotModified); + } + } + + None +} + +// Returns the preferred_encoding encoding and modifies the path extension +// to the corresponding file extension for the encoding. +fn preferred_encoding( + path: &mut PathBuf, + negotiated_encoding: &[(Encoding, QValue)], +) -> Option { + let preferred_encoding = Encoding::preferred_encoding(negotiated_encoding); + + if let Some(file_extension) = + preferred_encoding.and_then(|encoding| encoding.to_file_extension()) + { + let new_extension = path + .extension() + .map(|extension| { + let mut os_string = extension.to_os_string(); + os_string.push(file_extension); + os_string + }) + .unwrap_or_else(|| file_extension.to_os_string()); + + path.set_extension(new_extension); + } + + preferred_encoding +} + +// Attempts to open the file with any of the possible negotiated_encodings in the +// preferred order. 
If none of the negotiated_encodings have a corresponding precompressed +// file the uncompressed file is used as a fallback. +async fn open_file_with_fallback( + mut path: PathBuf, + mut negotiated_encoding: Vec<(Encoding, QValue)>, +) -> io::Result<(File, Option)> { + let (file, encoding) = loop { + // Get the preferred encoding among the negotiated ones. + let encoding = preferred_encoding(&mut path, &negotiated_encoding); + match (File::open(&path).await, encoding) { + (Ok(file), maybe_encoding) => break (file, maybe_encoding), + (Err(err), Some(encoding)) if err.kind() == io::ErrorKind::NotFound => { + // Remove the extension corresponding to a precompressed file (.gz, .br, .zz) + // to reset the path before the next iteration. + path.set_extension(OsStr::new("")); + // Remove the encoding from the negotiated_encodings since the file doesn't exist + negotiated_encoding + .retain(|(negotiated_encoding, _)| *negotiated_encoding != encoding); + continue; + } + (Err(err), _) => return Err(err), + }; + }; + Ok((file, encoding)) +} + +// Attempts to get the file metadata with any of the possible negotiated_encodings in the +// preferred order. If none of the negotiated_encodings have a corresponding precompressed +// file the uncompressed file is used as a fallback. +async fn file_metadata_with_fallback( + mut path: PathBuf, + mut negotiated_encoding: Vec<(Encoding, QValue)>, +) -> io::Result<(Metadata, Option)> { + let (file, encoding) = loop { + // Get the preferred encoding among the negotiated ones. + let encoding = preferred_encoding(&mut path, &negotiated_encoding); + match (tokio::fs::metadata(&path).await, encoding) { + (Ok(file), maybe_encoding) => break (file, maybe_encoding), + (Err(err), Some(encoding)) if err.kind() == io::ErrorKind::NotFound => { + // Remove the extension corresponding to a precompressed file (.gz, .br, .zz) + // to reset the path before the next iteration. + path.set_extension(OsStr::new("")); + // Remove the encoding from the negotiated_encodings since the file doesn't exist + negotiated_encoding + .retain(|(negotiated_encoding, _)| *negotiated_encoding != encoding); + continue; + } + (Err(err), _) => return Err(err), + }; + }; + Ok((file, encoding)) +} + +async fn maybe_redirect_or_append_path( + path_to_file: &mut PathBuf, + uri: &Uri, + append_index_html_on_directories: bool, +) -> Option { + if !uri.path().ends_with('/') { + if is_dir(path_to_file).await { + let location = + HeaderValue::from_str(&append_slash_on_path(uri.clone()).to_string()).unwrap(); + Some(OpenFileOutput::Redirect { location }) + } else { + None + } + } else if is_dir(path_to_file).await { + if append_index_html_on_directories { + path_to_file.push("index.html"); + None + } else { + Some(OpenFileOutput::NotFound) + } + } else { + None + } +} + +fn try_parse_range( + maybe_range_ref: Option<&str>, + file_size: u64, +) -> Option>, RangeUnsatisfiableError>> { + maybe_range_ref.map(|header_value| { + http_range_header::parse_range_header(header_value) + .and_then(|first_pass| first_pass.validate(file_size)) + }) +} + +async fn is_dir(path_to_file: &Path) -> bool { + tokio::fs::metadata(path_to_file) + .await + .map_or(false, |meta_data| meta_data.is_dir()) +} + +fn append_slash_on_path(uri: Uri) -> Uri { + let http::uri::Parts { + scheme, + authority, + path_and_query, + .. 
+ } = uri.into_parts(); + + let mut uri_builder = Uri::builder(); + + if let Some(scheme) = scheme { + uri_builder = uri_builder.scheme(scheme); + } + + if let Some(authority) = authority { + uri_builder = uri_builder.authority(authority); + } + + let uri_builder = if let Some(path_and_query) = path_and_query { + if let Some(query) = path_and_query.query() { + uri_builder.path_and_query(format!("{}/?{}", path_and_query.path(), query)) + } else { + uri_builder.path_and_query(format!("{}/", path_and_query.path())) + } + } else { + uri_builder.path_and_query("/") + }; + + uri_builder.build().unwrap() +} diff --git a/tower-http/src/services/fs/serve_dir/tests.rs b/tower-http/src/services/fs/serve_dir/tests.rs new file mode 100644 index 00000000..3f30d628 --- /dev/null +++ b/tower-http/src/services/fs/serve_dir/tests.rs @@ -0,0 +1,637 @@ +use crate::services::{ServeDir, ServeFile}; +use brotli::BrotliDecompress; +use bytes::Bytes; +use flate2::bufread::{DeflateDecoder, GzDecoder}; +use http::{header, Method, Response}; +use http::{Request, StatusCode}; +use http_body::Body as HttpBody; +use hyper::Body; +use std::io::{self, Read}; +use tower::ServiceExt; + +#[tokio::test] +async fn basic() { + let svc = ServeDir::new(".."); + + let req = Request::builder() + .uri("/README.md") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()["content-type"], "text/markdown"); + + let body = body_into_text(res.into_body()).await; + + let contents = std::fs::read_to_string("../README.md").unwrap(); + assert_eq!(body, contents); +} + +#[tokio::test] +async fn basic_with_index() { + let svc = ServeDir::new("../test-files"); + + let req = Request::new(Body::empty()); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()[header::CONTENT_TYPE], "text/html"); + + let body = body_into_text(res.into_body()).await; + assert_eq!(body, "HTML!\n"); +} + +#[tokio::test] +async fn head_request() { + let svc = ServeDir::new("../test-files"); + + let req = Request::builder() + .uri("/precompressed.txt") + .method(Method::HEAD) + .body(Body::empty()) + .unwrap(); + + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-length"], "23"); + + let body = res.into_body().data().await; + assert!(body.is_none()); +} + +#[tokio::test] +async fn precompresed_head_request() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let req = Request::builder() + .uri("/precompressed.txt") + .header("Accept-Encoding", "gzip") + .method(Method::HEAD) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "gzip"); + assert_eq!(res.headers()["content-length"], "59"); + + let body = res.into_body().data().await; + assert!(body.is_none()); +} + +#[tokio::test] +async fn with_custom_chunk_size() { + let svc = ServeDir::new("..").with_buf_chunk_size(1024 * 32); + + let req = Request::builder() + .uri("/README.md") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()["content-type"], "text/markdown"); + + let body = body_into_text(res.into_body()).await; + + let contents = std::fs::read_to_string("../README.md").unwrap(); + assert_eq!(body, contents); +} 
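+
+// Illustrative sketch (test name and scenario chosen for this example):
+// `not_found_service` behaves like `fallback`, except the fallback's status code
+// is overridden to `404 Not Found`. Reusing `../README.md` as the not-found page
+// keeps the sketch self-contained within the existing test files.
+#[tokio::test]
+async fn with_not_found_service_serve_file() {
+    let svc = ServeDir::new("..").not_found_service(ServeFile::new("../README.md"));
+
+    let req = Request::builder()
+        .uri("/doesnt-exist")
+        .body(Body::empty())
+        .unwrap();
+    let res = svc.oneshot(req).await.unwrap();
+
+    // `SetStatus` overrides the status while leaving headers and body untouched.
+    assert_eq!(res.status(), StatusCode::NOT_FOUND);
+    assert_eq!(res.headers()["content-type"], "text/markdown");
+
+    let body = body_into_text(res.into_body()).await;
+    let contents = std::fs::read_to_string("../README.md").unwrap();
+    assert_eq!(body, contents);
+}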
+ +#[tokio::test] +async fn precompressed_gzip() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let req = Request::builder() + .uri("/precompressed.txt") + .header("Accept-Encoding", "gzip") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "gzip"); + + let body = res.into_body().data().await.unwrap().unwrap(); + let mut decoder = GzDecoder::new(&body[..]); + let mut decompressed = String::new(); + decoder.read_to_string(&mut decompressed).unwrap(); + assert!(decompressed.starts_with("\"This is a test file!\"")); +} + +#[tokio::test] +async fn precompressed_br() { + let svc = ServeDir::new("../test-files").precompressed_br(); + + let req = Request::builder() + .uri("/precompressed.txt") + .header("Accept-Encoding", "br") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "br"); + + let body = res.into_body().data().await.unwrap().unwrap(); + let mut decompressed = Vec::new(); + BrotliDecompress(&mut &body[..], &mut decompressed).unwrap(); + let decompressed = String::from_utf8(decompressed.to_vec()).unwrap(); + assert!(decompressed.starts_with("\"This is a test file!\"")); +} + +#[tokio::test] +async fn precompressed_deflate() { + let svc = ServeDir::new("../test-files").precompressed_deflate(); + let request = Request::builder() + .uri("/precompressed.txt") + .header("Accept-Encoding", "deflate,br") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(request).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "deflate"); + + let body = res.into_body().data().await.unwrap().unwrap(); + let mut decoder = DeflateDecoder::new(&body[..]); + let mut decompressed = String::new(); + decoder.read_to_string(&mut decompressed).unwrap(); + assert!(decompressed.starts_with("\"This is a test file!\"")); +} + +#[tokio::test] +async fn unsupported_precompression_alogrithm_fallbacks_to_uncompressed() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let request = Request::builder() + .uri("/precompressed.txt") + .header("Accept-Encoding", "br") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(request).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert!(res.headers().get("content-encoding").is_none()); + + let body = res.into_body().data().await.unwrap().unwrap(); + let body = String::from_utf8(body.to_vec()).unwrap(); + assert!(body.starts_with("\"This is a test file!\"")); +} + +#[tokio::test] +async fn only_precompressed_variant_existing() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let request = Request::builder() + .uri("/only_gzipped.txt") + .body(Body::empty()) + .unwrap(); + let res = svc.clone().oneshot(request).await.unwrap(); + + assert_eq!(res.status(), StatusCode::NOT_FOUND); + + // Should reply with gzipped file if client supports it + let request = Request::builder() + .uri("/only_gzipped.txt") + .header("Accept-Encoding", "gzip") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(request).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "gzip"); + + let body = res.into_body().data().await.unwrap().unwrap(); + let mut decoder = 
GzDecoder::new(&body[..]); + let mut decompressed = String::new(); + decoder.read_to_string(&mut decompressed).unwrap(); + assert!(decompressed.starts_with("\"This is a test file\"")); +} + +#[tokio::test] +async fn missing_precompressed_variant_fallbacks_to_uncompressed() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let request = Request::builder() + .uri("/missing_precompressed.txt") + .header("Accept-Encoding", "gzip") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(request).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + // Uncompressed file is served because compressed version is missing + assert!(res.headers().get("content-encoding").is_none()); + + let body = res.into_body().data().await.unwrap().unwrap(); + let body = String::from_utf8(body.to_vec()).unwrap(); + assert!(body.starts_with("Test file!")); +} + +#[tokio::test] +async fn missing_precompressed_variant_fallbacks_to_uncompressed_for_head_request() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let request = Request::builder() + .uri("/missing_precompressed.txt") + .header("Accept-Encoding", "gzip") + .method(Method::HEAD) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(request).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-length"], "11"); + // Uncompressed file is served because compressed version is missing + assert!(res.headers().get("content-encoding").is_none()); + + assert!(res.into_body().data().await.is_none()); +} + +#[tokio::test] +async fn access_to_sub_dirs() { + let svc = ServeDir::new(".."); + + let req = Request::builder() + .uri("/tower-http/Cargo.toml") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()["content-type"], "text/x-toml"); + + let body = body_into_text(res.into_body()).await; + + let contents = std::fs::read_to_string("Cargo.toml").unwrap(); + assert_eq!(body, contents); +} + +#[tokio::test] +async fn not_found() { + let svc = ServeDir::new(".."); + + let req = Request::builder() + .uri("/not-found") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::NOT_FOUND); + assert!(res.headers().get(header::CONTENT_TYPE).is_none()); + + let body = body_into_text(res.into_body()).await; + assert!(body.is_empty()); +} + +#[tokio::test] +async fn not_found_precompressed() { + let svc = ServeDir::new("../test-files").precompressed_gzip(); + + let req = Request::builder() + .uri("/not-found") + .header("Accept-Encoding", "gzip") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::NOT_FOUND); + assert!(res.headers().get(header::CONTENT_TYPE).is_none()); + + let body = body_into_text(res.into_body()).await; + assert!(body.is_empty()); +} + +#[tokio::test] +async fn fallbacks_to_different_precompressed_variant_if_not_found_for_head_request() { + let svc = ServeDir::new("../test-files") + .precompressed_gzip() + .precompressed_br(); + + let req = Request::builder() + .uri("/precompressed_br.txt") + .header("Accept-Encoding", "gzip,br,deflate") + .method(Method::HEAD) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "br"); + 
assert_eq!(res.headers()["content-length"], "15"); + + assert!(res.into_body().data().await.is_none()); +} + +#[tokio::test] +async fn fallbacks_to_different_precompressed_variant_if_not_found() { + let svc = ServeDir::new("../test-files") + .precompressed_gzip() + .precompressed_br(); + + let req = Request::builder() + .uri("/precompressed_br.txt") + .header("Accept-Encoding", "gzip,br,deflate") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.headers()["content-type"], "text/plain"); + assert_eq!(res.headers()["content-encoding"], "br"); + + let body = res.into_body().data().await.unwrap().unwrap(); + let mut decompressed = Vec::new(); + BrotliDecompress(&mut &body[..], &mut decompressed).unwrap(); + let decompressed = String::from_utf8(decompressed.to_vec()).unwrap(); + assert!(decompressed.starts_with("Test file")); +} + +#[tokio::test] +async fn redirect_to_trailing_slash_on_dir() { + let svc = ServeDir::new("."); + + let req = Request::builder().uri("/src").body(Body::empty()).unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::TEMPORARY_REDIRECT); + + let location = &res.headers()[http::header::LOCATION]; + assert_eq!(location, "/src/"); +} + +#[tokio::test] +async fn empty_directory_without_index() { + let svc = ServeDir::new(".").append_index_html_on_directories(false); + + let req = Request::new(Body::empty()); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::NOT_FOUND); + assert!(res.headers().get(header::CONTENT_TYPE).is_none()); + + let body = body_into_text(res.into_body()).await; + assert!(body.is_empty()); +} + +async fn body_into_text(body: B) -> String +where + B: HttpBody + Unpin, + B::Error: std::fmt::Debug, +{ + let bytes = hyper::body::to_bytes(body).await.unwrap(); + String::from_utf8(bytes.to_vec()).unwrap() +} + +#[tokio::test] +async fn access_cjk_percent_encoded_uri_path() { + // percent encoding present of 你好世界.txt + let cjk_filename_encoded = "%E4%BD%A0%E5%A5%BD%E4%B8%96%E7%95%8C.txt"; + + let svc = ServeDir::new("../test-files"); + + let req = Request::builder() + .uri(format!("/{}", cjk_filename_encoded)) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()["content-type"], "text/plain"); +} + +#[tokio::test] +async fn access_space_percent_encoded_uri_path() { + let encoded_filename = "filename%20with%20space.txt"; + + let svc = ServeDir::new("../test-files"); + + let req = Request::builder() + .uri(format!("/{}", encoded_filename)) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()["content-type"], "text/plain"); +} + +#[tokio::test] +async fn read_partial_in_bounds() { + let svc = ServeDir::new(".."); + let bytes_start_incl = 9; + let bytes_end_incl = 1023; + + let req = Request::builder() + .uri("/README.md") + .header( + "Range", + format!("bytes={}-{}", bytes_start_incl, bytes_end_incl), + ) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + let file_contents = std::fs::read("../README.md").unwrap(); + assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT); + assert_eq!( + res.headers()["content-length"], + (bytes_end_incl - bytes_start_incl + 1).to_string() + ); + assert!(res.headers()["content-range"] + .to_str() + .unwrap() + .starts_with(&format!( + "bytes {}-{}/{}", + bytes_start_incl, + 
bytes_end_incl, + file_contents.len() + ))); + assert_eq!(res.headers()["content-type"], "text/markdown"); + + let body = hyper::body::to_bytes(res.into_body()).await.ok().unwrap(); + let source = Bytes::from(file_contents[bytes_start_incl..=bytes_end_incl].to_vec()); + assert_eq!(body, source); +} + +#[tokio::test] +async fn read_partial_rejects_out_of_bounds_range() { + let svc = ServeDir::new(".."); + let bytes_start_incl = 0; + let bytes_end_excl = 9999999; + let requested_len = bytes_end_excl - bytes_start_incl; + + let req = Request::builder() + .uri("/README.md") + .header( + "Range", + format!("bytes={}-{}", bytes_start_incl, requested_len - 1), + ) + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::RANGE_NOT_SATISFIABLE); + let file_contents = std::fs::read("../README.md").unwrap(); + assert_eq!( + res.headers()["content-range"], + &format!("bytes */{}", file_contents.len()) + ) +} + +#[tokio::test] +async fn read_partial_errs_on_garbage_header() { + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .header("Range", "bad_format") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::RANGE_NOT_SATISFIABLE); + let file_contents = std::fs::read("../README.md").unwrap(); + assert_eq!( + res.headers()["content-range"], + &format!("bytes */{}", file_contents.len()) + ) +} + +#[tokio::test] +async fn read_partial_errs_on_bad_range() { + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .header("Range", "bytes=-1-15") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::RANGE_NOT_SATISFIABLE); + let file_contents = std::fs::read("../README.md").unwrap(); + assert_eq!( + res.headers()["content-range"], + &format!("bytes */{}", file_contents.len()) + ) +} +#[tokio::test] +async fn last_modified() { + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::OK); + + let last_modified = res + .headers() + .get(header::LAST_MODIFIED) + .expect("Missing last modified header!"); + + // -- If-Modified-Since + + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .header(header::IF_MODIFIED_SINCE, last_modified) + .body(Body::empty()) + .unwrap(); + + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::NOT_MODIFIED); + let body = res.into_body().data().await; + assert!(body.is_none()); + + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .header(header::IF_MODIFIED_SINCE, "Fri, 09 Aug 1996 14:21:40 GMT") + .body(Body::empty()) + .unwrap(); + + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::OK); + let readme_bytes = include_bytes!("../../../../../README.md"); + let body = res.into_body().data().await.unwrap().unwrap(); + assert_eq!(body.as_ref(), readme_bytes); + + // -- If-Unmodified-Since + + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .header(header::IF_UNMODIFIED_SINCE, last_modified) + .body(Body::empty()) + .unwrap(); + + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::OK); + let body = res.into_body().data().await.unwrap().unwrap(); + assert_eq!(body.as_ref(), readme_bytes); 
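+
+    // An `If-Unmodified-Since` date older than the file's last modification time
+    // fails the precondition, so the request below is rejected with `412`.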
+ + let svc = ServeDir::new(".."); + let req = Request::builder() + .uri("/README.md") + .header(header::IF_UNMODIFIED_SINCE, "Fri, 09 Aug 1996 14:21:40 GMT") + .body(Body::empty()) + .unwrap(); + + let res = svc.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::PRECONDITION_FAILED); + let body = res.into_body().data().await; + assert!(body.is_none()); +} + +#[tokio::test] +async fn with_fallback_svc() { + async fn fallback(_: Request) -> io::Result> { + Ok(Response::new(Body::from("from fallback"))) + } + + let svc = ServeDir::new("..").fallback(tower::service_fn(fallback)); + + let req = Request::builder() + .uri("/doesnt-exist") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + + let body = body_into_text(res.into_body()).await; + assert_eq!(body, "from fallback"); +} + +#[tokio::test] +async fn with_fallback_serve_file() { + let svc = ServeDir::new("..").fallback(ServeFile::new("../README.md")); + + let req = Request::builder() + .uri("/doesnt-exist") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::OK); + assert_eq!(res.headers()["content-type"], "text/markdown"); + + let body = body_into_text(res.into_body()).await; + + let contents = std::fs::read_to_string("../README.md").unwrap(); + assert_eq!(body, contents); +} + +#[tokio::test] +async fn method_not_allowed() { + let svc = ServeDir::new(".."); + + let req = Request::builder() + .method(Method::POST) + .uri("/README.md") + .body(Body::empty()) + .unwrap(); + let res = svc.oneshot(req).await.unwrap(); + + assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED); +} diff --git a/tower-http/src/services/fs/serve_file.rs b/tower-http/src/services/fs/serve_file.rs index 45329597..25a9cdf0 100644 --- a/tower-http/src/services/fs/serve_file.rs +++ b/tower-http/src/services/fs/serve_file.rs @@ -113,7 +113,7 @@ where #[cfg(test)] mod tests { - use super::*; + use crate::services::ServeFile; use brotli::BrotliDecompress; use flate2::bufread::DeflateDecoder; use flate2::bufread::GzDecoder; @@ -122,6 +122,7 @@ mod tests { use http::{Request, StatusCode}; use http_body::Body as _; use hyper::Body; + use mime::Mime; use std::io::Read; use std::str::FromStr; use tower::ServiceExt; @@ -449,6 +450,7 @@ mod tests { assert_eq!(res.status(), StatusCode::NOT_FOUND); assert!(res.headers().get(header::CONTENT_TYPE).is_none()); } + #[tokio::test] async fn last_modified() { let svc = ServeFile::new("../README.md");