- pub fn init(args: Args, running: Arc<AtomicBool>) -> Result<Self> {
- let assets_prefix = format!("__dufs_v{}__/", env!("CARGO_PKG_VERSION"));
- let single_file_req_paths = if args.path_is_file {
- vec![
- args.uri_prefix.to_string(),
- args.uri_prefix[0..args.uri_prefix.len() - 1].to_string(),
- encode_uri(&format!(
- "{}{}",
- &args.uri_prefix,
- get_file_name(&args.serve_path)
- )),
- ]
- } else {
- vec![]
- };
- let html = match args.assets.as_ref() {
- Some(path) => Cow::Owned(std::fs::read_to_string(path.join("index.html"))?),
- None => Cow::Borrowed(INDEX_HTML),
- };
- Ok(Self {
- args,
- running,
- single_file_req_paths,
- assets_prefix,
- html,
- })
- }
-
pub async fn call(
    self: Arc<Self>,
    req: Request,
    addr: Option<SocketAddr>,
) -> Result<Response, hyper::Error> {
    // Top-level request entry point: dispatches to `handle`, records the
    // outcome in the HTTP log, and applies cross-cutting response headers.
    // Never returns Err — internal failures are converted into a 500.
    let uri = req.uri().clone();
    let assets_prefix = &self.assets_prefix;
    let enable_cors = self.args.enable_cors;
    // Microsoft's WebDAV redirector needs special-casing (see below).
    let is_microsoft_webdav = req
        .headers()
        .get("user-agent")
        .and_then(|v| v.to_str().ok())
        .map(|v| v.starts_with("Microsoft-WebDAV-MiniRedir/"))
        .unwrap_or_default();
    let mut http_log_data = self.args.http_logger.data(&req);
    if let Some(addr) = addr {
        http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
    }

    let mut res = match self.clone().handle(req, is_microsoft_webdav).await {
        Ok(res) => {
            http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
            // Requests for built-in assets are not logged, keeping the log
            // focused on user content.
            if !uri.path().starts_with(assets_prefix) {
                self.args.http_logger.log(&http_log_data, None);
            }
            res
        }
        Err(err) => {
            // Any error escaping `handle` becomes a logged 500 response
            // rather than tearing down the connection.
            let mut res = Response::default();
            let status = StatusCode::INTERNAL_SERVER_ERROR;
            *res.status_mut() = status;
            http_log_data.insert("status".to_string(), status.as_u16().to_string());
            self.args
                .http_logger
                .log(&http_log_data, Some(err.to_string()));
            res
        }
    };

    if is_microsoft_webdav {
        // microsoft webdav requires this.
        res.headers_mut()
            .insert(CONNECTION, HeaderValue::from_static("close"));
    }
    if enable_cors {
        add_cors(&mut res);
    }
    Ok(res)
}
-
- pub async fn handle(
- self: Arc<Self>,
- req: Request,
- is_microsoft_webdav: bool,
- ) -> Result<Response> {
- let mut res = Response::default();
-
- let req_path = req.uri().path();
- let headers = req.headers();
- let method = req.method().clone();
-
- let relative_path = match self.resolve_path(req_path) {
- Some(v) => v,
- None => {
- status_bad_request(&mut res, "Invalid Path");
- return Ok(res);
- }
- };
-
- if method == Method::GET
- && self
- .handle_internal(&relative_path, headers, &mut res)
- .await?
- {
- return Ok(res);
- }
-
- let authorization = headers.get(AUTHORIZATION);
- let guard =
- self.args
- .auth
- .guard(&relative_path, &method, authorization, is_microsoft_webdav);
-
- let (user, access_paths) = match guard {
- (None, None) => {
- self.auth_reject(&mut res)?;
- return Ok(res);
- }
- (Some(_), None) => {
- status_forbid(&mut res);
- return Ok(res);
- }
- (x, Some(y)) => (x, y),
- };
-
- let query = req.uri().query().unwrap_or_default();
- let query_params: HashMap<String, String> = form_urlencoded::parse(query.as_bytes())
- .map(|(k, v)| (k.to_string(), v.to_string()))
- .collect();
-
- if method.as_str() == "CHECKAUTH" {
- *res.body_mut() = body_full(user.clone().unwrap_or_default());
- return Ok(res);
- } else if method.as_str() == "LOGOUT" {
- self.auth_reject(&mut res)?;
- return Ok(res);
- }
-
- let head_only = method == Method::HEAD;
-
- if self.args.path_is_file {
- if self
- .single_file_req_paths
- .iter()
- .any(|v| v.as_str() == req_path)
- {
- self.handle_send_file(&self.args.serve_path, headers, head_only, &mut res)
- .await?;
- } else {
- status_not_found(&mut res);
- }
- return Ok(res);
- }
- let path = match self.join_path(&relative_path) {
- Some(v) => v,
- None => {
- status_forbid(&mut res);
- return Ok(res);
- }
- };
-
- let path = path.as_path();
-
- let (is_miss, is_dir, is_file, size) = match fs::metadata(path).await.ok() {
- Some(meta) => (false, meta.is_dir(), meta.is_file(), meta.len()),
- None => (true, false, false, 0),
- };
-
- let allow_upload = self.args.allow_upload;
- let allow_delete = self.args.allow_delete;
- let allow_search = self.args.allow_search;
- let allow_archive = self.args.allow_archive;
- let render_index = self.args.render_index;
- let render_spa = self.args.render_spa;
- let render_try_index = self.args.render_try_index;
-
- if !self.args.allow_symlink && !is_miss && !self.is_root_contained(path).await {
- status_not_found(&mut res);
- return Ok(res);
- }
-
- match method {
- Method::GET | Method::HEAD => {
- if is_dir {
- if render_try_index {
- if allow_archive && has_query_flag(&query_params, "zip") {
- if !allow_archive {
- status_not_found(&mut res);
- return Ok(res);
- }
- self.handle_zip_dir(path, head_only, access_paths, &mut res)
- .await?;
- } else if allow_search && query_params.contains_key("q") {
- self.handle_search_dir(
- path,
- &query_params,
- head_only,
- user,
- access_paths,
- &mut res,
- )
- .await?;
- } else {
- self.handle_render_index(
- path,
- &query_params,
- headers,
- head_only,
- user,
- access_paths,
- &mut res,
- )
- .await?;
- }
- } else if render_index || render_spa {
- self.handle_render_index(
- path,
- &query_params,
- headers,
- head_only,
- user,
- access_paths,
- &mut res,
- )
- .await?;
- } else if has_query_flag(&query_params, "zip") {
- if !allow_archive {
- status_not_found(&mut res);
- return Ok(res);
- }
- self.handle_zip_dir(path, head_only, access_paths, &mut res)
- .await?;
- } else if allow_search && query_params.contains_key("q") {
- self.handle_search_dir(
- path,
- &query_params,
- head_only,
- user,
- access_paths,
- &mut res,
- )
- .await?;
- } else {
- self.handle_ls_dir(
- path,
- true,
- &query_params,
- head_only,
- user,
- access_paths,
- &mut res,
- )
- .await?;
- }
- } else if is_file {
- if has_query_flag(&query_params, "edit") {
- self.handle_edit_file(path, DataKind::Edit, head_only, user, &mut res)
- .await?;
- } else if has_query_flag(&query_params, "view") {
- self.handle_edit_file(path, DataKind::View, head_only, user, &mut res)
- .await?;
- } else if has_query_flag(&query_params, "hash") {
- self.handle_hash_file(path, head_only, &mut res).await?;
- } else {
- self.handle_send_file(path, headers, head_only, &mut res)
- .await?;
- }
- } else if render_spa {
- self.handle_render_spa(path, headers, head_only, &mut res)
- .await?;
- } else if allow_upload && req_path.ends_with('/') {
- self.handle_ls_dir(
- path,
- false,
- &query_params,
- head_only,
- user,
- access_paths,
- &mut res,
- )
- .await?;
- } else {
- status_not_found(&mut res);
- }
- }
- Method::OPTIONS => {
- set_webdav_headers(&mut res);
- }
- Method::PUT => {
- if is_dir || !allow_upload || (!allow_delete && size > 0) {
- status_forbid(&mut res);
- } else {
- self.handle_upload(path, None, size, req, &mut res).await?;
- }
- }
- Method::PATCH => {
- if is_miss {
- status_not_found(&mut res);
- } else if !allow_upload {
- status_forbid(&mut res);
- } else {
- let offset = match parse_upload_offset(headers, size) {
- Ok(v) => v,
- Err(err) => {
- status_bad_request(&mut res, &err.to_string());
- return Ok(res);
- }
- };
- match offset {
- Some(offset) => {
- if offset < size && !allow_delete {
- status_forbid(&mut res);
- }
- self.handle_upload(path, Some(offset), size, req, &mut res)
- .await?;
- }
- None => {
- *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
- }
- }
- }
- }
- Method::DELETE => {
- if !allow_delete {
- status_forbid(&mut res);
- } else if !is_miss {
- self.handle_delete(path, is_dir, &mut res).await?
- } else {
- status_not_found(&mut res);
- }
- }
- method => match method.as_str() {
- "PROPFIND" => {
- if is_dir {
- let access_paths =
- if access_paths.perm().indexonly() && authorization.is_none() {
- // see https://github.com/sigoden/dufs/issues/229
- AccessPaths::new(AccessPerm::ReadOnly)
- } else {
- access_paths
- };
- self.handle_propfind_dir(path, headers, access_paths, &mut res)
- .await?;
- } else if is_file {
- self.handle_propfind_file(path, &mut res).await?;
- } else {
- status_not_found(&mut res);
- }
- }
- "PROPPATCH" => {
- if is_file {
- self.handle_proppatch(req_path, &mut res).await?;
- } else {
- status_not_found(&mut res);
- }
- }
- "MKCOL" => {
- if !allow_upload {
- status_forbid(&mut res);
- } else if !is_miss {
- *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
- *res.body_mut() = body_full("Already exists");
- } else {
- self.handle_mkcol(path, &mut res).await?;
- }
- }
- "COPY" => {
- if !allow_upload {
- status_forbid(&mut res);
- } else if is_miss {
- status_not_found(&mut res);
- } else {
- self.handle_copy(path, &req, &mut res).await?
- }
- }
- "MOVE" => {
- if !allow_upload || !allow_delete {
- status_forbid(&mut res);
- } else if is_miss {
- status_not_found(&mut res);
- } else {
- self.handle_move(path, &req, &mut res).await?
- }
- }
- "LOCK" => {
- // Fake lock
- if is_file {
- let has_auth = authorization.is_some();
- self.handle_lock(req_path, has_auth, &mut res).await?;
- } else {
- status_not_found(&mut res);
- }
- }
- "UNLOCK" => {
- // Fake unlock
- if is_miss {
- status_not_found(&mut res);
- }
- }
- _ => {
- *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
- }
- },
- }
- Ok(res)
- }
-
async fn handle_upload(
    &self,
    path: &Path,
    upload_offset: Option<u64>,
    size: u64,
    req: Request,
    res: &mut Response,
) -> Result<()> {
    // Stream the request body into `path`.
    //
    // `upload_offset` selects the mode:
    //   - None: fresh upload, creating/truncating the file (201 Created).
    //   - Some(offset == size): append to the existing file (204 No Content).
    //   - Some(offset < size): overwrite from `offset` (204 No Content).
    // `size` is the file's length as measured before the upload started.
    ensure_path_parent(path).await?;
    let (mut file, status) = match upload_offset {
        None => (fs::File::create(path).await?, StatusCode::CREATED),
        Some(offset) if offset == size => (
            fs::OpenOptions::new().append(true).open(path).await?,
            StatusCode::NO_CONTENT,
        ),
        Some(offset) => {
            let mut file = fs::OpenOptions::new().write(true).open(path).await?;
            file.seek(SeekFrom::Start(offset)).await?;
            (file, StatusCode::NO_CONTENT)
        }
    };
    let stream = IncomingStream::new(req.into_body());

    // Adapt the hyper body stream into an AsyncRead so io::copy can drive it.
    let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
    let body_reader = StreamReader::new(body_with_io_error);

    pin_mut!(body_reader);

    let ret = io::copy(&mut body_reader, &mut file).await;
    // Re-read the size on disk: it reflects how much actually landed.
    let size = fs::metadata(path)
        .await
        .map(|v| v.len())
        .unwrap_or_default();
    if ret.is_err() {
        // A small, freshly-created partial upload is deleted so a retry
        // starts clean; larger partials are kept so they can be resumed.
        if upload_offset.is_none() && size < RESUMABLE_UPLOAD_MIN_SIZE {
            let _ = tokio::fs::remove_file(&path).await;
        }
        ret?;
    }

    *res.status_mut() = status;

    Ok(())
}
-
- async fn handle_delete(&self, path: &Path, is_dir: bool, res: &mut Response) -> Result<()> {
- match is_dir {
- true => fs::remove_dir_all(path).await?,
- false => fs::remove_file(path).await?,
- }
-
- status_no_content(res);
- Ok(())
- }
-
- async fn handle_ls_dir(
- &self,
- path: &Path,
- exist: bool,
- query_params: &HashMap<String, String>,
- head_only: bool,
- user: Option<String>,
- access_paths: AccessPaths,
- res: &mut Response,
- ) -> Result<()> {
- let mut paths = vec![];
- if exist {
- paths = match self.list_dir(path, path, access_paths.clone()).await {
- Ok(paths) => paths,
- Err(_) => {
- status_forbid(res);
- return Ok(());
- }
- }
- };
- self.send_index(
- path,
- paths,
- exist,
- query_params,
- head_only,
- user,
- access_paths,
- res,
- )
- }
-
async fn handle_search_dir(
    &self,
    path: &Path,
    query_params: &HashMap<String, String>,
    head_only: bool,
    user: Option<String>,
    access_paths: AccessPaths,
    res: &mut Response,
) -> Result<()> {
    // Case-insensitive file-name search under `path`, rendered with the same
    // index template as a normal directory listing.
    let mut paths: Vec<PathItem> = vec![];
    let search = query_params
        .get("q")
        .ok_or_else(|| anyhow!("invalid q"))?
        .to_lowercase();
    if search.is_empty() {
        // An empty query degenerates to a plain listing.
        return self
            .handle_ls_dir(path, true, query_params, head_only, user, access_paths, res)
            .await;
    } else {
        let path_buf = path.to_path_buf();
        let hidden = Arc::new(self.args.hidden.to_vec());
        let search = search.clone();

        let access_paths = access_paths.clone();
        // The recursive walk runs on a spawned task; `running` allows it to
        // be stopped on shutdown. The predicate matches on lowercased file
        // names.
        let search_paths = tokio::spawn(collect_dir_entries(
            access_paths,
            self.running.clone(),
            path_buf,
            hidden,
            self.args.allow_symlink,
            self.args.serve_path.clone(),
            move |x| get_file_name(x.path()).to_lowercase().contains(&search),
        ))
        .await?;

        for search_path in search_paths.into_iter() {
            // Entries that fail to convert (e.g. vanished mid-walk) are
            // silently dropped from the result.
            if let Ok(Some(item)) = self.to_pathitem(search_path, path.to_path_buf()).await {
                paths.push(item);
            }
        }
    }
    self.send_index(
        path,
        paths,
        true,
        query_params,
        head_only,
        user,
        access_paths,
        res,
    )
}
-
async fn handle_zip_dir(
    &self,
    path: &Path,
    head_only: bool,
    access_paths: AccessPaths,
    res: &mut Response,
) -> Result<()> {
    // Stream `path` as a zip archive. A spawned task writes zip data into
    // one end of an in-memory duplex pipe while the response body streams
    // from the other end, so the archive is never buffered in full.
    let (mut writer, reader) = tokio::io::duplex(BUF_SIZE);
    let filename = try_get_file_name(path)?;
    set_content_disposition(res, false, &format!("{}.zip", filename))?;
    res.headers_mut()
        .insert("content-type", HeaderValue::from_static("application/zip"));
    if head_only {
        // HEAD: headers only; skip spawning the zip task entirely.
        return Ok(());
    }
    // Clone everything the zip task needs, since it outlives this call.
    let path = path.to_owned();
    let hidden = self.args.hidden.clone();
    let running = self.running.clone();
    let compression = self.args.compress.to_compression();
    let follow_symlinks = self.args.allow_symlink;
    let serve_path = self.args.serve_path.clone();
    tokio::spawn(async move {
        if let Err(e) = zip_dir(
            &mut writer,
            &path,
            access_paths,
            &hidden,
            compression,
            follow_symlinks,
            serve_path,
            running,
        )
        .await
        {
            // The response is already streaming; the error can only be
            // logged, not reported to the client.
            error!("Failed to zip {}, {e}", path.display());
        }
    });
    let reader_stream = ReaderStream::with_capacity(reader, BUF_SIZE);
    let stream_body = StreamBody::new(
        reader_stream
            .map_ok(Frame::data)
            .map_err(|err| anyhow!("{err}")),
    );
    let boxed_body = stream_body.boxed();
    *res.body_mut() = boxed_body;
    Ok(())
}
-
- async fn handle_render_index(
- &self,
- path: &Path,
- query_params: &HashMap<String, String>,
- headers: &HeaderMap<HeaderValue>,
- head_only: bool,
- user: Option<String>,
- access_paths: AccessPaths,
- res: &mut Response,
- ) -> Result<()> {
- let index_path = path.join(INDEX_NAME);
- if fs::metadata(&index_path)
- .await
- .ok()
- .map(|v| v.is_file())
- .unwrap_or_default()
- {
- self.handle_send_file(&index_path, headers, head_only, res)
- .await?;
- } else if self.args.render_try_index {
- self.handle_ls_dir(path, true, query_params, head_only, user, access_paths, res)
- .await?;
- } else {
- status_not_found(res)
- }
- Ok(())
- }
-
- async fn handle_render_spa(
- &self,
- path: &Path,
- headers: &HeaderMap<HeaderValue>,
- head_only: bool,
- res: &mut Response,
- ) -> Result<()> {
- if path.extension().is_none() {
- let path = self.args.serve_path.join(INDEX_NAME);
- self.handle_send_file(&path, headers, head_only, res)
- .await?;
- } else {
- status_not_found(res)
- }
- Ok(())
- }
-
async fn handle_internal(
    &self,
    req_path: &str,
    headers: &HeaderMap<HeaderValue>,
    res: &mut Response,
) -> Result<bool> {
    // Serve built-in endpoints that bypass authentication: static UI assets
    // under `assets_prefix` and the health-check path. Returns Ok(true) when
    // the request was handled here, Ok(false) to continue normal routing.
    if let Some(name) = req_path.strip_prefix(&self.assets_prefix) {
        match self.args.assets.as_ref() {
            Some(assets_path) => {
                // A custom assets directory takes precedence over the
                // embedded files.
                // NOTE(review): `name` comes straight from the URL; confirm
                // that `join(name)` cannot traverse outside `assets_path`.
                let path = assets_path.join(name);
                if path.exists() {
                    // head_only is always false here; assets are served in
                    // full even for HEAD requests.
                    self.handle_send_file(&path, headers, false, res).await?;
                } else {
                    status_not_found(res);
                    return Ok(true);
                }
            }
            // Embedded fallbacks compiled into the binary.
            None => match name {
                "index.js" => {
                    *res.body_mut() = body_full(INDEX_JS);
                    res.headers_mut().insert(
                        "content-type",
                        HeaderValue::from_static("application/javascript; charset=UTF-8"),
                    );
                }
                "index.css" => {
                    *res.body_mut() = body_full(INDEX_CSS);
                    res.headers_mut().insert(
                        "content-type",
                        HeaderValue::from_static("text/css; charset=UTF-8"),
                    );
                }
                "favicon.ico" => {
                    *res.body_mut() = body_full(FAVICON_ICO);
                    res.headers_mut()
                        .insert("content-type", HeaderValue::from_static("image/x-icon"));
                }
                _ => {
                    status_not_found(res);
                }
            },
        }
        // The asset prefix is version-stamped, so assets may be cached
        // forever.
        res.headers_mut().insert(
            "cache-control",
            HeaderValue::from_static("public, max-age=31536000, immutable"),
        );
        res.headers_mut().insert(
            "x-content-type-options",
            HeaderValue::from_static("nosniff"),
        );
        Ok(true)
    } else if req_path == HEALTH_CHECK_PATH {
        // Lightweight liveness endpoint.
        res.headers_mut()
            .typed_insert(ContentType::from(mime_guess::mime::APPLICATION_JSON));

        *res.body_mut() = body_full(r#"{"status":"OK"}"#);
        Ok(true)
    } else {
        Ok(false)
    }
}
-
async fn handle_send_file(
    &self,
    path: &Path,
    headers: &HeaderMap<HeaderValue>,
    head_only: bool,
    res: &mut Response,
) -> Result<()> {
    // Send a file, honoring HTTP cache validators (If-(Un)Modified-Since,
    // If-(None-)Match, If-Range) and single- or multi-part Range requests.
    let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
    let (mut file, meta) = (file?, meta?);
    let size = meta.len();
    let mut use_range = true;
    if let Some((etag, last_modified)) = extract_cache_headers(&meta) {
        // Precondition checks, evaluated before anything is streamed.
        if let Some(if_unmodified_since) = headers.typed_get::<IfUnmodifiedSince>() {
            if !if_unmodified_since.precondition_passes(last_modified.into()) {
                *res.status_mut() = StatusCode::PRECONDITION_FAILED;
                return Ok(());
            }
        }
        if let Some(if_match) = headers.typed_get::<IfMatch>() {
            if !if_match.precondition_passes(&etag) {
                *res.status_mut() = StatusCode::PRECONDITION_FAILED;
                return Ok(());
            }
        }
        if let Some(if_modified_since) = headers.typed_get::<IfModifiedSince>() {
            if !if_modified_since.is_modified(last_modified.into()) {
                *res.status_mut() = StatusCode::NOT_MODIFIED;
                return Ok(());
            }
        }
        if let Some(if_none_match) = headers.typed_get::<IfNoneMatch>() {
            if !if_none_match.precondition_passes(&etag) {
                *res.status_mut() = StatusCode::NOT_MODIFIED;
                return Ok(());
            }
        }

        res.headers_mut()
            .typed_insert(CacheControl::new().with_no_cache());
        res.headers_mut().typed_insert(last_modified);
        res.headers_mut().typed_insert(etag.clone());

        // Range requests are only honored when If-Range (if present) still
        // matches the current validators.
        if headers.typed_get::<Range>().is_some() {
            use_range = headers
                .typed_get::<IfRange>()
                .map(|if_range| !if_range.is_modified(Some(&etag), Some(&last_modified)))
                // Always be fresh if there is no validators
                .unwrap_or(true);
        } else {
            use_range = false;
        }
    }

    // Outer Option: was a usable Range header present?
    // Inner Option: did it parse into satisfiable ranges?
    let ranges = if use_range {
        headers.get(RANGE).map(|range| {
            range
                .to_str()
                .ok()
                .and_then(|range| parse_range(range, size))
        })
    } else {
        None
    };

    res.headers_mut().insert(
        CONTENT_TYPE,
        HeaderValue::from_str(&get_content_type(path).await?)?,
    );

    let filename = try_get_file_name(path)?;
    set_content_disposition(res, true, filename)?;

    res.headers_mut().typed_insert(AcceptRanges::bytes());

    if let Some(ranges) = ranges {
        if let Some(ranges) = ranges {
            if ranges.len() == 1 {
                // Single range: 206 with Content-Range, streamed from disk.
                let (start, end) = ranges[0];
                file.seek(SeekFrom::Start(start)).await?;
                let range_size = end - start + 1;
                *res.status_mut() = StatusCode::PARTIAL_CONTENT;
                let content_range = format!("bytes {}-{}/{}", start, end, size);
                res.headers_mut()
                    .insert(CONTENT_RANGE, content_range.parse()?);
                res.headers_mut()
                    .insert(CONTENT_LENGTH, format!("{range_size}").parse()?);
                if head_only {
                    return Ok(());
                }

                let stream_body = StreamBody::new(
                    LengthLimitedStream::new(file, range_size as usize)
                        .map_ok(Frame::data)
                        .map_err(|err| anyhow!("{err}")),
                );
                let boxed_body = stream_body.boxed();
                *res.body_mut() = boxed_body;
            } else {
                // Multiple ranges: multipart/byteranges body, assembled
                // fully in memory.
                *res.status_mut() = StatusCode::PARTIAL_CONTENT;
                let boundary = Uuid::new_v4();
                let mut body = Vec::new();
                let content_type = get_content_type(path).await?;
                for (start, end) in ranges {
                    file.seek(SeekFrom::Start(start)).await?;
                    let range_size = end - start + 1;
                    let content_range = format!("bytes {}-{}/{}", start, end, size);
                    let part_header = format!(
                        "--{boundary}\r\nContent-Type: {content_type}\r\nContent-Range: {content_range}\r\n\r\n",
                    );
                    body.extend_from_slice(part_header.as_bytes());
                    let mut buffer = vec![0; range_size as usize];
                    file.read_exact(&mut buffer).await?;
                    body.extend_from_slice(&buffer);
                    body.extend_from_slice(b"\r\n");
                }
                body.extend_from_slice(format!("--{boundary}--\r\n").as_bytes());
                res.headers_mut().insert(
                    CONTENT_TYPE,
                    format!("multipart/byteranges; boundary={boundary}").parse()?,
                );
                res.headers_mut()
                    .insert(CONTENT_LENGTH, format!("{}", body.len()).parse()?);
                if head_only {
                    return Ok(());
                }
                *res.body_mut() = body_full(body);
            }
        } else {
            // Range header present but unsatisfiable.
            *res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE;
            res.headers_mut()
                .insert(CONTENT_RANGE, format!("bytes */{size}").parse()?);
        }
    } else {
        // No (usable) range: stream the whole file.
        res.headers_mut()
            .insert(CONTENT_LENGTH, format!("{size}").parse()?);
        if head_only {
            return Ok(());
        }

        let reader_stream = ReaderStream::with_capacity(file, BUF_SIZE);
        let stream_body = StreamBody::new(
            reader_stream
                .map_ok(Frame::data)
                .map_err(|err| anyhow!("{err}")),
        );
        let boxed_body = stream_body.boxed();
        *res.body_mut() = boxed_body;
    }
    Ok(())
}
-
async fn handle_edit_file(
    &self,
    path: &Path,
    kind: DataKind,
    head_only: bool,
    user: Option<String>,
    res: &mut Response,
) -> Result<()> {
    // Render the edit/view page for a file. The page is the index HTML
    // template with a base64-encoded JSON payload describing the file and
    // the current permissions spliced in.
    let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
    let (file, meta) = (file?, meta?);
    let href = format!(
        "/{}",
        normalize_path(path.strip_prefix(&self.args.serve_path)?)
    );
    // Sniff only the first 1 KiB to decide whether the content is text.
    let mut buffer: Vec<u8> = vec![];
    file.take(1024).read_to_end(&mut buffer).await?;
    // Editable only when small enough for the in-browser editor AND the
    // sample looks like text.
    let editable =
        meta.len() <= EDITABLE_TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text();
    let data = EditData {
        href,
        kind,
        uri_prefix: self.args.uri_prefix.clone(),
        allow_upload: self.args.allow_upload,
        allow_delete: self.args.allow_delete,
        auth: self.args.auth.exist(),
        user,
        editable,
    };
    res.headers_mut()
        .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
    let index_data = STANDARD.encode(serde_json::to_string(&data)?);
    let output = self
        .html
        .replace(
            "__ASSETS_PREFIX__",
            &format!("{}{}", self.args.uri_prefix, self.assets_prefix),
        )
        .replace("__INDEX_DATA__", &index_data);
    res.headers_mut()
        .typed_insert(ContentLength(output.len() as u64));
    res.headers_mut()
        .typed_insert(CacheControl::new().with_no_cache());
    if head_only {
        return Ok(());
    }
    *res.body_mut() = body_full(output);
    Ok(())
}
-
- async fn handle_hash_file(
- &self,
- path: &Path,
- head_only: bool,
- res: &mut Response,
- ) -> Result<()> {
- let output = sha256_file(path).await?;
- res.headers_mut()
- .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
- res.headers_mut()
- .typed_insert(ContentLength(output.len() as u64));
- if head_only {
- return Ok(());
- }
- *res.body_mut() = body_full(output);
- Ok(())
- }
-
async fn handle_propfind_dir(
    &self,
    path: &Path,
    headers: &HeaderMap<HeaderValue>,
    access_paths: AccessPaths,
    res: &mut Response,
) -> Result<()> {
    // WebDAV PROPFIND on a directory: respond with a multistatus document
    // for the directory itself (depth 0) and optionally its direct children
    // (depth 1). Deeper or malformed Depth headers are rejected.
    let depth: u32 = match headers.get("depth") {
        Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) {
            Some(0) => 0,
            Some(1) => 1,
            _ => {
                status_bad_request(res, "Invalid depth: only 0 and 1 are allowed.");
                return Ok(());
            }
        },
        // A missing Depth header defaults to 1.
        None => 1,
    };
    // The directory's own entry; None means it is excluded from view.
    let mut paths = match self.to_pathitem(path, &self.args.serve_path).await? {
        Some(v) => vec![v],
        None => vec![],
    };
    if depth == 1 {
        match self
            .list_dir(path, &self.args.serve_path, access_paths)
            .await
        {
            Ok(child) => paths.extend(child),
            Err(_) => {
                status_forbid(res);
                return Ok(());
            }
        }
    }
    // Concatenate the per-entry DAV XML fragments into one multistatus body.
    let output = paths
        .iter()
        .map(|v| v.to_dav_xml(self.args.uri_prefix.as_str()))
        .fold(String::new(), |mut acc, v| {
            acc.push_str(&v);
            acc
        });
    res_multistatus(res, &output);
    Ok(())
}
-
- async fn handle_propfind_file(&self, path: &Path, res: &mut Response) -> Result<()> {
- if let Some(pathitem) = self.to_pathitem(path, &self.args.serve_path).await? {
- res_multistatus(res, &pathitem.to_dav_xml(self.args.uri_prefix.as_str()));
- } else {
- status_not_found(res);
- }
- Ok(())
- }
-
- async fn handle_mkcol(&self, path: &Path, res: &mut Response) -> Result<()> {
- fs::create_dir_all(path).await?;
- *res.status_mut() = StatusCode::CREATED;
- Ok(())
- }
-
- async fn handle_copy(&self, path: &Path, req: &Request, res: &mut Response) -> Result<()> {
- let dest = match self.extract_dest(req, res) {
- Some(dest) => dest,
- None => {
- return Ok(());
- }
- };
-
- let meta = fs::symlink_metadata(path).await?;
- if meta.is_dir() {
- status_forbid(res);
- return Ok(());
- }
-
- ensure_path_parent(&dest).await?;
-
- fs::copy(path, &dest).await?;
-
- status_no_content(res);
- Ok(())
- }
-
- async fn handle_move(&self, path: &Path, req: &Request, res: &mut Response) -> Result<()> {
- let dest = match self.extract_dest(req, res) {
- Some(dest) => dest,
- None => {
- return Ok(());
- }
- };
-
- ensure_path_parent(&dest).await?;
-
- fs::rename(path, &dest).await?;
-
- status_no_content(res);
- Ok(())
- }
-
- async fn handle_lock(&self, req_path: &str, auth: bool, res: &mut Response) -> Result<()> {
- let token = if auth {
- format!("opaquelocktoken:{}", Uuid::new_v4())
- } else {
- Utc::now().timestamp().to_string()
- };
-
- res.headers_mut().insert(
- "content-type",
- HeaderValue::from_static("application/xml; charset=utf-8"),
- );
- res.headers_mut()
- .insert("lock-token", format!("<{token}>").parse()?);
-
- *res.body_mut() = body_full(format!(
- r#"<?xml version="1.0" encoding="utf-8"?>
+ pub fn init(args: Args, running: Arc<AtomicBool>) -> Result<Self> {
+ let assets_prefix = format!("__ozva_v{}__/", env!("CARGO_PKG_VERSION"));
+ let single_file_req_paths = if args.path_is_file {
+ vec![
+ args.uri_prefix.to_string(),
+ args.uri_prefix[0..args.uri_prefix.len() - 1].to_string(),
+ encode_uri(&format!(
+ "{}{}",
+ &args.uri_prefix,
+ get_file_name(&args.serve_path)
+ )),
+ ]
+ } else {
+ vec![]
+ };
+ let html = match args.assets.as_ref() {
+ Some(path) => Cow::Owned(std::fs::read_to_string(path.join("index.html"))?),
+ None => Cow::Borrowed(INDEX_HTML),
+ };
+ Ok(Self {
+ args,
+ running,
+ single_file_req_paths,
+ assets_prefix,
+ html,
+ })
+ }
+
pub async fn call(
    self: Arc<Self>,
    req: Request,
    addr: Option<SocketAddr>,
) -> Result<Response, hyper::Error> {
    // Request entry point: dispatches to `handle`, records the outcome in
    // the HTTP log, and applies cross-cutting headers. Never returns Err —
    // internal failures become a logged 500 response.
    let uri = req.uri().clone();
    let assets_prefix = &self.assets_prefix;
    let enable_cors = self.args.enable_cors;
    // Microsoft's WebDAV redirector gets special treatment (see below).
    let is_microsoft_webdav = req
        .headers()
        .get("user-agent")
        .and_then(|v| v.to_str().ok())
        .map(|v| v.starts_with("Microsoft-WebDAV-MiniRedir/"))
        .unwrap_or_default();
    let mut http_log_data = self.args.http_logger.data(&req);
    if let Some(addr) = addr {
        http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
    }

    let mut res = match self.clone().handle(req, is_microsoft_webdav).await {
        Ok(res) => {
            http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
            // Asset requests are excluded from the access log.
            if !uri.path().starts_with(assets_prefix) {
                self.args.http_logger.log(&http_log_data, None);
            }
            res
        }
        Err(err) => {
            // Convert an escaped error into a logged 500.
            let mut res = Response::default();
            let status = StatusCode::INTERNAL_SERVER_ERROR;
            *res.status_mut() = status;
            http_log_data.insert("status".to_string(), status.as_u16().to_string());
            self.args
                .http_logger
                .log(&http_log_data, Some(err.to_string()));
            res
        }
    };

    if is_microsoft_webdav {
        // microsoft webdav requires this.
        res.headers_mut()
            .insert(CONNECTION, HeaderValue::from_static("close"));
    }
    if enable_cors {
        add_cors(&mut res);
    }
    Ok(res)
}
+
+ pub async fn handle(
+ self: Arc<Self>,
+ req: Request,
+ is_microsoft_webdav: bool,
+ ) -> Result<Response> {
+ let mut res = Response::default();
+
+ let req_path = req.uri().path();
+ let headers = req.headers();
+ let method = req.method().clone();
+
+ let relative_path = match self.resolve_path(req_path) {
+ Some(v) => v,
+ None => {
+ status_bad_request(&mut res, "Invalid Path");
+ return Ok(res);
+ }
+ };
+
+ if method == Method::GET
+ && self
+ .handle_internal(&relative_path, headers, &mut res)
+ .await?
+ {
+ return Ok(res);
+ }
+
+ let authorization = headers.get(AUTHORIZATION);
+ let guard =
+ self.args
+ .auth
+ .guard(&relative_path, &method, authorization, is_microsoft_webdav);
+
+ let (user, access_paths) = match guard {
+ (None, None) => {
+ self.auth_reject(&mut res)?;
+ return Ok(res);
+ }
+ (Some(_), None) => {
+ status_forbid(&mut res);
+ return Ok(res);
+ }
+ (x, Some(y)) => (x, y),
+ };
+
+ let query = req.uri().query().unwrap_or_default();
+ let query_params: HashMap<String, String> = form_urlencoded::parse(query.as_bytes())
+ .map(|(k, v)| (k.to_string(), v.to_string()))
+ .collect();
+
+ if method.as_str() == "CHECKAUTH" {
+ *res.body_mut() = body_full(user.clone().unwrap_or_default());
+ return Ok(res);
+ } else if method.as_str() == "LOGOUT" {
+ self.auth_reject(&mut res)?;
+ return Ok(res);
+ }
+
+ let head_only = method == Method::HEAD;
+
+ if self.args.path_is_file {
+ if self
+ .single_file_req_paths
+ .iter()
+ .any(|v| v.as_str() == req_path)
+ {
+ self.handle_send_file(&self.args.serve_path, headers, head_only, &mut res)
+ .await?;
+ } else {
+ status_not_found(&mut res);
+ }
+ return Ok(res);
+ }
+ let path = match self.join_path(&relative_path) {
+ Some(v) => v,
+ None => {
+ status_forbid(&mut res);
+ return Ok(res);
+ }
+ };
+
+ let path = path.as_path();
+
+ let (is_miss, is_dir, is_file, size) = match fs::metadata(path).await.ok() {
+ Some(meta) => (false, meta.is_dir(), meta.is_file(), meta.len()),
+ None => (true, false, false, 0),
+ };
+
+ let allow_upload = self.args.allow_upload;
+ let allow_delete = self.args.allow_delete;
+ let allow_search = self.args.allow_search;
+ let allow_archive = self.args.allow_archive;
+ let render_index = self.args.render_index;
+ let render_spa = self.args.render_spa;
+ let render_try_index = self.args.render_try_index;
+
+ if !self.args.allow_symlink && !is_miss && !self.is_root_contained(path).await {
+ status_not_found(&mut res);
+ return Ok(res);
+ }
+
+ match method {
+ Method::GET | Method::HEAD => {
+ if is_dir {
+ if render_try_index {
+ if allow_archive && has_query_flag(&query_params, "zip") {
+ if !allow_archive {
+ status_not_found(&mut res);
+ return Ok(res);
+ }
+ self.handle_zip_dir(path, head_only, access_paths, &mut res)
+ .await?;
+ } else if allow_search && query_params.contains_key("q") {
+ self.handle_search_dir(
+ path,
+ &query_params,
+ head_only,
+ user,
+ access_paths,
+ &mut res,
+ )
+ .await?;
+ } else {
+ self.handle_render_index(
+ path,
+ &query_params,
+ headers,
+ head_only,
+ user,
+ access_paths,
+ &mut res,
+ )
+ .await?;
+ }
+ } else if render_index || render_spa {
+ self.handle_render_index(
+ path,
+ &query_params,
+ headers,
+ head_only,
+ user,
+ access_paths,
+ &mut res,
+ )
+ .await?;
+ } else if has_query_flag(&query_params, "zip") {
+ if !allow_archive {
+ status_not_found(&mut res);
+ return Ok(res);
+ }
+ self.handle_zip_dir(path, head_only, access_paths, &mut res)
+ .await?;
+ } else if allow_search && query_params.contains_key("q") {
+ self.handle_search_dir(
+ path,
+ &query_params,
+ head_only,
+ user,
+ access_paths,
+ &mut res,
+ )
+ .await?;
+ } else {
+ self.handle_ls_dir(
+ path,
+ true,
+ &query_params,
+ head_only,
+ user,
+ access_paths,
+ &mut res,
+ )
+ .await?;
+ }
+ } else if is_file {
+ if has_query_flag(&query_params, "edit") {
+ self.handle_edit_file(path, DataKind::Edit, head_only, user, &mut res)
+ .await?;
+ } else if has_query_flag(&query_params, "view") {
+ self.handle_edit_file(path, DataKind::View, head_only, user, &mut res)
+ .await?;
+ } else if has_query_flag(&query_params, "hash") {
+ self.handle_hash_file(path, head_only, &mut res).await?;
+ } else {
+ self.handle_send_file(path, headers, head_only, &mut res)
+ .await?;
+ }
+ } else if render_spa {
+ self.handle_render_spa(path, headers, head_only, &mut res)
+ .await?;
+ } else if allow_upload && req_path.ends_with('/') {
+ self.handle_ls_dir(
+ path,
+ false,
+ &query_params,
+ head_only,
+ user,
+ access_paths,
+ &mut res,
+ )
+ .await?;
+ } else {
+ status_not_found(&mut res);
+ }
+ }
+ Method::OPTIONS => {
+ set_webdav_headers(&mut res);
+ }
+ Method::PUT => {
+ if is_dir || !allow_upload || (!allow_delete && size > 0) {
+ status_forbid(&mut res);
+ } else {
+ self.handle_upload(path, None, size, req, &mut res).await?;
+ }
+ }
+ Method::PATCH => {
+ if is_miss {
+ status_not_found(&mut res);
+ } else if !allow_upload {
+ status_forbid(&mut res);
+ } else {
+ let offset = match parse_upload_offset(headers, size) {
+ Ok(v) => v,
+ Err(err) => {
+ status_bad_request(&mut res, &err.to_string());
+ return Ok(res);
+ }
+ };
+ match offset {
+ Some(offset) => {
+ if offset < size && !allow_delete {
+ status_forbid(&mut res);
+ }
+ self.handle_upload(path, Some(offset), size, req, &mut res)
+ .await?;
+ }
+ None => {
+ *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
+ }
+ }
+ }
+ }
+ Method::DELETE => {
+ if !allow_delete {
+ status_forbid(&mut res);
+ } else if !is_miss {
+ self.handle_delete(path, is_dir, &mut res).await?
+ } else {
+ status_not_found(&mut res);
+ }
+ }
+ method => match method.as_str() {
+ "PROPFIND" => {
+ if is_dir {
+ let access_paths =
+ if access_paths.perm().indexonly() && authorization.is_none() {
+ // see https://github.com/sigoden/dufs/issues/229
+ AccessPaths::new(AccessPerm::ReadOnly)
+ } else {
+ access_paths
+ };
+ self.handle_propfind_dir(path, headers, access_paths, &mut res)
+ .await?;
+ } else if is_file {
+ self.handle_propfind_file(path, &mut res).await?;
+ } else {
+ status_not_found(&mut res);
+ }
+ }
+ "PROPPATCH" => {
+ if is_file {
+ self.handle_proppatch(req_path, &mut res).await?;
+ } else {
+ status_not_found(&mut res);
+ }
+ }
+ "MKCOL" => {
+ if !allow_upload {
+ status_forbid(&mut res);
+ } else if !is_miss {
+ *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
+ *res.body_mut() = body_full("Already exists");
+ } else {
+ self.handle_mkcol(path, &mut res).await?;
+ }
+ }
+ "COPY" => {
+ if !allow_upload {
+ status_forbid(&mut res);
+ } else if is_miss {
+ status_not_found(&mut res);
+ } else {
+ self.handle_copy(path, &req, &mut res).await?
+ }
+ }
+ "MOVE" => {
+ if !allow_upload || !allow_delete {
+ status_forbid(&mut res);
+ } else if is_miss {
+ status_not_found(&mut res);
+ } else {
+ self.handle_move(path, &req, &mut res).await?
+ }
+ }
+ "LOCK" => {
+ // Fake lock
+ if is_file {
+ let has_auth = authorization.is_some();
+ self.handle_lock(req_path, has_auth, &mut res).await?;
+ } else {
+ status_not_found(&mut res);
+ }
+ }
+ "UNLOCK" => {
+ // Fake unlock
+ if is_miss {
+ status_not_found(&mut res);
+ }
+ }
+ _ => {
+ *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
+ }
+ },
+ }
+ Ok(res)
+ }
+
    /// Write the request body to `path` (PUT / resumable PATCH upload).
    ///
    /// `upload_offset` selects the mode:
    /// - `None`: plain PUT — create/truncate the file, respond 201 Created.
    /// - `Some(offset)` with `offset == size`: append at EOF, respond 204 No Content.
    /// - `Some(offset)` with `offset < size`: overwrite starting at `offset`,
    ///   respond 204 No Content.
    ///
    /// If the transfer fails, a fresh (non-resumable) upload whose on-disk size
    /// is below `RESUMABLE_UPLOAD_MIN_SIZE` is deleted so no tiny partial file
    /// is left behind; larger partials are kept so the client can resume.
    async fn handle_upload(
        &self,
        path: &Path,
        upload_offset: Option<u64>,
        size: u64,
        req: Request,
        res: &mut Response,
    ) -> Result<()> {
        // Make sure the parent directory exists before opening the file.
        ensure_path_parent(path).await?;
        let (mut file, status) = match upload_offset {
            None => (fs::File::create(path).await?, StatusCode::CREATED),
            Some(offset) if offset == size => (
                fs::OpenOptions::new().append(true).open(path).await?,
                StatusCode::NO_CONTENT,
            ),
            Some(offset) => {
                let mut file = fs::OpenOptions::new().write(true).open(path).await?;
                file.seek(SeekFrom::Start(offset)).await?;
                (file, StatusCode::NO_CONTENT)
            }
        };
        let stream = IncomingStream::new(req.into_body());

        // Adapt the hyper body stream into an AsyncRead for io::copy.
        let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
        let body_reader = StreamReader::new(body_with_io_error);

        pin_mut!(body_reader);

        let ret = io::copy(&mut body_reader, &mut file).await;
        // Shadowed on purpose: this is the file's length AFTER the (possibly
        // partial) copy, not the pre-upload `size` parameter.
        let size = fs::metadata(path)
            .await
            .map(|v| v.len())
            .unwrap_or_default();
        if ret.is_err() {
            if upload_offset.is_none() && size < RESUMABLE_UPLOAD_MIN_SIZE {
                // Small non-resumable upload failed: drop the partial file
                // (best effort — removal errors are ignored).
                let _ = tokio::fs::remove_file(&path).await;
            }
            ret?;
        }

        *res.status_mut() = status;

        Ok(())
    }
+
+ async fn handle_delete(&self, path: &Path, is_dir: bool, res: &mut Response) -> Result<()> {
+ match is_dir {
+ true => fs::remove_dir_all(path).await?,
+ false => fs::remove_file(path).await?,
+ }
+
+ status_no_content(res);
+ Ok(())
+ }
+
+ async fn handle_ls_dir(
+ &self,
+ path: &Path,
+ exist: bool,
+ query_params: &HashMap<String, String>,
+ head_only: bool,
+ user: Option<String>,
+ access_paths: AccessPaths,
+ res: &mut Response,
+ ) -> Result<()> {
+ let mut paths = vec![];
+ if exist {
+ paths = match self.list_dir(path, path, access_paths.clone()).await {
+ Ok(paths) => paths,
+ Err(_) => {
+ status_forbid(res);
+ return Ok(());
+ }
+ }
+ };
+ self.send_index(
+ path,
+ paths,
+ exist,
+ query_params,
+ head_only,
+ user,
+ access_paths,
+ res,
+ )
+ }
+
+ async fn handle_search_dir(
+ &self,
+ path: &Path,
+ query_params: &HashMap<String, String>,
+ head_only: bool,
+ user: Option<String>,
+ access_paths: AccessPaths,
+ res: &mut Response,
+ ) -> Result<()> {
+ let mut paths: Vec<PathItem> = vec![];
+ let search = query_params
+ .get("q")
+ .ok_or_else(|| anyhow!("invalid q"))?
+ .to_lowercase();
+ if search.is_empty() {
+ return self
+ .handle_ls_dir(path, true, query_params, head_only, user, access_paths, res)
+ .await;
+ } else {
+ let path_buf = path.to_path_buf();
+ let hidden = Arc::new(self.args.hidden.to_vec());
+ let search = search.clone();
+
+ let access_paths = access_paths.clone();
+ let search_paths = tokio::spawn(collect_dir_entries(
+ access_paths,
+ self.running.clone(),
+ path_buf,
+ hidden,
+ self.args.allow_symlink,
+ self.args.serve_path.clone(),
+ move |x| get_file_name(x.path()).to_lowercase().contains(&search),
+ ))
+ .await?;
+
+ for search_path in search_paths.into_iter() {
+ if let Ok(Some(item)) = self.to_pathitem(search_path, path.to_path_buf()).await {
+ paths.push(item);
+ }
+ }
+ }
+ self.send_index(
+ path,
+ paths,
+ true,
+ query_params,
+ head_only,
+ user,
+ access_paths,
+ res,
+ )
+ }
+
    /// Stream a directory as a zip archive (`?zip`).
    ///
    /// The archive is produced by a spawned task writing into one end of an
    /// in-memory duplex pipe while the response body streams from the other
    /// end, so the zip is never fully buffered in memory.
    async fn handle_zip_dir(
        &self,
        path: &Path,
        head_only: bool,
        access_paths: AccessPaths,
        res: &mut Response,
    ) -> Result<()> {
        let (mut writer, reader) = tokio::io::duplex(BUF_SIZE);
        let filename = try_get_file_name(path)?;
        set_content_disposition(res, false, &format!("{}.zip", filename))?;
        res.headers_mut()
            .insert("content-type", HeaderValue::from_static("application/zip"));
        // HEAD request: headers only — skip spawning the zip task entirely.
        if head_only {
            return Ok(());
        }
        // Clone everything the zip task needs; the task may outlive this
        // handler's borrows.
        let path = path.to_owned();
        let hidden = self.args.hidden.clone();
        let running = self.running.clone();
        let compression = self.args.compress.to_compression();
        let follow_symlinks = self.args.allow_symlink;
        let serve_path = self.args.serve_path.clone();
        tokio::spawn(async move {
            if let Err(e) = zip_dir(
                &mut writer,
                &path,
                access_paths,
                &hidden,
                compression,
                follow_symlinks,
                serve_path,
                running,
            )
            .await
            {
                // Headers are already sent at this point, so the client just
                // sees a truncated stream; the error is only logged here.
                error!("Failed to zip {}, {e}", path.display());
            }
        });
        let reader_stream = ReaderStream::with_capacity(reader, BUF_SIZE);
        let stream_body = StreamBody::new(
            reader_stream
                .map_ok(Frame::data)
                .map_err(|err| anyhow!("{err}")),
        );
        let boxed_body = stream_body.boxed();
        *res.body_mut() = boxed_body;
        Ok(())
    }
+
+ async fn handle_render_index(
+ &self,
+ path: &Path,
+ query_params: &HashMap<String, String>,
+ headers: &HeaderMap<HeaderValue>,
+ head_only: bool,
+ user: Option<String>,
+ access_paths: AccessPaths,
+ res: &mut Response,
+ ) -> Result<()> {
+ let index_path = path.join(INDEX_NAME);
+ if fs::metadata(&index_path)
+ .await
+ .ok()
+ .map(|v| v.is_file())
+ .unwrap_or_default()
+ {
+ self.handle_send_file(&index_path, headers, head_only, res)
+ .await?;
+ } else if self.args.render_try_index {
+ self.handle_ls_dir(path, true, query_params, head_only, user, access_paths, res)
+ .await?;
+ } else {
+ status_not_found(res)
+ }
+ Ok(())
+ }
+
+ async fn handle_render_spa(
+ &self,
+ path: &Path,
+ headers: &HeaderMap<HeaderValue>,
+ head_only: bool,
+ res: &mut Response,
+ ) -> Result<()> {
+ if path.extension().is_none() {
+ let path = self.args.serve_path.join(INDEX_NAME);
+ self.handle_send_file(&path, headers, head_only, res)
+ .await?;
+ } else {
+ status_not_found(res)
+ }
+ Ok(())
+ }
+
+ async fn handle_internal(
+ &self,
+ req_path: &str,
+ headers: &HeaderMap<HeaderValue>,
+ res: &mut Response,
+ ) -> Result<bool> {
+ if let Some(name) = req_path.strip_prefix(&self.assets_prefix) {
+ match self.args.assets.as_ref() {
+ Some(assets_path) => {
+ let path = assets_path.join(name);
+ if path.exists() {
+ self.handle_send_file(&path, headers, false, res).await?;
+ } else {
+ status_not_found(res);
+ return Ok(true);
+ }
+ }
+ None => match name {
+ "index.js" => {
+ *res.body_mut() = body_full(INDEX_JS);
+ res.headers_mut().insert(
+ "content-type",
+ HeaderValue::from_static("application/javascript; charset=UTF-8"),
+ );
+ }
+ "index.css" => {
+ *res.body_mut() = body_full(INDEX_CSS);
+ res.headers_mut().insert(
+ "content-type",
+ HeaderValue::from_static("text/css; charset=UTF-8"),
+ );
+ }
+ "favicon.ico" => {
+ *res.body_mut() = body_full(FAVICON_ICO);
+ res.headers_mut()
+ .insert("content-type", HeaderValue::from_static("image/x-icon"));
+ }
+ _ => {
+ status_not_found(res);
+ }
+ },
+ }
+ res.headers_mut().insert(
+ "cache-control",
+ HeaderValue::from_static("public, max-age=31536000, immutable"),
+ );
+ res.headers_mut().insert(
+ "x-content-type-options",
+ HeaderValue::from_static("nosniff"),
+ );
+ Ok(true)
+ } else if req_path == HEALTH_CHECK_PATH {
+ res.headers_mut()
+ .typed_insert(ContentType::from(mime_guess::mime::APPLICATION_JSON));
+
+ *res.body_mut() = body_full(r#"{"status":"OK"}"#);
+ Ok(true)
+ } else {
+ Ok(false)
+ }
+ }
+
    /// Send a file as the response body, honoring HTTP conditional-request
    /// and Range semantics.
    ///
    /// Handles `If-(Un)Modified-Since`, `If-Match`, `If-None-Match`,
    /// `If-Range`, single-range partial content (streamed), multi-range
    /// `multipart/byteranges` (buffered in memory), and plain full-body
    /// streaming. `head_only` stops after headers are set.
    async fn handle_send_file(
        &self,
        path: &Path,
        headers: &HeaderMap<HeaderValue>,
        head_only: bool,
        res: &mut Response,
    ) -> Result<()> {
        // Open and stat concurrently; both must succeed.
        let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
        let (mut file, meta) = (file?, meta?);
        let size = meta.len();
        let mut use_range = true;
        // Validators (ETag + Last-Modified) gate the conditional headers;
        // when they can't be derived from metadata, conditionals and ranges
        // are skipped entirely (use_range stays true but no Range is parsed
        // below only if the client sent one — see the `ranges` binding).
        if let Some((etag, last_modified)) = extract_cache_headers(&meta) {
            if let Some(if_unmodified_since) = headers.typed_get::<IfUnmodifiedSince>() {
                if !if_unmodified_since.precondition_passes(last_modified.into()) {
                    *res.status_mut() = StatusCode::PRECONDITION_FAILED;
                    return Ok(());
                }
            }
            if let Some(if_match) = headers.typed_get::<IfMatch>() {
                if !if_match.precondition_passes(&etag) {
                    *res.status_mut() = StatusCode::PRECONDITION_FAILED;
                    return Ok(());
                }
            }
            if let Some(if_modified_since) = headers.typed_get::<IfModifiedSince>() {
                if !if_modified_since.is_modified(last_modified.into()) {
                    *res.status_mut() = StatusCode::NOT_MODIFIED;
                    return Ok(());
                }
            }
            if let Some(if_none_match) = headers.typed_get::<IfNoneMatch>() {
                if !if_none_match.precondition_passes(&etag) {
                    *res.status_mut() = StatusCode::NOT_MODIFIED;
                    return Ok(());
                }
            }

            res.headers_mut()
                .typed_insert(CacheControl::new().with_no_cache());
            res.headers_mut().typed_insert(last_modified);
            res.headers_mut().typed_insert(etag.clone());

            if headers.typed_get::<Range>().is_some() {
                // If-Range: only serve the range when validators still match;
                // a stale validator downgrades to a full-body response.
                use_range = headers
                    .typed_get::<IfRange>()
                    .map(|if_range| !if_range.is_modified(Some(&etag), Some(&last_modified)))
                    // Always be fresh if there is no validators
                    .unwrap_or(true);
            } else {
                use_range = false;
            }
        }

        // Outer Option: was a Range header present and usable?
        // Inner Option: did it parse into satisfiable ranges?
        let ranges = if use_range {
            headers.get(RANGE).map(|range| {
                range
                    .to_str()
                    .ok()
                    .and_then(|range| parse_range(range, size))
            })
        } else {
            None
        };

        res.headers_mut().insert(
            CONTENT_TYPE,
            HeaderValue::from_str(&get_content_type(path).await?)?,
        );

        let filename = try_get_file_name(path)?;
        set_content_disposition(res, true, filename)?;

        res.headers_mut().typed_insert(AcceptRanges::bytes());

        if let Some(ranges) = ranges {
            if let Some(ranges) = ranges {
                if ranges.len() == 1 {
                    // Single range: stream it straight from the file.
                    let (start, end) = ranges[0];
                    file.seek(SeekFrom::Start(start)).await?;
                    let range_size = end - start + 1;
                    *res.status_mut() = StatusCode::PARTIAL_CONTENT;
                    let content_range = format!("bytes {}-{}/{}", start, end, size);
                    res.headers_mut()
                        .insert(CONTENT_RANGE, content_range.parse()?);
                    res.headers_mut()
                        .insert(CONTENT_LENGTH, format!("{range_size}").parse()?);
                    if head_only {
                        return Ok(());
                    }

                    let stream_body = StreamBody::new(
                        LengthLimitedStream::new(file, range_size as usize)
                            .map_ok(Frame::data)
                            .map_err(|err| anyhow!("{err}")),
                    );
                    let boxed_body = stream_body.boxed();
                    *res.body_mut() = boxed_body;
                } else {
                    // Multiple ranges: build a multipart/byteranges body.
                    // NOTE(review): this buffers all requested ranges in
                    // memory, unlike the streamed single-range path.
                    *res.status_mut() = StatusCode::PARTIAL_CONTENT;
                    let boundary = Uuid::new_v4();
                    let mut body = Vec::new();
                    let content_type = get_content_type(path).await?;
                    for (start, end) in ranges {
                        file.seek(SeekFrom::Start(start)).await?;
                        let range_size = end - start + 1;
                        let content_range = format!("bytes {}-{}/{}", start, end, size);
                        let part_header = format!(
                            "--{boundary}\r\nContent-Type: {content_type}\r\nContent-Range: {content_range}\r\n\r\n",
                        );
                        body.extend_from_slice(part_header.as_bytes());
                        let mut buffer = vec![0; range_size as usize];
                        file.read_exact(&mut buffer).await?;
                        body.extend_from_slice(&buffer);
                        body.extend_from_slice(b"\r\n");
                    }
                    body.extend_from_slice(format!("--{boundary}--\r\n").as_bytes());
                    res.headers_mut().insert(
                        CONTENT_TYPE,
                        format!("multipart/byteranges; boundary={boundary}").parse()?,
                    );
                    res.headers_mut()
                        .insert(CONTENT_LENGTH, format!("{}", body.len()).parse()?);
                    if head_only {
                        return Ok(());
                    }
                    *res.body_mut() = body_full(body);
                }
            } else {
                // Range header present but unsatisfiable for this file size.
                *res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE;
                res.headers_mut()
                    .insert(CONTENT_RANGE, format!("bytes */{size}").parse()?);
            }
        } else {
            // No (usable) Range: stream the whole file.
            res.headers_mut()
                .insert(CONTENT_LENGTH, format!("{size}").parse()?);
            if head_only {
                return Ok(());
            }

            let reader_stream = ReaderStream::with_capacity(file, BUF_SIZE);
            let stream_body = StreamBody::new(
                reader_stream
                    .map_ok(Frame::data)
                    .map_err(|err| anyhow!("{err}")),
            );
            let boxed_body = stream_body.boxed();
            *res.body_mut() = boxed_body;
        }
        Ok(())
    }
+
    /// Render the editor/viewer page for a file (`?edit` / `?view`).
    ///
    /// The page is the shared HTML template with the file's metadata injected
    /// as base64-encoded JSON; `editable` is decided by size and a text-vs-
    /// binary sniff of the first 1 KiB.
    async fn handle_edit_file(
        &self,
        path: &Path,
        kind: DataKind,
        head_only: bool,
        user: Option<String>,
        res: &mut Response,
    ) -> Result<()> {
        let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
        let (file, meta) = (file?, meta?);
        // Href of the file relative to the serve root, always '/'-prefixed.
        let href = format!(
            "/{}",
            normalize_path(path.strip_prefix(&self.args.serve_path)?)
        );
        // Sniff only the first 1 KiB to decide whether the content is text.
        let mut buffer: Vec<u8> = vec![];
        file.take(1024).read_to_end(&mut buffer).await?;
        let editable =
            meta.len() <= EDITABLE_TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text();
        let data = EditData {
            href,
            kind,
            uri_prefix: self.args.uri_prefix.clone(),
            allow_upload: self.args.allow_upload,
            allow_delete: self.args.allow_delete,
            auth: self.args.auth.exist(),
            user,
            editable,
        };
        res.headers_mut()
            .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
        // The template consumes the payload as base64 to avoid HTML escaping
        // issues inside the page.
        let index_data = STANDARD.encode(serde_json::to_string(&data)?);
        let output = self
            .html
            .replace(
                "__ASSETS_PREFIX__",
                &format!("{}{}", self.args.uri_prefix, self.assets_prefix),
            )
            .replace("__INDEX_DATA__", &index_data);
        res.headers_mut()
            .typed_insert(ContentLength(output.len() as u64));
        res.headers_mut()
            .typed_insert(CacheControl::new().with_no_cache());
        if head_only {
            return Ok(());
        }
        *res.body_mut() = body_full(output);
        Ok(())
    }
+
+ async fn handle_hash_file(
+ &self,
+ path: &Path,
+ head_only: bool,
+ res: &mut Response,
+ ) -> Result<()> {
+ let output = sha256_file(path).await?;
+ res.headers_mut()
+ .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
+ res.headers_mut()
+ .typed_insert(ContentLength(output.len() as u64));
+ if head_only {
+ return Ok(());
+ }
+ *res.body_mut() = body_full(output);
+ Ok(())
+ }
+
+ async fn handle_propfind_dir(
+ &self,
+ path: &Path,
+ headers: &HeaderMap<HeaderValue>,
+ access_paths: AccessPaths,
+ res: &mut Response,
+ ) -> Result<()> {
+ let depth: u32 = match headers.get("depth") {
+ Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) {
+ Some(0) => 0,
+ Some(1) => 1,
+ _ => {
+ status_bad_request(res, "Invalid depth: only 0 and 1 are allowed.");
+ return Ok(());
+ }
+ },
+ None => 1,
+ };
+ let mut paths = match self.to_pathitem(path, &self.args.serve_path).await? {
+ Some(v) => vec![v],
+ None => vec![],
+ };
+ if depth == 1 {
+ match self
+ .list_dir(path, &self.args.serve_path, access_paths)
+ .await
+ {
+ Ok(child) => paths.extend(child),
+ Err(_) => {
+ status_forbid(res);
+ return Ok(());
+ }
+ }
+ }
+ let output = paths
+ .iter()
+ .map(|v| v.to_dav_xml(self.args.uri_prefix.as_str()))
+ .fold(String::new(), |mut acc, v| {
+ acc.push_str(&v);
+ acc
+ });
+ res_multistatus(res, &output);
+ Ok(())
+ }
+
+ async fn handle_propfind_file(&self, path: &Path, res: &mut Response) -> Result<()> {
+ if let Some(pathitem) = self.to_pathitem(path, &self.args.serve_path).await? {
+ res_multistatus(res, &pathitem.to_dav_xml(self.args.uri_prefix.as_str()));
+ } else {
+ status_not_found(res);
+ }
+ Ok(())
+ }
+
    /// WebDAV MKCOL: create the directory (collection), replying 201 Created.
    /// Missing parent directories are created as well.
    async fn handle_mkcol(&self, path: &Path, res: &mut Response) -> Result<()> {
        fs::create_dir_all(path).await?;
        *res.status_mut() = StatusCode::CREATED;
        Ok(())
    }
+
+ async fn handle_copy(&self, path: &Path, req: &Request, res: &mut Response) -> Result<()> {
+ let dest = match self.extract_dest(req, res) {
+ Some(dest) => dest,
+ None => {
+ return Ok(());
+ }
+ };
+
+ let meta = fs::symlink_metadata(path).await?;
+ if meta.is_dir() {
+ status_forbid(res);
+ return Ok(());
+ }
+
+ ensure_path_parent(&dest).await?;
+
+ fs::copy(path, &dest).await?;
+
+ status_no_content(res);
+ Ok(())
+ }
+
+ async fn handle_move(&self, path: &Path, req: &Request, res: &mut Response) -> Result<()> {
+ let dest = match self.extract_dest(req, res) {
+ Some(dest) => dest,
+ None => {
+ return Ok(());
+ }
+ };
+
+ ensure_path_parent(&dest).await?;
+
+ fs::rename(path, &dest).await?;
+
+ status_no_content(res);
+ Ok(())
+ }
+
+ async fn handle_lock(&self, req_path: &str, auth: bool, res: &mut Response) -> Result<()> {
+ let token = if auth {
+ format!("opaquelocktoken:{}", Uuid::new_v4())
+ } else {
+ Utc::now().timestamp().to_string()
+ };
+
+ res.headers_mut().insert(
+ "content-type",
+ HeaderValue::from_static("application/xml; charset=utf-8"),
+ );
+ res.headers_mut()
+ .insert("lock-token", format!("<{token}>").parse()?);
+
+ *res.body_mut() = body_full(format!(
+ r#"<?xml version="1.0" encoding="utf-8"?>