1use std::collections::{HashMap, HashSet};
4use std::ffi::OsStr;
5use std::fmt::{self, Debug, Display, Formatter, Write};
6use std::fs::Metadata;
7use std::path::{Path, PathBuf};
8use std::str::FromStr;
9use std::time::SystemTime;
10
11use salvo_core::fs::NamedFile;
12use salvo_core::handler::Handler;
13use salvo_core::http::header::ACCEPT_ENCODING;
14use salvo_core::http::{self, HeaderValue, Request, Response, StatusCode, StatusError, mime};
15use salvo_core::writing::Text;
16use salvo_core::{Depot, FlowCtrl, IntoVecString, async_trait};
17use serde::{Deserialize, Serialize};
18use serde_json::json;
19use time::OffsetDateTime;
20use time::macros::format_description;
21use tokio::io::AsyncReadExt;
22
23use super::{
24 decode_url_path_safely, encode_url_path, format_url_path_safely, join_path, redirect_to_dir_url,
25};
26
/// Compression algorithms recognized when serving precompressed static files.
#[derive(Eq, PartialEq, Clone, Copy, Debug, Hash)]
#[non_exhaustive]
pub enum CompressionAlgo {
    /// Brotli (`br`) encoding.
    Brotli,
    /// Deflate encoding.
    Deflate,
    /// Gzip encoding.
    Gzip,
    /// Zstandard (`zstd`) encoding.
    Zstd,
}
40impl FromStr for CompressionAlgo {
41 type Err = String;
42
43 fn from_str(s: &str) -> Result<Self, Self::Err> {
44 match s {
45 "br" | "brotli" => Ok(Self::Brotli),
46 "deflate" => Ok(Self::Deflate),
47 "gzip" => Ok(Self::Gzip),
48 "zstd" => Ok(Self::Zstd),
49 _ => Err(format!("unknown compression algorithm: {s}")),
50 }
51 }
52}
53
54impl Display for CompressionAlgo {
55 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
56 match self {
57 Self::Brotli => write!(f, "br"),
58 Self::Deflate => write!(f, "deflate"),
59 Self::Gzip => write!(f, "gzip"),
60 Self::Zstd => write!(f, "zstd"),
61 }
62 }
63}
64
65impl From<CompressionAlgo> for HeaderValue {
66 #[inline]
67 fn from(algo: CompressionAlgo) -> Self {
68 match algo {
69 CompressionAlgo::Brotli => Self::from_static("br"),
70 CompressionAlgo::Deflate => Self::from_static("deflate"),
71 CompressionAlgo::Gzip => Self::from_static("gzip"),
72 CompressionAlgo::Zstd => Self::from_static("zstd"),
73 }
74 }
75}
76
/// Types that can be converted into the list of root directories served by
/// [`StaticDir`].
pub trait StaticRoots {
    /// Consumes `self` and returns the root paths.
    fn collect(self) -> Vec<PathBuf>;
}
82
83impl StaticRoots for &str {
84 #[inline]
85 fn collect(self) -> Vec<PathBuf> {
86 vec![PathBuf::from(self)]
87 }
88}
89impl StaticRoots for &String {
90 #[inline]
91 fn collect(self) -> Vec<PathBuf> {
92 vec![PathBuf::from(self)]
93 }
94}
95impl StaticRoots for String {
96 #[inline]
97 fn collect(self) -> Vec<PathBuf> {
98 vec![PathBuf::from(self)]
99 }
100}
101impl StaticRoots for PathBuf {
102 #[inline]
103 fn collect(self) -> Vec<PathBuf> {
104 vec![self]
105 }
106}
107impl<T> StaticRoots for Vec<T>
108where
109 T: Into<PathBuf> + AsRef<OsStr>,
110{
111 #[inline]
112 fn collect(self) -> Vec<PathBuf> {
113 self.iter().map(Into::into).collect()
114 }
115}
116impl<T, const N: usize> StaticRoots for [T; N]
117where
118 T: Into<PathBuf> + AsRef<OsStr>,
119{
120 #[inline]
121 fn collect(self) -> Vec<PathBuf> {
122 self.iter().map(Into::into).collect()
123 }
124}
125
/// Handler that serves static files from one or more root directories.
#[non_exhaustive]
pub struct StaticDir {
    /// Root directories searched in order when resolving a request path.
    pub roots: Vec<PathBuf>,
    /// Buffer size used when streaming a file; `None` keeps the default.
    pub chunk_size: Option<u64>,
    /// Whether entries whose name starts with `.` may be served.
    pub include_dot_files: bool,
    // Predicates over the raw filesystem path; any returning `true` hides the
    // path from serving and from listings.
    exclude_filters: Vec<Box<dyn Fn(&str) -> bool + Send + Sync>>,
    /// Whether a directory without a default document renders an auto-generated
    /// index page.
    pub auto_list: bool,
    /// Maps a compression algorithm to the file extensions of its
    /// precompressed variants (e.g. Gzip -> `["gz"]`).
    pub compressed_variations: HashMap<CompressionAlgo, Vec<String>>,
    /// Default documents (e.g. `index.html`) tried when a directory is requested.
    pub defaults: Vec<String>,
    /// File (relative to a root) served when nothing else matches.
    pub fallback: Option<String>,
}
164impl Debug for StaticDir {
165 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
166 f.debug_struct("StaticDir")
167 .field("roots", &self.roots)
168 .field("chunk_size", &self.chunk_size)
169 .field("include_dot_files", &self.include_dot_files)
170 .field("auto_list", &self.auto_list)
171 .field("compressed_variations", &self.compressed_variations)
172 .field("defaults", &self.defaults)
173 .field("fallback", &self.fallback)
174 .finish()
175 }
176}
177impl StaticDir {
178 #[inline]
180 pub fn new<T: StaticRoots + Sized>(roots: T) -> Self {
181 let mut compressed_variations = HashMap::new();
182 compressed_variations.insert(CompressionAlgo::Brotli, vec!["br".to_owned()]);
183 compressed_variations.insert(CompressionAlgo::Zstd, vec!["zst".to_owned()]);
184 compressed_variations.insert(CompressionAlgo::Gzip, vec!["gz".to_owned()]);
185 compressed_variations.insert(CompressionAlgo::Deflate, vec!["deflate".to_owned()]);
186
187 Self {
188 roots: roots.collect(),
189 chunk_size: None,
190 include_dot_files: false,
191 exclude_filters: vec![],
192 auto_list: false,
193 compressed_variations,
194 defaults: vec![],
195 fallback: None,
196 }
197 }
198
199 #[inline]
201 #[must_use]
202 pub fn include_dot_files(mut self, include_dot_files: bool) -> Self {
203 self.include_dot_files = include_dot_files;
204 self
205 }
206
207 #[inline]
211 #[must_use]
212 pub fn exclude<F>(mut self, filter: F) -> Self
213 where
214 F: Fn(&str) -> bool + Send + Sync + 'static,
215 {
216 self.exclude_filters.push(Box::new(filter));
217 self
218 }
219
220 #[inline]
222 #[must_use]
223 pub fn auto_list(mut self, auto_list: bool) -> Self {
224 self.auto_list = auto_list;
225 self
226 }
227
228 #[inline]
230 #[must_use]
231 pub fn compressed_variation<A>(mut self, algo: A, exts: &str) -> Self
232 where
233 A: Into<CompressionAlgo>,
234 {
235 self.compressed_variations.insert(
236 algo.into(),
237 exts.split(',').map(|s| s.trim().to_owned()).collect(),
238 );
239 self
240 }
241
242 #[inline]
244 #[must_use]
245 pub fn defaults(mut self, defaults: impl IntoVecString) -> Self {
246 self.defaults = defaults.into_vec_string();
247 self
248 }
249
250 #[must_use]
252 pub fn fallback(mut self, fallback: impl Into<String>) -> Self {
253 self.fallback = Some(fallback.into());
254 self
255 }
256
257 #[inline]
264 #[must_use]
265 pub fn chunk_size(mut self, size: u64) -> Self {
266 self.chunk_size = Some(size);
267 self
268 }
269
270 #[inline]
271 fn is_compressed_ext(&self, ext: &str) -> bool {
272 for exts in self.compressed_variations.values() {
273 if exts.iter().any(|e| e == ext) {
274 return true;
275 }
276 }
277 false
278 }
279}
/// Snapshot of one directory used to render an auto-index listing.
#[derive(Serialize, Deserialize, Debug)]
struct CurrentInfo {
    /// Decoded request path of the directory being listed.
    path: String,
    /// Regular-file entries (sorted by name before rendering).
    files: Vec<FileInfo>,
    /// Sub-directory entries (sorted by name before rendering).
    dirs: Vec<DirInfo>,
}
286impl CurrentInfo {
287 #[inline]
288 fn new(path: String, files: Vec<FileInfo>, dirs: Vec<DirInfo>) -> Self {
289 Self { path, files, dirs }
290 }
291}
/// Metadata for a single file row in a directory listing.
#[derive(Serialize, Deserialize, Debug)]
struct FileInfo {
    /// File name without any path component.
    name: String,
    /// Size in bytes.
    size: u64,
    /// Last modification time (falls back to "now" when unavailable).
    modified: OffsetDateTime,
}
298impl FileInfo {
299 #[inline]
300 #[must_use]
301 fn new(name: String, metadata: &Metadata) -> Self {
302 Self {
303 name,
304 size: metadata.len(),
305 modified: metadata
306 .modified()
307 .unwrap_or_else(|_| SystemTime::now())
308 .into(),
309 }
310 }
311}
/// Metadata for a single sub-directory row in a directory listing.
#[derive(Serialize, Deserialize, Debug)]
struct DirInfo {
    /// Directory name without any path component.
    name: String,
    /// Last modification time (falls back to "now" when unavailable).
    modified: OffsetDateTime,
}
317impl DirInfo {
318 #[inline]
319 fn new(name: String, metadata: &Metadata) -> Self {
320 Self {
321 name,
322 modified: metadata
323 .modified()
324 .unwrap_or_else(|_| SystemTime::now())
325 .into(),
326 }
327 }
328}
329
330#[async_trait]
331impl Handler for StaticDir {
332 async fn handle(
333 &self,
334 req: &mut Request,
335 _depot: &mut Depot,
336 res: &mut Response,
337 _ctrl: &mut FlowCtrl,
338 ) {
339 let req_path = req.uri().path();
340 let rel_path = if let Some(rest) = req.params().tail() {
341 rest
342 } else {
343 &*decode_url_path_safely(req_path)
344 };
345 let rel_path = format_url_path_safely(rel_path);
346 let mut files: HashMap<String, Metadata> = HashMap::new();
347 let mut dirs: HashMap<String, Metadata> = HashMap::new();
348 let is_dot_file = Path::new(&rel_path)
349 .file_name()
350 .and_then(|s| s.to_str())
351 .map(|s| s.starts_with('.'))
352 .unwrap_or(false);
353 let mut abs_path = None;
354 if self.include_dot_files || !is_dot_file {
355 for root in &self.roots {
356 let raw_path = join_path!(root, &rel_path);
357 if !Path::new(&raw_path).starts_with(root) {
360 continue;
361 }
362 if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
363 continue;
364 }
365 let path = Path::new(&raw_path);
366 if path.is_dir() {
367 if !req_path.ends_with('/') && !req_path.is_empty() {
368 redirect_to_dir_url(req.uri(), res);
369 return;
370 }
371
372 for ifile in &self.defaults {
373 let ipath = path.join(ifile);
374 if ipath.is_file() {
375 abs_path = Some(ipath);
376 break;
377 }
378 }
379
380 if self.auto_list && abs_path.is_none() {
381 abs_path = Some(path.to_path_buf());
382 }
383 if abs_path.is_some() {
384 break;
385 }
386 } else if path.is_file() {
387 abs_path = Some(path.to_path_buf());
388 }
389 }
390 }
391 let fallback = self.fallback.as_deref().unwrap_or_default();
392 if abs_path.is_none() && !fallback.is_empty() {
393 for root in &self.roots {
394 let raw_path = join_path!(root, fallback);
395 if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
396 continue;
397 }
398 let path = Path::new(&raw_path);
399 if path.is_file() {
400 abs_path = Some(path.to_path_buf());
401 break;
402 }
403 }
404 }
405
406 let Some(abs_path) = abs_path else {
407 res.render(StatusError::not_found());
408 return;
409 };
410
411 if abs_path.is_file() {
412 let ext = abs_path
413 .extension()
414 .and_then(|s| s.to_str())
415 .map(|s| s.to_lowercase());
416 let is_compressed_ext = ext
417 .as_deref()
418 .map(|ext| self.is_compressed_ext(ext))
419 .unwrap_or(false);
420 let mut content_encoding = None;
421 let mut content_type = mime_infer::from_path(&abs_path).first();
422
423 if let Some(content_type) = &mut content_type
424 && mime::is_charset_required_mime(content_type)
425 && let Ok(file) = tokio::fs::File::open(&abs_path).await
426 {
427 let mut buffer: Vec<u8> = vec![];
428 let _ = file.take(1024).read(&mut buffer).await;
429 mime::fill_mime_charset_if_need(content_type, &buffer);
430 }
431
432 let named_path = if !is_compressed_ext {
433 if !self.compressed_variations.is_empty() {
434 let mut new_abs_path = None;
435 let header = req
436 .headers()
437 .get(ACCEPT_ENCODING)
438 .and_then(|v| v.to_str().ok())
439 .unwrap_or_default();
440 let accept_algos = http::parse_accept_encoding(header)
441 .into_iter()
442 .filter_map(|(algo, _level)| algo.parse::<CompressionAlgo>().ok())
443 .collect::<HashSet<_>>();
444 for (algo, exts) in &self.compressed_variations {
445 if accept_algos.contains(algo) {
446 for zip_ext in exts {
447 let mut path = abs_path.clone();
448 path.as_mut_os_string().push(&*format!(".{zip_ext}"));
449 if path.is_file() {
450 new_abs_path = Some(path);
451 content_encoding = Some(algo.to_string());
452 break;
453 }
454 }
455 }
456 }
457 new_abs_path.unwrap_or(abs_path)
458 } else {
459 abs_path
460 }
461 } else {
462 abs_path
463 };
464
465 let builder = {
466 let mut builder = NamedFile::builder(named_path);
467 if let Some(content_encoding) = content_encoding {
468 builder = builder.content_encoding(content_encoding);
469 }
470 if let Some(size) = self.chunk_size {
471 builder = builder.buffer_size(size);
472 }
473 if let Some(content_type) = content_type {
474 builder = builder.content_type(content_type);
475 }
476 builder
477 };
478 if let Ok(named_file) = builder.build().await {
479 let headers = req.headers();
480 named_file.send(headers, res).await;
481 } else {
482 res.render(StatusError::internal_server_error().brief("Read file failed."));
483 }
484 } else if abs_path.is_dir() {
485 if let Ok(mut entries) = tokio::fs::read_dir(&abs_path).await {
487 while let Ok(Some(entry)) = entries.next_entry().await {
488 let file_name = entry.file_name().to_string_lossy().to_string();
489 if self.include_dot_files || !file_name.starts_with('.') {
490 let raw_path = join_path!(&abs_path, &file_name);
491 if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
492 continue;
493 }
494 if let Ok(metadata) = entry.metadata().await {
495 if metadata.is_dir() {
496 dirs.entry(file_name).or_insert(metadata);
497 } else {
498 files.entry(file_name).or_insert(metadata);
499 }
500 }
501 }
502 }
503 }
504
505 let format = req.first_accept().unwrap_or(mime::TEXT_HTML);
506 let mut files: Vec<FileInfo> = files
507 .into_iter()
508 .map(|(name, metadata)| FileInfo::new(name, &metadata))
509 .collect();
510 files.sort_by(|a, b| a.name.cmp(&b.name));
511 let mut dirs: Vec<DirInfo> = dirs
512 .into_iter()
513 .map(|(name, metadata)| DirInfo::new(name, &metadata))
514 .collect();
515 dirs.sort_by(|a, b| a.name.cmp(&b.name));
516 let root = CurrentInfo::new(decode_url_path_safely(req_path), files, dirs);
517 res.status_code(StatusCode::OK);
518 match format.subtype().as_ref() {
519 "plain" => res.render(Text::Plain(list_text(&root))),
520 "json" => res.render(Text::Json(list_json(&root))),
521 "xml" => res.render(Text::Xml(list_xml(&root))),
522 _ => res.render(Text::Html(list_html(&root))),
523 };
524 }
525 }
526}
527
#[inline]
// Serializes the listing via `serde_json`'s `json!` value and stringifies it.
fn list_json(current: &CurrentInfo) -> String {
    json!(current).to_string()
}
532fn list_xml(current: &CurrentInfo) -> String {
533 let mut ftxt = "<list>".to_owned();
534 if current.dirs.is_empty() && current.files.is_empty() {
535 ftxt.push_str("No files");
536 } else {
537 let format = format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");
538 for dir in ¤t.dirs {
539 let _ = write!(
540 ftxt,
541 "<dir><name>{}</name><modified>{}</modified><link>{}</link></dir>",
542 dir.name,
543 dir.modified.format(&format).expect("format time failed"),
544 encode_url_path(&dir.name),
545 );
546 }
547 for file in ¤t.files {
548 let _ = write!(
549 ftxt,
550 "<file><name>{}</name><modified>{}</modified><size>{}</size><link>{}</link></file>",
551 file.name,
552 file.modified.format(&format).expect("format time failed"),
553 file.size,
554 encode_url_path(&file.name),
555 );
556 }
557 }
558 ftxt.push_str("</list>");
559 ftxt
560}
/// Formats a byte count as a human-readable size with up to two decimals,
/// e.g. `98595176` -> `"94.03 MB"`.
fn human_size(bytes: u64) -> String {
    const UNITS: [&str; 9] = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
    let mut value = bytes as f64;
    let mut unit = 0;

    while value >= 1024.0 && unit < UNITS.len() - 1 {
        value /= 1024.0;
        unit += 1;
    }

    // Round to two decimals; a value that rounds up to exactly 1024 rolls
    // over into the next unit (e.g. 1048575 bytes -> "1 MB", not "1024 KB").
    value = (value * 100.0).round() / 100.0;
    if value == 1024.0 && unit < UNITS.len() - 1 {
        unit += 1;
        value = 1.0;
    }
    format!("{} {}", value, UNITS[unit])
}
578fn list_html(current: &CurrentInfo) -> String {
579 fn header_link(path: &str) -> String {
580 let segments = path
581 .trim_start_matches('/')
582 .trim_end_matches('/')
583 .split('/');
584 let mut link = "".to_owned();
585 format!(
586 r#"<a href="/">{}</a>{}"#,
587 HOME_ICON,
588 segments
589 .map(|seg| {
590 link = format!("{link}/{}", encode_url_path(seg));
591 format!("/<a href=\"{link}\">{}</a>", encode_url_path(seg))
592 })
593 .collect::<Vec<_>>()
594 .join("")
595 )
596 }
597 let mut ftxt = format!(
598 r#"<!DOCTYPE html><html><head>
599 <meta charset="utf-8">
600 <meta name="viewport" content="width=device-width">
601 <title>{}</title>
602 <style>{}</style></head><body><header><h3>Index of: {}</h3></header><hr/>"#,
603 encode_url_path(¤t.path),
604 HTML_STYLE,
605 header_link(¤t.path)
606 );
607 if current.dirs.is_empty() && current.files.is_empty() {
608 let _ = write!(ftxt, "<p>No files</p>");
609 } else {
610 let _ = write!(ftxt, "<table><tr><th>");
611 if !(current.path.is_empty() || current.path == "/") {
612 let _ = write!(ftxt, "<a href=\"../\">[..]</a>");
613 }
614 let _ = write!(
615 ftxt,
616 "</th><th>Name</th><th>Last modified</th><th>Size</th></tr>"
617 );
618 let format = format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");
619 for dir in ¤t.dirs {
620 let _ = write!(
621 ftxt,
622 r#"<tr><td>{}</td><td><a href="./{}/">{}</a></td><td>{}</td><td></td></tr>"#,
623 DIR_ICON,
624 encode_url_path(&dir.name),
625 encode_url_path(&dir.name),
626 dir.modified.format(&format).expect("format time failed"),
627 );
628 }
629 for file in ¤t.files {
630 let _ = write!(
631 ftxt,
632 r#"<tr><td>{}</td><td><a href="./{}">{}</a></td><td>{}</td><td>{}</td></tr>"#,
633 FILE_ICON,
634 encode_url_path(&file.name),
635 encode_url_path(&file.name),
636 file.modified.format(&format).expect("format time failed"),
637 human_size(file.size)
638 );
639 }
640 let _ = write!(ftxt, "</table>");
641 }
642 let _ = write!(
643 ftxt,
644 r#"<hr/><footer><a href="https://salvo.rs" target="_blank">salvo</a></footer></body>"#
645 );
646 ftxt
647}
#[inline]
// Plain-text listing currently reuses the JSON serialization of the listing.
fn list_text(current: &CurrentInfo) -> String {
    json!(current).to_string()
}
652
/// Stylesheet for the auto-generated listing page, including a dark-mode
/// variant via `prefers-color-scheme`.
const HTML_STYLE: &str = r#"
    :root {
        --bg-color: #fff;
        --text-color: #222;
        --link-color: #0366d6;
        --link-visited-color: #f22526;
        --dir-icon-color: #79b8ff;
        --file-icon-color: #959da5;
    }
    body {background: var(--bg-color); color: var(--text-color);}
    a {text-decoration:none;color:var(--link-color);}
    a:visited {color: var(--link-visited-color);}
    a:hover {text-decoration:underline;}
    header a {padding: 0 6px;}
    footer {text-align:center;font-size:12px;}
    table {text-align:left;border-collapse: collapse;}
    tr {border-bottom: solid 1px #ccc;}
    tr:last-child {border-bottom: none;}
    th, td {padding: 5px;}
    th:first-child,td:first-child {text-align: center;}
    svg[data-icon="dir"] {vertical-align: text-bottom; color: var(--dir-icon-color); fill: currentColor;}
    svg[data-icon="file"] {vertical-align: text-bottom; color: var(--file-icon-color); fill: currentColor;}
    svg[data-icon="home"] {width:18px;}
    @media (prefers-color-scheme: dark) {
        :root {
            --bg-color: #222;
            --text-color: #ddd;
            --link-color: #539bf5;
            --link-visited-color: #f25555;
            --dir-icon-color: #7da3d0;
            --file-icon-color: #545d68;
        }
    }"#;
/// Folder icon (SVG) shown for directory rows in the HTML listing.
const DIR_ICON: &str = r#"<svg aria-label="Directory" data-icon="dir" width="20" height="20" viewBox="0 0 512 512" version="1.1" role="img"><path fill="currentColor" d="M464 128H272l-64-64H48C21.49 64 0 85.49 0 112v288c0 26.51 21.49 48 48 48h416c26.51 0 48-21.49 48-48V176c0-26.51-21.49-48-48-48z"></path></svg>"#;
/// File icon (SVG) shown for file rows in the HTML listing.
const FILE_ICON: &str = r#"<svg aria-label="File" data-icon="file" width="20" height="20" viewBox="0 0 384 512" version="1.1" role="img"><path d="M369.9 97.9L286 14C277 5 264.8-.1 252.1-.1H48C21.5 0 0 21.5 0 48v416c0 26.5 21.5 48 48 48h288c26.5 0 48-21.5 48-48V131.9c0-12.7-5.1-25-14.1-34zM332.1 128H256V51.9l76.1 76.1zM48 464V48h160v104c0 13.3 10.7 24 24 24h104v288H48z"/></svg>"#;
/// Home icon (SVG) used in the breadcrumb header of the HTML listing.
const HOME_ICON: &str = r#"<svg aria-hidden="true" data-icon="home" viewBox="0 0 576 512"><path fill="currentColor" d="M280.37 148.26L96 300.11V464a16 16 0 0 0 16 16l112.06-.29a16 16 0 0 0 15.92-16V368a16 16 0 0 1 16-16h64a16 16 0 0 1 16 16v95.64a16 16 0 0 0 16 16.05L464 480a16 16 0 0 0 16-16V300L295.67 148.26a12.19 12.19 0 0 0-15.3 0zM571.6 251.47L488 182.56V44.05a12 12 0 0 0-12-12h-56a12 12 0 0 0-12 12v72.61L318.47 43a48 48 0 0 0-61 0L4.34 251.47a12 12 0 0 0-1.6 16.9l25.5 31A12 12 0 0 0 45.15 301l235.22-193.74a12.19 12.19 0 0 1 15.3 0L530.9 301a12 12 0 0 0 16.9-1.6l25.5-31a12 12 0 0 0-1.7-16.93z"></path></svg>"#;
689
#[cfg(test)]
mod tests {
    use crate::dir::human_size;

    // `human_size` is a pure, synchronous function, so a plain `#[test]`
    // suffices; the previous `#[tokio::test] async fn` spun up a Tokio
    // runtime for no benefit.
    #[test]
    fn test_convert_bytes_to_units() {
        let unit = 1024u64;
        assert_eq!("94.03 MB", human_size(98595176));

        assert_eq!("1 KB", human_size(unit));
        assert_eq!("1023 B", human_size(unit - 1));

        assert_eq!("1 MB", human_size(unit * unit));
        // Rounds up to 1024 KB, which rolls over into the next unit.
        assert_eq!("1 MB", human_size(unit * unit - 1));
        assert_eq!("1023.99 KB", human_size(unit * unit - 10));

        assert_eq!("1 GB", human_size(unit * unit * unit));
        assert_eq!("1 GB", human_size(unit * unit * unit - 1));

        assert_eq!("1 TB", human_size(unit * unit * unit * unit));
        assert_eq!("1 TB", human_size(unit * unit * unit * unit - 1));

        assert_eq!("1 PB", human_size(unit * unit * unit * unit * unit));
        assert_eq!("1 PB", human_size(unit * unit * unit * unit * unit - 1));
    }
}