1use std::collections::{HashMap, HashSet};
4use std::ffi::OsStr;
5use std::fmt::{self, Debug, Display, Formatter, Write};
6use std::fs::Metadata;
7use std::path::{Path, PathBuf};
8use std::str::FromStr;
9use std::time::SystemTime;
10
11use salvo_core::fs::NamedFile;
12use salvo_core::handler::Handler;
13use salvo_core::http::header::ACCEPT_ENCODING;
14use salvo_core::http::{self, HeaderValue, Request, Response, StatusCode, StatusError, mime};
15use salvo_core::writing::Text;
16use salvo_core::{Depot, FlowCtrl, IntoVecString, async_trait};
17use serde::{Deserialize, Serialize};
18use serde_json::json;
19use time::{OffsetDateTime, macros::format_description};
20use tokio::io::AsyncReadExt;
21
22use super::{
23 decode_url_path_safely, encode_url_path, format_url_path_safely, join_path, redirect_to_dir_url,
24};
25
/// Compression algorithms supported for serving pre-compressed static files.
#[derive(Eq, PartialEq, Clone, Copy, Debug, Hash)]
#[non_exhaustive]
pub enum CompressionAlgo {
    /// Brotli (`br`).
    Brotli,
    /// Deflate.
    Deflate,
    /// Gzip.
    Gzip,
    /// Zstandard (`zstd`).
    Zstd,
}
impl FromStr for CompressionAlgo {
    type Err = String;

    /// Parse an `Accept-Encoding` token (`br`/`brotli`, `deflate`, `gzip`, `zstd`).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "br" | "brotli" => Ok(Self::Brotli),
            "deflate" => Ok(Self::Deflate),
            "gzip" => Ok(Self::Gzip),
            "zstd" => Ok(Self::Zstd),
            _ => Err(format!("unknown compression algorithm: {s}")),
        }
    }
}

impl Display for CompressionAlgo {
    /// Write the canonical HTTP token for this algorithm.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let token = match self {
            Self::Brotli => "br",
            Self::Deflate => "deflate",
            Self::Gzip => "gzip",
            Self::Zstd => "zstd",
        };
        f.write_str(token)
    }
}
63
64impl From<CompressionAlgo> for HeaderValue {
65 #[inline]
66 fn from(algo: CompressionAlgo) -> Self {
67 match algo {
68 CompressionAlgo::Brotli => Self::from_static("br"),
69 CompressionAlgo::Deflate => Self::from_static("deflate"),
70 CompressionAlgo::Gzip => Self::from_static("gzip"),
71 CompressionAlgo::Zstd => Self::from_static("zstd"),
72 }
73 }
74}
75
/// Conversion trait for the root-directory argument of a static-file handler.
pub trait StaticRoots {
    /// Consume `self` and return the list of root directories.
    fn collect(self) -> Vec<PathBuf>;
}

impl StaticRoots for &str {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![self.into()]
    }
}
impl StaticRoots for &String {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![self.into()]
    }
}
impl StaticRoots for String {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![self.into()]
    }
}
impl StaticRoots for PathBuf {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![self]
    }
}
impl<T> StaticRoots for Vec<T>
where
    T: Into<PathBuf> + AsRef<OsStr>,
{
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        self.iter().map(PathBuf::from).collect()
    }
}
impl<T, const N: usize> StaticRoots for [T; N]
where
    T: Into<PathBuf> + AsRef<OsStr>,
{
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        self.iter().map(PathBuf::from).collect()
    }
}
124
#[non_exhaustive]
/// Handler serving static files (and, optionally, directory listings) from
/// one or more root directories.
pub struct StaticDir {
    /// Root directories searched, in order, for the requested path.
    pub roots: Vec<PathBuf>,
    /// Buffer size used when streaming file contents; `None` uses the default.
    pub chunk_size: Option<u64>,
    /// Whether paths whose final component starts with `.` may be served.
    pub include_dot_files: bool,
    // Predicates over the raw filesystem path; a `true` result skips the entry.
    exclude_filters: Vec<Box<dyn Fn(&str) -> bool + Send + Sync>>,
    /// Render a listing for directories that have no matching default file.
    pub auto_list: bool,
    /// Map from compression algorithm to extensions of pre-compressed variants.
    pub compressed_variations: HashMap<CompressionAlgo, Vec<String>>,
    /// File names (e.g. `index.html`) tried when a directory is requested.
    pub defaults: Vec<String>,
    /// Relative path served when the requested path matches nothing.
    pub fallback: Option<String>,
}
161impl Debug for StaticDir {
162 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
163 f.debug_struct("StaticDir")
164 .field("roots", &self.roots)
165 .field("chunk_size", &self.chunk_size)
166 .field("include_dot_files", &self.include_dot_files)
167 .field("auto_list", &self.auto_list)
168 .field("compressed_variations", &self.compressed_variations)
169 .field("defaults", &self.defaults)
170 .field("fallback", &self.fallback)
171 .finish()
172 }
173}
174impl StaticDir {
175 #[inline]
177 pub fn new<T: StaticRoots + Sized>(roots: T) -> Self {
178 let mut compressed_variations = HashMap::new();
179 compressed_variations.insert(CompressionAlgo::Brotli, vec!["br".to_owned()]);
180 compressed_variations.insert(CompressionAlgo::Zstd, vec!["zst".to_owned()]);
181 compressed_variations.insert(CompressionAlgo::Gzip, vec!["gz".to_owned()]);
182 compressed_variations.insert(CompressionAlgo::Deflate, vec!["deflate".to_owned()]);
183
184 Self {
185 roots: roots.collect(),
186 chunk_size: None,
187 include_dot_files: false,
188 exclude_filters: vec![],
189 auto_list: false,
190 compressed_variations,
191 defaults: vec![],
192 fallback: None,
193 }
194 }
195
196 #[inline]
198 #[must_use]
199 pub fn include_dot_files(mut self, include_dot_files: bool) -> Self {
200 self.include_dot_files = include_dot_files;
201 self
202 }
203
204 #[inline]
208 #[must_use]
209 pub fn exclude<F>(mut self, filter: F) -> Self
210 where
211 F: Fn(&str) -> bool + Send + Sync + 'static,
212 {
213 self.exclude_filters.push(Box::new(filter));
214 self
215 }
216
217 #[inline]
219 #[must_use]
220 pub fn auto_list(mut self, auto_list: bool) -> Self {
221 self.auto_list = auto_list;
222 self
223 }
224
225 #[inline]
227 #[must_use]
228 pub fn compressed_variation<A>(mut self, algo: A, exts: &str) -> Self
229 where
230 A: Into<CompressionAlgo>,
231 {
232 self.compressed_variations.insert(
233 algo.into(),
234 exts.split(',').map(|s| s.trim().to_owned()).collect(),
235 );
236 self
237 }
238
239 #[inline]
241 #[must_use]
242 pub fn defaults(mut self, defaults: impl IntoVecString) -> Self {
243 self.defaults = defaults.into_vec_string();
244 self
245 }
246
247 #[must_use]
249 pub fn fallback(mut self, fallback: impl Into<String>) -> Self {
250 self.fallback = Some(fallback.into());
251 self
252 }
253
254 #[inline]
261 #[must_use]
262 pub fn chunk_size(mut self, size: u64) -> Self {
263 self.chunk_size = Some(size);
264 self
265 }
266
267 #[inline]
268 fn is_compressed_ext(&self, ext: &str) -> bool {
269 for exts in self.compressed_variations.values() {
270 if exts.iter().any(|e| e == ext) {
271 return true;
272 }
273 }
274 false
275 }
276}
#[derive(Serialize, Deserialize, Debug)]
/// Listing data for one directory: the decoded request path plus its entries.
struct CurrentInfo {
    // Decoded URL path of the listed directory.
    path: String,
    // Regular-file entries of the directory.
    files: Vec<FileInfo>,
    // Sub-directory entries of the directory.
    dirs: Vec<DirInfo>,
}
283impl CurrentInfo {
284 #[inline]
285 fn new(path: String, files: Vec<FileInfo>, dirs: Vec<DirInfo>) -> Self {
286 Self { path, files, dirs }
287 }
288}
#[derive(Serialize, Deserialize, Debug)]
/// Name, size and modification time of a regular file in a listing.
struct FileInfo {
    name: String,
    // Size in bytes, from the file's metadata.
    size: u64,
    // Last-modified time; falls back to "now" if the platform has no mtime.
    modified: OffsetDateTime,
}
295impl FileInfo {
296 #[inline]
297 #[must_use]
298 fn new(name: String, metadata: &Metadata) -> Self {
299 Self {
300 name,
301 size: metadata.len(),
302 modified: metadata
303 .modified()
304 .unwrap_or_else(|_| SystemTime::now())
305 .into(),
306 }
307 }
308}
#[derive(Serialize, Deserialize, Debug)]
/// Name and modification time of a sub-directory in a listing.
struct DirInfo {
    name: String,
    // Last-modified time; falls back to "now" if the platform has no mtime.
    modified: OffsetDateTime,
}
314impl DirInfo {
315 #[inline]
316 fn new(name: String, metadata: &Metadata) -> Self {
317 Self {
318 name,
319 modified: metadata
320 .modified()
321 .unwrap_or_else(|_| SystemTime::now())
322 .into(),
323 }
324 }
325}
326
327#[async_trait]
328impl Handler for StaticDir {
329 async fn handle(
330 &self,
331 req: &mut Request,
332 _depot: &mut Depot,
333 res: &mut Response,
334 _ctrl: &mut FlowCtrl,
335 ) {
336 let req_path = req.uri().path();
337 let rel_path = if let Some(rest) = req.params().tail() {
338 rest
339 } else {
340 &*decode_url_path_safely(req_path)
341 };
342 let rel_path = format_url_path_safely(rel_path);
343 let mut files: HashMap<String, Metadata> = HashMap::new();
344 let mut dirs: HashMap<String, Metadata> = HashMap::new();
345 let is_dot_file = Path::new(&rel_path)
346 .file_name()
347 .and_then(|s| s.to_str())
348 .map(|s| s.starts_with('.'))
349 .unwrap_or(false);
350 let mut abs_path = None;
351 if self.include_dot_files || !is_dot_file {
352 for root in &self.roots {
353 let raw_path = join_path!(root, &rel_path);
354 if !Path::new(&raw_path).starts_with(root) {
356 continue;
357 }
358 if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
359 continue;
360 }
361 let path = Path::new(&raw_path);
362 if path.is_dir() {
363 if !req_path.ends_with('/') && !req_path.is_empty() {
364 redirect_to_dir_url(req.uri(), res);
365 return;
366 }
367
368 for ifile in &self.defaults {
369 let ipath = path.join(ifile);
370 if ipath.is_file() {
371 abs_path = Some(ipath);
372 break;
373 }
374 }
375
376 if self.auto_list && abs_path.is_none() {
377 abs_path = Some(path.to_path_buf());
378 }
379 if abs_path.is_some() {
380 break;
381 }
382 } else if path.is_file() {
383 abs_path = Some(path.to_path_buf());
384 }
385 }
386 }
387 let fallback = self.fallback.as_deref().unwrap_or_default();
388 if abs_path.is_none() && !fallback.is_empty() {
389 for root in &self.roots {
390 let raw_path = join_path!(root, fallback);
391 if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
392 continue;
393 }
394 let path = Path::new(&raw_path);
395 if path.is_file() {
396 abs_path = Some(path.to_path_buf());
397 break;
398 }
399 }
400 }
401
402 let Some(abs_path) = abs_path else {
403 res.render(StatusError::not_found());
404 return;
405 };
406
407 if abs_path.is_file() {
408 let ext = abs_path
409 .extension()
410 .and_then(|s| s.to_str())
411 .map(|s| s.to_lowercase());
412 let is_compressed_ext = ext
413 .as_deref()
414 .map(|ext| self.is_compressed_ext(ext))
415 .unwrap_or(false);
416 let mut content_encoding = None;
417 let mut content_type = mime_infer::from_path(&abs_path).first();
418 if let Some(content_type) = &mut content_type {
419 if mime::is_charset_required_mime(&content_type) {
420 if let Ok(file) = tokio::fs::File::open(&abs_path).await {
421 let mut buffer: Vec<u8> = vec![];
422 let _ = file.take(1024).read(&mut buffer).await;
423 mime::fill_mime_charset_if_need(content_type, &buffer);
424 }
425 }
426 }
427 let named_path = if !is_compressed_ext {
428 if !self.compressed_variations.is_empty() {
429 let mut new_abs_path = None;
430 let header = req
431 .headers()
432 .get(ACCEPT_ENCODING)
433 .and_then(|v| v.to_str().ok())
434 .unwrap_or_default();
435 let accept_algos = http::parse_accept_encoding(header)
436 .into_iter()
437 .filter_map(|(algo, _level)| algo.parse::<CompressionAlgo>().ok())
438 .collect::<HashSet<_>>();
439 for (algo, exts) in &self.compressed_variations {
440 if accept_algos.contains(algo) {
441 for zip_ext in exts {
442 let mut path = abs_path.clone();
443 path.as_mut_os_string().push(&*format!(".{zip_ext}"));
444 if path.is_file() {
445 new_abs_path = Some(path);
446 content_encoding = Some(algo.to_string());
447 break;
448 }
449 }
450 }
451 }
452 new_abs_path.unwrap_or(abs_path)
453 } else {
454 abs_path
455 }
456 } else {
457 abs_path
458 };
459
460 let builder = {
461 let mut builder = NamedFile::builder(named_path);
462 if let Some(content_encoding) = content_encoding {
463 builder = builder.content_encoding(content_encoding);
464 }
465 if let Some(size) = self.chunk_size {
466 builder = builder.buffer_size(size);
467 }
468 if let Some(content_type) = content_type {
469 builder = builder.content_type(content_type);
470 }
471 builder
472 };
473 if let Ok(named_file) = builder.build().await {
474 let headers = req.headers();
475 named_file.send(headers, res).await;
476 } else {
477 res.render(StatusError::internal_server_error().brief("Read file failed."));
478 }
479 } else if abs_path.is_dir() {
480 if let Ok(mut entries) = tokio::fs::read_dir(&abs_path).await {
482 while let Ok(Some(entry)) = entries.next_entry().await {
483 let file_name = entry.file_name().to_string_lossy().to_string();
484 if self.include_dot_files || !file_name.starts_with('.') {
485 let raw_path = join_path!(&abs_path, &file_name);
486 if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
487 continue;
488 }
489 if let Ok(metadata) = entry.metadata().await {
490 if metadata.is_dir() {
491 dirs.entry(file_name).or_insert(metadata);
492 } else {
493 files.entry(file_name).or_insert(metadata);
494 }
495 }
496 }
497 }
498 }
499
500 let format = req.first_accept().unwrap_or(mime::TEXT_HTML);
501 let mut files: Vec<FileInfo> = files
502 .into_iter()
503 .map(|(name, metadata)| FileInfo::new(name, &metadata))
504 .collect();
505 files.sort_by(|a, b| a.name.cmp(&b.name));
506 let mut dirs: Vec<DirInfo> = dirs
507 .into_iter()
508 .map(|(name, metadata)| DirInfo::new(name, &metadata))
509 .collect();
510 dirs.sort_by(|a, b| a.name.cmp(&b.name));
511 let root = CurrentInfo::new(decode_url_path_safely(req_path), files, dirs);
512 res.status_code(StatusCode::OK);
513 match format.subtype().as_ref() {
514 "plain" => res.render(Text::Plain(list_text(&root))),
515 "json" => res.render(Text::Json(list_json(&root))),
516 "xml" => res.render(Text::Xml(list_xml(&root))),
517 _ => res.render(Text::Html(list_html(&root))),
518 };
519 }
520 }
521}
522
523#[inline]
524fn list_json(current: &CurrentInfo) -> String {
525 json!(current).to_string()
526}
527fn list_xml(current: &CurrentInfo) -> String {
528 let mut ftxt = "<list>".to_owned();
529 if current.dirs.is_empty() && current.files.is_empty() {
530 ftxt.push_str("No files");
531 } else {
532 let format = format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");
533 for dir in ¤t.dirs {
534 let _ = write!(
535 ftxt,
536 "<dir><name>{}</name><modified>{}</modified><link>{}</link></dir>",
537 dir.name,
538 dir.modified.format(&format).expect("format time failed"),
539 encode_url_path(&dir.name),
540 );
541 }
542 for file in ¤t.files {
543 let _ = write!(
544 ftxt,
545 "<file><name>{}</name><modified>{}</modified><size>{}</size><link>{}</link></file>",
546 file.name,
547 file.modified.format(&format).expect("format time failed"),
548 file.size,
549 encode_url_path(&file.name),
550 );
551 }
552 }
553 ftxt.push_str("</list>");
554 ftxt
555}
/// Format a byte count with binary (1024-based) units, rounded to at most
/// two decimal places; an exact `1024.0` after rounding is promoted to `1`
/// of the next unit (so `1048575` renders as `1 MB`, not `1024 KB`).
fn human_size(bytes: u64) -> String {
    const UNITS: [&str; 9] = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
    let mut value = bytes as f64;
    let mut unit = 0;

    while value >= 1024.0 && unit + 1 < UNITS.len() {
        value /= 1024.0;
        unit += 1;
    }

    // Round to two decimals; rounding may land exactly on 1024.00.
    value = (value * 100.0).round() / 100.0;
    if value == 1024.0 && unit + 1 < UNITS.len() {
        unit += 1;
        value = 1.0;
    }
    format!("{} {}", value, UNITS[unit])
}
573fn list_html(current: &CurrentInfo) -> String {
574 fn header_links(path: &str) -> String {
575 let segments = path
576 .trim_start_matches('/')
577 .trim_end_matches('/')
578 .split('/');
579 let mut link = "".to_owned();
580 format!(
581 r#"<a href="/">{}</a>{}"#,
582 HOME_ICON,
583 segments
584 .map(|seg| {
585 link = format!("{link}/{seg}");
586 format!("/<a href=\"{link}\">{seg}</a>")
587 })
588 .collect::<Vec<_>>()
589 .join("")
590 )
591 }
592 let mut ftxt = format!(
593 r#"<!DOCTYPE html><html><head>
594 <meta charset="utf-8">
595 <meta name="viewport" content="width=device-width">
596 <title>{}</title>
597 <style>{}</style></head><body><header><h3>Index of: {}</h3></header><hr/>"#,
598 current.path,
599 HTML_STYLE,
600 header_links(¤t.path)
601 );
602 if current.dirs.is_empty() && current.files.is_empty() {
603 let _ = write!(ftxt, "<p>No files</p>");
604 } else {
605 let _ = write!(ftxt, "<table><tr><th>");
606 if !(current.path.is_empty() || current.path == "/") {
607 let _ = write!(ftxt, "<a href=\"../\">[..]</a>");
608 }
609 let _ = write!(
610 ftxt,
611 "</th><th>Name</th><th>Last modified</th><th>Size</th></tr>"
612 );
613 let format = format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");
614 for dir in ¤t.dirs {
615 let _ = write!(
616 ftxt,
617 r#"<tr><td>{}</td><td><a href="./{}/">{}</a></td><td>{}</td><td></td></tr>"#,
618 DIR_ICON,
619 encode_url_path(&dir.name),
620 dir.name,
621 dir.modified.format(&format).expect("format time failed"),
622 );
623 }
624 for file in ¤t.files {
625 let _ = write!(
626 ftxt,
627 r#"<tr><td>{}</td><td><a href="./{}">{}</a></td><td>{}</td><td>{}</td></tr>"#,
628 FILE_ICON,
629 encode_url_path(&file.name),
630 file.name,
631 file.modified.format(&format).expect("format time failed"),
632 human_size(file.size)
633 );
634 }
635 let _ = write!(ftxt, "</table>");
636 }
637 let _ = write!(
638 ftxt,
639 r#"<hr/><footer><a href="https://salvo.rs" target="_blank">salvo</a></footer></body>"#
640 );
641 ftxt
642}
643#[inline]
644fn list_text(current: &CurrentInfo) -> String {
645 json!(current).to_string()
646}
647
/// CSS for the generated directory-listing page; honors the user's
/// light/dark color-scheme preference via CSS custom properties.
const HTML_STYLE: &str = r#"
    :root {
        --bg-color: #fff;
        --text-color: #222;
        --link-color: #0366d6;
        --link-visited-color: #f22526;
        --dir-icon-color: #79b8ff;
        --file-icon-color: #959da5;
    }
    body {background: var(--bg-color); color: var(--text-color);}
    a {text-decoration:none;color:var(--link-color);}
    a:visited {color: var(--link-visited-color);}
    a:hover {text-decoration:underline;}
    header a {padding: 0 6px;}
    footer {text-align:center;font-size:12px;}
    table {text-align:left;border-collapse: collapse;}
    tr {border-bottom: solid 1px #ccc;}
    tr:last-child {border-bottom: none;}
    th, td {padding: 5px;}
    th:first-child,td:first-child {text-align: center;}
    svg[data-icon="dir"] {vertical-align: text-bottom; color: var(--dir-icon-color); fill: currentColor;}
    svg[data-icon="file"] {vertical-align: text-bottom; color: var(--file-icon-color); fill: currentColor;}
    svg[data-icon="home"] {width:18px;}
    @media (prefers-color-scheme: dark) {
        :root {
            --bg-color: #222;
            --text-color: #ddd;
            --link-color: #539bf5;
            --link-visited-color: #f25555;
            --dir-icon-color: #7da3d0;
            --file-icon-color: #545d68;
        }
    }"#;
/// Inline SVG icon shown next to directory rows in the listing table.
const DIR_ICON: &str = r#"<svg aria-label="Directory" data-icon="dir" width="20" height="20" viewBox="0 0 512 512" version="1.1" role="img"><path fill="currentColor" d="M464 128H272l-64-64H48C21.49 64 0 85.49 0 112v288c0 26.51 21.49 48 48 48h416c26.51 0 48-21.49 48-48V176c0-26.51-21.49-48-48-48z"></path></svg>"#;
/// Inline SVG icon shown next to file rows in the listing table.
const FILE_ICON: &str = r#"<svg aria-label="File" data-icon="file" width="20" height="20" viewBox="0 0 384 512" version="1.1" role="img"><path d="M369.9 97.9L286 14C277 5 264.8-.1 252.1-.1H48C21.5 0 0 21.5 0 48v416c0 26.5 21.5 48 48 48h288c26.5 0 48-21.5 48-48V131.9c0-12.7-5.1-25-14.1-34zM332.1 128H256V51.9l76.1 76.1zM48 464V48h160v104c0 13.3 10.7 24 24 24h104v288H48z"/></svg>"#;
/// Inline SVG "home" icon used as the breadcrumb root link.
const HOME_ICON: &str = r#"<svg aria-hidden="true" data-icon="home" viewBox="0 0 576 512"><path fill="currentColor" d="M280.37 148.26L96 300.11V464a16 16 0 0 0 16 16l112.06-.29a16 16 0 0 0 15.92-16V368a16 16 0 0 1 16-16h64a16 16 0 0 1 16 16v95.64a16 16 0 0 0 16 16.05L464 480a16 16 0 0 0 16-16V300L295.67 148.26a12.19 12.19 0 0 0-15.3 0zM571.6 251.47L488 182.56V44.05a12 12 0 0 0-12-12h-56a12 12 0 0 0-12 12v72.61L318.47 43a48 48 0 0 0-61 0L4.34 251.47a12 12 0 0 0-1.6 16.9l25.5 31A12 12 0 0 0 45.15 301l235.22-193.74a12.19 12.19 0 0 1 15.3 0L530.9 301a12 12 0 0 0 16.9-1.6l25.5-31a12 12 0 0 0-1.7-16.93z"></path></svg>"#;
684
#[cfg(test)]
mod tests {
    use crate::dir::human_size;

    /// `human_size` formats byte counts with binary (1024-based) units,
    /// rounding to two decimals and promoting an exact 1024.0 result to the
    /// next unit. The test is purely synchronous, so a plain `#[test]`
    /// suffices — no async runtime needed.
    #[test]
    fn test_convert_bytes_to_units() {
        assert_eq!("94.03 MB", human_size(98595176));

        let unit: u64 = 1024;
        assert_eq!("1 KB", human_size(unit));
        assert_eq!("1023 B", human_size(unit - 1));

        assert_eq!("1 MB", human_size(unit * unit));
        assert_eq!("1 MB", human_size(unit * unit - 1));
        assert_eq!("1023.99 KB", human_size(unit * unit - 10));

        assert_eq!("1 GB", human_size(unit * unit * unit));
        assert_eq!("1 GB", human_size(unit * unit * unit - 1));

        assert_eq!("1 TB", human_size(unit * unit * unit * unit));
        assert_eq!("1 TB", human_size(unit * unit * unit * unit - 1));

        assert_eq!("1 PB", human_size(unit * unit * unit * unit * unit));
        assert_eq!("1 PB", human_size(unit * unit * unit * unit * unit - 1));
    }
}