use std::collections::{HashMap, HashSet};
use std::ffi::OsStr;
use std::fmt::{self, Display, Formatter, Write};
use std::fs::Metadata;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::time::SystemTime;

use salvo_core::fs::NamedFile;
use salvo_core::handler::Handler;
use salvo_core::http::header::ACCEPT_ENCODING;
use salvo_core::http::{self, HeaderValue, Request, Response, StatusCode, StatusError};
use salvo_core::writing::Text;
use salvo_core::{Depot, FlowCtrl, IntoVecString, async_trait};
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::{OffsetDateTime, macros::format_description};

use super::{
    decode_url_path_safely, encode_url_path, format_url_path_safely, join_path, redirect_to_dir_url,
};

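/// Compression algorithms for which precompressed file variants can be served.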
#[derive(Eq, PartialEq, Clone, Copy, Debug, Hash)]
#[non_exhaustive]
pub enum CompressionAlgo {
    Brotli,
    Deflate,
    Gzip,
    Zstd,
}

impl FromStr for CompressionAlgo {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "br" => Ok(Self::Brotli),
            "brotli" => Ok(Self::Brotli),
            "deflate" => Ok(Self::Deflate),
            "gzip" => Ok(Self::Gzip),
            "zstd" => Ok(Self::Zstd),
            _ => Err(format!("unknown compression algorithm: {s}")),
        }
    }
}

impl Display for CompressionAlgo {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            Self::Brotli => write!(f, "br"),
            Self::Deflate => write!(f, "deflate"),
            Self::Gzip => write!(f, "gzip"),
            Self::Zstd => write!(f, "zstd"),
        }
    }
}

impl From<CompressionAlgo> for HeaderValue {
    #[inline]
    fn from(algo: CompressionAlgo) -> Self {
        match algo {
            CompressionAlgo::Brotli => HeaderValue::from_static("br"),
            CompressionAlgo::Deflate => HeaderValue::from_static("deflate"),
            CompressionAlgo::Gzip => HeaderValue::from_static("gzip"),
            CompressionAlgo::Zstd => HeaderValue::from_static("zstd"),
        }
    }
}

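/// Types that can be converted into the list of root directories served by [`StaticDir`].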
pub trait StaticRoots {
    fn collect(self) -> Vec<PathBuf>;
}

impl StaticRoots for &str {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![PathBuf::from(self)]
    }
}
impl StaticRoots for &String {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![PathBuf::from(self)]
    }
}
impl StaticRoots for String {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![PathBuf::from(self)]
    }
}
impl StaticRoots for PathBuf {
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        vec![self]
    }
}
impl<T> StaticRoots for Vec<T>
where
    T: Into<PathBuf> + AsRef<OsStr>,
{
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        self.iter().map(Into::into).collect()
    }
}
impl<T, const N: usize> StaticRoots for [T; N]
where
    T: Into<PathBuf> + AsRef<OsStr>,
{
    #[inline]
    fn collect(self) -> Vec<PathBuf> {
        self.iter().map(Into::into).collect()
    }
}

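/// Handler that serves static files from one or more root directories, with optional
/// precompressed variants, default index files, a fallback file, and auto-generated
/// directory listings.
///
/// A minimal usage sketch, not taken from this file: the router wildcard syntax and the
/// `"static"` directory are illustrative assumptions and may differ between Salvo versions.
///
/// ```ignore
/// use salvo_core::Router;
///
/// // Serve ./static, preferring index.html and listing directories without one.
/// let router = Router::with_path("{*path}").get(
///     StaticDir::new(["static"])
///         .defaults("index.html")
///         .auto_list(true),
/// );
/// ```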
#[non_exhaustive]
pub struct StaticDir {
    pub roots: Vec<PathBuf>,
    pub chunk_size: Option<u64>,
    pub include_dot_files: bool,
    #[allow(clippy::type_complexity)]
    exclude_filters: Vec<Box<dyn Fn(&str) -> bool + Send + Sync>>,
    pub auto_list: bool,
    pub compressed_variations: HashMap<CompressionAlgo, Vec<String>>,
    pub defaults: Vec<String>,
    pub fallback: Option<String>,
}
impl StaticDir {
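    /// Creates a new `StaticDir` serving the given root(s), with precompressed variants
    /// preconfigured for the `br`, `zst`, `gz`, and `deflate` extensions.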
    #[inline]
    pub fn new<T: StaticRoots + Sized>(roots: T) -> Self {
        let mut compressed_variations = HashMap::new();
        compressed_variations.insert(CompressionAlgo::Brotli, vec!["br".to_owned()]);
        compressed_variations.insert(CompressionAlgo::Zstd, vec!["zst".to_owned()]);
        compressed_variations.insert(CompressionAlgo::Gzip, vec!["gz".to_owned()]);
        compressed_variations.insert(CompressionAlgo::Deflate, vec!["deflate".to_owned()]);

        Self {
            roots: roots.collect(),
            chunk_size: None,
            include_dot_files: false,
            exclude_filters: vec![],
            auto_list: false,
            compressed_variations,
            defaults: vec![],
            fallback: None,
        }
    }

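    /// Sets whether files and directories whose names start with a dot are served.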
    #[inline]
    pub fn include_dot_files(mut self, include_dot_files: bool) -> Self {
        self.include_dot_files = include_dot_files;
        self
    }

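    /// Adds an exclude filter; paths for which the filter returns `true` are not served.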
    #[inline]
    pub fn exclude<F>(mut self, filter: F) -> Self
    where
        F: Fn(&str) -> bool + Send + Sync + 'static,
    {
        self.exclude_filters.push(Box::new(filter));
        self
    }

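    /// Sets whether a generated listing is rendered for directories that have no default file.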
    #[inline]
    pub fn auto_list(mut self, auto_list: bool) -> Self {
        self.auto_list = auto_list;
        self
    }

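    /// Registers the comma-separated file extensions used to look up precompressed variants
    /// for the given algorithm (e.g. `"gz"` for `CompressionAlgo::Gzip`).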
    #[inline]
    pub fn compressed_variation<A>(mut self, algo: A, exts: &str) -> Self
    where
        A: Into<CompressionAlgo>,
    {
        self.compressed_variations.insert(
            algo.into(),
            exts.split(',').map(|s| s.trim().to_string()).collect(),
        );
        self
    }

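    /// Sets the default file names (e.g. `"index.html"`) looked up when a directory is requested.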
    #[inline]
    pub fn defaults(mut self, defaults: impl IntoVecString) -> Self {
        self.defaults = defaults.into_vec_string();
        self
    }

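    /// Sets a fallback file, served from the roots when the requested path does not match.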
    pub fn fallback(mut self, fallback: impl Into<String>) -> Self {
        self.fallback = Some(fallback.into());
        self
    }

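    /// Sets the buffer size, in bytes, used when streaming file contents.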
    #[inline]
    pub fn chunk_size(mut self, size: u64) -> Self {
        self.chunk_size = Some(size);
        self
    }

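    /// Returns `true` if `ext` matches any configured precompressed-variant extension.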
    #[inline]
    fn is_compressed_ext(&self, ext: &str) -> bool {
        for exts in self.compressed_variations.values() {
            if exts.iter().any(|e| e == ext) {
                return true;
            }
        }
        false
    }
}
#[derive(Serialize, Deserialize, Debug)]
struct CurrentInfo {
    path: String,
    files: Vec<FileInfo>,
    dirs: Vec<DirInfo>,
}
impl CurrentInfo {
    #[inline]
    fn new(path: String, files: Vec<FileInfo>, dirs: Vec<DirInfo>) -> CurrentInfo {
        CurrentInfo { path, files, dirs }
    }
}
#[derive(Serialize, Deserialize, Debug)]
struct FileInfo {
    name: String,
    size: u64,
    modified: OffsetDateTime,
}
impl FileInfo {
    #[inline]
    fn new(name: String, metadata: Metadata) -> FileInfo {
        FileInfo {
            name,
            size: metadata.len(),
            modified: metadata
                .modified()
                .unwrap_or_else(|_| SystemTime::now())
                .into(),
        }
    }
}
#[derive(Serialize, Deserialize, Debug)]
struct DirInfo {
    name: String,
    modified: OffsetDateTime,
}
impl DirInfo {
    #[inline]
    fn new(name: String, metadata: Metadata) -> DirInfo {
        DirInfo {
            name,
            modified: metadata
                .modified()
                .unwrap_or_else(|_| SystemTime::now())
                .into(),
        }
    }
}

#[async_trait]
impl Handler for StaticDir {
    async fn handle(
        &self,
        req: &mut Request,
        _depot: &mut Depot,
        res: &mut Response,
        _ctrl: &mut FlowCtrl,
    ) {
        let req_path = req.uri().path();
        let rel_path = if let Some(rest) = req.params().tail() {
            rest
        } else {
            &*decode_url_path_safely(req_path)
        };
        let rel_path = format_url_path_safely(rel_path);
        let mut files: HashMap<String, Metadata> = HashMap::new();
        let mut dirs: HashMap<String, Metadata> = HashMap::new();
        let is_dot_file = Path::new(&rel_path)
            .file_name()
            .and_then(|s| s.to_str())
            .map(|s| s.starts_with('.'))
            .unwrap_or(false);
        let mut abs_path = None;
        if self.include_dot_files || !is_dot_file {
            for root in &self.roots {
                let raw_path = join_path!(root, &rel_path);
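                // Reject any resolved path that escapes the root directory (path traversal guard).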
334 if !Path::new(&raw_path).starts_with(root) {
336 continue;
337 }
338 for filter in &self.exclude_filters {
339 if filter(&raw_path) {
340 continue;
341 }
342 }
343 let path = Path::new(&raw_path);
344 if path.is_dir() {
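                    // Directory URLs must end with '/' so relative links in the generated
                    // listing resolve correctly; otherwise redirect to the slashed URL.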
                    if !req_path.ends_with('/') && !req_path.is_empty() {
                        redirect_to_dir_url(req.uri(), res);
                        return;
                    }

                    for ifile in &self.defaults {
                        let ipath = path.join(ifile);
                        if ipath.is_file() {
                            abs_path = Some(ipath);
                            break;
                        }
                    }

                    if self.auto_list && abs_path.is_none() {
                        abs_path = Some(path.to_path_buf());
                    }
                    if abs_path.is_some() {
                        break;
                    }
                } else if path.is_file() {
                    abs_path = Some(path.to_path_buf());
                }
            }
        }
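        // No direct match: try the configured fallback file, if any.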
        let fallback = self.fallback.as_deref().unwrap_or_default();
        if abs_path.is_none() && !fallback.is_empty() {
            for root in &self.roots {
                let raw_path = join_path!(root, fallback);
                if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
                    continue;
                }
                let path = Path::new(&raw_path);
                if path.is_file() {
                    abs_path = Some(path.to_path_buf());
                    break;
                }
            }
        }

        let abs_path = match abs_path {
            Some(path) => path,
            None => {
                res.render(StatusError::not_found());
                return;
            }
        };

        if abs_path.is_file() {
            let ext = abs_path
                .extension()
                .and_then(|s| s.to_str())
                .map(|s| s.to_lowercase());
            let is_compressed_ext = ext
                .as_deref()
                .map(|ext| self.is_compressed_ext(ext))
                .unwrap_or(false);
            let mut content_encoding = None;
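            // If the requested file is not itself a precompressed variant, look for a sibling
            // file with a configured extension (e.g. "app.js.gz") for an algorithm the client
            // accepts, and serve it with the corresponding Content-Encoding.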
            let named_path = if !is_compressed_ext {
                if !self.compressed_variations.is_empty() {
                    let mut new_abs_path = None;
                    let header = req
                        .headers()
                        .get(ACCEPT_ENCODING)
                        .and_then(|v| v.to_str().ok())
                        .unwrap_or_default();
                    let accept_algos = http::parse_accept_encoding(header)
                        .into_iter()
                        .filter_map(|(algo, _level)| algo.parse::<CompressionAlgo>().ok())
                        .collect::<HashSet<_>>();
                    for (algo, exts) in &self.compressed_variations {
                        if accept_algos.contains(algo) {
                            for zip_ext in exts {
                                let mut path = abs_path.clone();
                                path.as_mut_os_string().push(&*format!(".{}", zip_ext));
                                if path.is_file() {
                                    new_abs_path = Some(path);
                                    content_encoding = Some(algo.to_string());
                                    break;
                                }
                            }
                        }
                    }
                    new_abs_path.unwrap_or(abs_path)
                } else {
                    abs_path
                }
            } else {
                abs_path
            };

            let builder = {
                let mut builder = NamedFile::builder(named_path).content_type(
                    mime_infer::from_ext(ext.as_deref().unwrap_or_default())
                        .first_or_octet_stream(),
                );
                if let Some(content_encoding) = content_encoding {
                    builder = builder.content_encoding(content_encoding);
                }
                if let Some(size) = self.chunk_size {
                    builder = builder.buffer_size(size);
                }
                builder
            };
            if let Ok(named_file) = builder.build().await {
                let headers = req.headers();
                named_file.send(headers, res).await;
            } else {
                res.render(StatusError::internal_server_error().brief("Read file failed."));
            }
        } else if abs_path.is_dir() {
            if let Ok(mut entries) = tokio::fs::read_dir(&abs_path).await {
                while let Ok(Some(entry)) = entries.next_entry().await {
                    let file_name = entry.file_name().to_string_lossy().to_string();
                    if self.include_dot_files || !file_name.starts_with('.') {
                        let raw_path = join_path!(&abs_path, &file_name);
                        if self.exclude_filters.iter().any(|filter| filter(&raw_path)) {
                            continue;
                        }
                        if let Ok(metadata) = entry.metadata().await {
                            if metadata.is_dir() {
                                dirs.entry(file_name).or_insert(metadata);
                            } else {
                                files.entry(file_name).or_insert(metadata);
                            }
                        }
                    }
                }
            }

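            // Sort entries by name and render the listing in the first format the client
            // accepts (plain text, JSON, XML, or HTML by default).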
            let format = req.first_accept().unwrap_or(mime::TEXT_HTML);
            let mut files: Vec<FileInfo> = files
                .into_iter()
                .map(|(name, metadata)| FileInfo::new(name, metadata))
                .collect();
            files.sort_by(|a, b| a.name.cmp(&b.name));
            let mut dirs: Vec<DirInfo> = dirs
                .into_iter()
                .map(|(name, metadata)| DirInfo::new(name, metadata))
                .collect();
            dirs.sort_by(|a, b| a.name.cmp(&b.name));
            let root = CurrentInfo::new(decode_url_path_safely(req_path), files, dirs);
            res.status_code(StatusCode::OK);
            match format.subtype().as_ref() {
                "plain" => res.render(Text::Plain(list_text(&root))),
                "json" => res.render(Text::Json(list_json(&root))),
                "xml" => res.render(Text::Xml(list_xml(&root))),
                _ => res.render(Text::Html(list_html(&root))),
            };
        }
    }
}

#[inline]
fn list_json(current: &CurrentInfo) -> String {
    json!(current).to_string()
}
fn list_xml(current: &CurrentInfo) -> String {
    let mut ftxt = "<list>".to_owned();
    if current.dirs.is_empty() && current.files.is_empty() {
        ftxt.push_str("No files");
    } else {
        let format = format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");
        for dir in &current.dirs {
            let _ = write!(
                ftxt,
                "<dir><name>{}</name><modified>{}</modified><link>{}</link></dir>",
                dir.name,
                dir.modified.format(&format).expect("format time failed"),
                encode_url_path(&dir.name),
            );
        }
        for file in &current.files {
            let _ = write!(
                ftxt,
                "<file><name>{}</name><modified>{}</modified><size>{}</size><link>{}</link></file>",
                file.name,
                file.modified.format(&format).expect("format time failed"),
                file.size,
                encode_url_path(&file.name),
            );
        }
    }
    ftxt.push_str("</list>");
    ftxt
}
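/// Formats a byte count using 1024-based units, keeping at most two decimal places.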
fn human_size(bytes: u64) -> String {
    let units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
    let mut index = 0;
    let mut bytes = bytes as f64;

    while bytes >= 1024.0 && index < units.len() - 1 {
        bytes /= 1024.0;
        index += 1;
    }

    bytes = (bytes * 100.0).round() / 100.0;
    if bytes == 1024.0 && index < units.len() - 1 {
        index += 1;
        bytes = 1.0;
    }
    format!("{} {}", bytes, units[index])
}
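/// Renders the directory listing as an HTML page with breadcrumb navigation.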
fn list_html(current: &CurrentInfo) -> String {
    fn header_links(path: &str) -> String {
        let segments = path
            .trim_start_matches('/')
            .trim_end_matches('/')
            .split('/');
        let mut link = "".to_string();
        format!(
            r#"<a href="/">{}</a>{}"#,
            HOME_ICON,
            segments
                .map(|seg| {
                    link = format!("{link}/{seg}");
                    format!("/<a href=\"{link}\">{seg}</a>")
                })
                .collect::<Vec<_>>()
                .join("")
        )
    }
    let mut ftxt = format!(
        r#"<!DOCTYPE html><html><head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width">
    <title>{}</title>
    <style>{}</style></head><body><header><h3>Index of: {}</h3></header><hr/>"#,
        current.path,
        HTML_STYLE,
        header_links(&current.path)
    );
    if current.dirs.is_empty() && current.files.is_empty() {
        let _ = write!(ftxt, "<p>No files</p>");
    } else {
        let _ = write!(ftxt, "<table><tr><th>");
        if !(current.path.is_empty() || current.path == "/") {
            let _ = write!(ftxt, "<a href=\"../\">[..]</a>");
        }
        let _ = write!(
            ftxt,
            "</th><th>Name</th><th>Last modified</th><th>Size</th></tr>"
        );
        let format = format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");
        for dir in &current.dirs {
            let _ = write!(
                ftxt,
                r#"<tr><td>{}</td><td><a href="./{}/">{}</a></td><td>{}</td><td></td></tr>"#,
                DIR_ICON,
                encode_url_path(&dir.name),
                dir.name,
                dir.modified.format(&format).expect("format time failed"),
            );
        }
        for file in &current.files {
            let _ = write!(
                ftxt,
                r#"<tr><td>{}</td><td><a href="./{}">{}</a></td><td>{}</td><td>{}</td></tr>"#,
                FILE_ICON,
                encode_url_path(&file.name),
                file.name,
                file.modified.format(&format).expect("format time failed"),
                human_size(file.size)
            );
        }
        let _ = write!(ftxt, "</table>");
    }
    let _ = write!(
        ftxt,
        r#"<hr/><footer><a href="https://salvo.rs" target="_blank">salvo</a></footer></body>"#
    );
    ftxt
}
#[inline]
fn list_text(current: &CurrentInfo) -> String {
    json!(current).to_string()
}

const HTML_STYLE: &str = r#"
    :root {
        --bg-color: #fff;
        --text-color: #222;
        --link-color: #0366d6;
        --link-visited-color: #f22526;
        --dir-icon-color: #79b8ff;
        --file-icon-color: #959da5;
    }
    body {background: var(--bg-color); color: var(--text-color);}
    a {text-decoration:none;color:var(--link-color);}
    a:visited {color: var(--link-visited-color);}
    a:hover {text-decoration:underline;}
    header a {padding: 0 6px;}
    footer {text-align:center;font-size:12px;}
    table {text-align:left;border-collapse: collapse;}
    tr {border-bottom: solid 1px #ccc;}
    tr:last-child {border-bottom: none;}
    th, td {padding: 5px;}
    th:first-child,td:first-child {text-align: center;}
    svg[data-icon="dir"] {vertical-align: text-bottom; color: var(--dir-icon-color); fill: currentColor;}
    svg[data-icon="file"] {vertical-align: text-bottom; color: var(--file-icon-color); fill: currentColor;}
    svg[data-icon="home"] {width:18px;}
    @media (prefers-color-scheme: dark) {
        :root {
            --bg-color: #222;
            --text-color: #ddd;
            --link-color: #539bf5;
            --link-visited-color: #f25555;
            --dir-icon-color: #7da3d0;
            --file-icon-color: #545d68;
        }
    }"#;
const DIR_ICON: &str = r#"<svg aria-label="Directory" data-icon="dir" width="20" height="20" viewBox="0 0 512 512" version="1.1" role="img"><path fill="currentColor" d="M464 128H272l-64-64H48C21.49 64 0 85.49 0 112v288c0 26.51 21.49 48 48 48h416c26.51 0 48-21.49 48-48V176c0-26.51-21.49-48-48-48z"></path></svg>"#;
const FILE_ICON: &str = r#"<svg aria-label="File" data-icon="file" width="20" height="20" viewBox="0 0 384 512" version="1.1" role="img"><path d="M369.9 97.9L286 14C277 5 264.8-.1 252.1-.1H48C21.5 0 0 21.5 0 48v416c0 26.5 21.5 48 48 48h288c26.5 0 48-21.5 48-48V131.9c0-12.7-5.1-25-14.1-34zM332.1 128H256V51.9l76.1 76.1zM48 464V48h160v104c0 13.3 10.7 24 24 24h104v288H48z"/></svg>"#;
const HOME_ICON: &str = r#"<svg aria-hidden="true" data-icon="home" viewBox="0 0 576 512"><path fill="currentColor" d="M280.37 148.26L96 300.11V464a16 16 0 0 0 16 16l112.06-.29a16 16 0 0 0 15.92-16V368a16 16 0 0 1 16-16h64a16 16 0 0 1 16 16v95.64a16 16 0 0 0 16 16.05L464 480a16 16 0 0 0 16-16V300L295.67 148.26a12.19 12.19 0 0 0-15.3 0zM571.6 251.47L488 182.56V44.05a12 12 0 0 0-12-12h-56a12 12 0 0 0-12 12v72.61L318.47 43a48 48 0 0 0-61 0L4.34 251.47a12 12 0 0 0-1.6 16.9l25.5 31A12 12 0 0 0 45.15 301l235.22-193.74a12.19 12.19 0 0 1 15.3 0L530.9 301a12 12 0 0 0 16.9-1.6l25.5-31a12 12 0 0 0-1.7-16.93z"></path></svg>"#;

#[cfg(test)]
mod tests {
    use crate::dir::human_size;

    #[tokio::test]
    async fn test_convert_bytes_to_units() {
        assert_eq!("94.03 MB", human_size(98595176));

        let unit = 1024;
        assert_eq!("1 KB", human_size(unit));
        assert_eq!("1023 B", human_size(unit - 1));

        assert_eq!("1 MB", human_size(unit * unit));
        assert_eq!("1 MB", human_size(unit * unit - 1));
        assert_eq!("1023.99 KB", human_size(unit * unit - 10));

        assert_eq!("1 GB", human_size(unit * unit * unit));
        assert_eq!("1 GB", human_size(unit * unit * unit - 1));

        assert_eq!("1 TB", human_size(unit * unit * unit * unit));
        assert_eq!("1 TB", human_size(unit * unit * unit * unit - 1));

        assert_eq!("1 PB", human_size(unit * unit * unit * unit * unit));
        assert_eq!("1 PB", human_size(unit * unit * unit * unit * unit - 1));
    }
}
689}