#![allow(clippy::format_push_string)]
use std::io;
use std::path::{Component, Path, PathBuf};
use std::time::SystemTime;
use actix_web::dev::ServiceResponse;
use actix_web::web::Query;
use actix_web::{HttpMessage, HttpRequest, HttpResponse};
use bytesize::ByteSize;
use comrak::{markdown_to_html, ComrakOptions};
use percent_encoding::{percent_decode_str, utf8_percent_encode};
use regex::Regex;
use serde::Deserialize;
use strum_macros::{Display, EnumString};
use crate::archive::ArchiveMethod;
use crate::auth::CurrentUser;
use crate::errors::{self, ContextualError};
use crate::renderer;
use self::percent_encode_sets::PATH_SEGMENT;
/// Percent-encoding character sets used when building links for the listing page.
mod percent_encode_sets {
    use percent_encoding::{AsciiSet, CONTROLS};
    // Base set: all control characters plus '%' itself, so that already-encoded
    // sequences cannot be produced by accident.
    const BASE: &AsciiSet = &CONTROLS.add(b'%');
    // Characters that must be escaped inside a query string.
    pub const QUERY: &AsciiSet = &BASE.add(b' ').add(b'"').add(b'#').add(b'<').add(b'>');
    // Characters that must additionally be escaped inside a path.
    pub const PATH: &AsciiSet = &QUERY.add(b'?').add(b'`').add(b'{').add(b'}');
    // A single path segment must also escape the separators themselves.
    pub const PATH_SEGMENT: &AsciiSet = &PATH.add(b'/').add(b'\\');
}
/// Query parameters understood by the listing/file handlers.
#[derive(Deserialize, Default)]
pub struct QueryParameters {
    pub path: Option<PathBuf>,
    /// Sorting criterion for the entry list.
    pub sort: Option<SortingMethod>,
    /// Sorting direction for the entry list.
    pub order: Option<SortingOrder>,
    // NOTE(review): presumably toggles serving the raw file instead of a rendered
    // page — handled by a caller outside this file; confirm against the renderer.
    pub raw: Option<bool>,
    // Name of a directory to create; consumed by the mkdir handler elsewhere.
    pub mkdir_name: Option<String>,
    /// Requested archive format; when present, the directory is streamed as an archive.
    download: Option<ArchiveMethod>,
}
/// Available criteria for sorting directory entries.
#[derive(Deserialize, Clone, EnumString, Display, Copy)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum SortingMethod {
    /// Sort by entry name (case-insensitive, alphanumeric-aware).
    Name,
    /// Sort by file size (directories are treated as size 0).
    Size,
    /// Sort by last modification date.
    Date,
}
/// Direction in which entries are sorted; serialized as "asc"/"desc" in URLs.
#[derive(Deserialize, Clone, EnumString, Display, Copy)]
pub enum SortingOrder {
    /// Ascending order ("asc" in query strings).
    #[serde(alias = "asc")]
    #[strum(serialize = "asc")]
    Ascending,
    /// Descending order ("desc" in query strings).
    #[serde(alias = "desc")]
    #[strum(serialize = "desc")]
    Descending,
}
/// Kind of a filesystem entry shown in the listing.
#[derive(PartialEq, Eq)]
pub enum EntryType {
    /// A subdirectory.
    Directory,
    /// A regular file.
    File,
}
/// A single row of the directory listing as passed to the renderer.
pub struct Entry {
    /// Display name of the entry.
    pub name: String,
    /// Whether this is a file or a directory.
    pub entry_type: EntryType,
    /// Percent-encoded URL the entry links to.
    pub link: String,
    /// File size; `None` for directories.
    pub size: Option<bytesize::ByteSize>,
    /// Last modification time, if the filesystem reported one.
    pub last_modification_date: Option<SystemTime>,
    /// Symlink target path, if the entry is a symlink and symlink info is enabled.
    pub symlink_info: Option<String>,
}
impl Entry {
    /// Assembles an `Entry` from its constituent parts.
    fn new(
        name: String,
        entry_type: EntryType,
        link: String,
        size: Option<bytesize::ByteSize>,
        last_modification_date: Option<SystemTime>,
        symlink_info: Option<String>,
    ) -> Self {
        Self {
            name,
            entry_type,
            link,
            size,
            last_modification_date,
            symlink_info,
        }
    }

    /// Returns `true` if this entry is a directory.
    pub fn is_dir(&self) -> bool {
        matches!(self.entry_type, EntryType::Directory)
    }

    /// Returns `true` if this entry is a regular file.
    pub fn is_file(&self) -> bool {
        matches!(self.entry_type, EntryType::File)
    }
}
/// One segment of the breadcrumb navigation trail above the listing.
pub struct Breadcrumb {
    /// Text shown for this segment.
    pub name: String,
    /// Link target for this segment.
    pub link: String,
}
impl Breadcrumb {
fn new(name: String, link: String) -> Self {
Breadcrumb { name, link }
}
}
/// Serves the single configured file (single-file serving mode).
///
/// The file path comes from the application's `MiniserveConfig`; any I/O error
/// while opening it is converted into an actix error response.
pub async fn file_handler(req: HttpRequest) -> actix_web::Result<actix_files::NamedFile> {
    let config = req.app_data::<crate::MiniserveConfig>().unwrap();
    Ok(actix_files::NamedFile::open(&config.path)?)
}
/// Renders an HTML listing of `dir`, or — when the `download` query parameter
/// is present — streams an archive (tar/tar.gz/zip) of the directory instead.
///
/// Returns a `ServiceResponse` wrapping either the rendered page, the archive
/// byte stream, or a `403` when the requested archive format is disabled.
/// I/O errors from reading the directory or a readme file are propagated.
pub fn directory_listing(
    dir: &actix_files::Directory,
    req: &HttpRequest,
) -> io::Result<ServiceResponse> {
    // The authenticated user (if any) is stashed in the request extensions upstream.
    let extensions = req.extensions();
    let current_user: Option<&CurrentUser> = extensions.get::<CurrentUser>();
    let conf = req.app_data::<crate::MiniserveConfig>().unwrap();
    let serve_path = req.path();

    let base = Path::new(serve_path);
    let random_route_abs = format!("/{}", conf.route_prefix);
    let abs_url = format!(
        "{}://{}{}",
        req.connection_info().scheme(),
        req.connection_info().host(),
        req.uri()
    );
    // Root of the served tree: either "/" itself or exactly the route prefix.
    let is_root = base.parent().is_none() || Path::new(&req.path()) == Path::new(&random_route_abs);

    // Requested directory relative to the route prefix, still percent-encoded.
    let encoded_dir = match base.strip_prefix(random_route_abs) {
        Ok(c_d) => Path::new("/").join(c_d),
        Err(_) => base.to_path_buf(),
    }
    .display()
    .to_string();

    // Build the breadcrumb trail: the root crumb is labelled with the configured
    // title (falling back to the host name); every crumb but the last links to its
    // accumulated percent-encoded path, and the last crumb links to ".".
    let breadcrumbs = {
        let title = conf
            .title
            .clone()
            .unwrap_or_else(|| req.connection_info().host().into());

        let decoded = percent_decode_str(&encoded_dir).decode_utf8_lossy();

        let mut res: Vec<Breadcrumb> = Vec::new();
        let mut link_accumulator = format!("{}/", &conf.route_prefix);

        let mut components = Path::new(&*decoded).components().peekable();

        while let Some(c) = components.next() {
            let name;

            match c {
                Component::RootDir => {
                    name = title.clone();
                }
                Component::Normal(s) => {
                    name = s.to_string_lossy().to_string();
                    link_accumulator
                        .push_str(&(utf8_percent_encode(&name, PATH_SEGMENT).to_string() + "/"));
                }
                _ => name = "".to_string(),
            };

            res.push(Breadcrumb::new(
                name,
                if components.peek().is_some() {
                    link_accumulator.clone()
                } else {
                    // Last component links to the current directory itself.
                    ".".to_string()
                },
            ));
        }
        res
    };

    let query_params = extract_query_parameters(req);
    let mut entries: Vec<Entry> = Vec::new();
    let mut readme: Option<(String, String)> = None;
    // Matches "readme", "readme.md", "readme.txt" (applied to the lowercased name below).
    let readme_rx: Regex = Regex::new("^readme([.](md|txt))?$").unwrap();

    for entry in dir.path.read_dir()? {
        if dir.is_visible(&entry) || conf.show_hidden {
            let entry = entry?;
            let file_name = entry.file_name().to_string_lossy().to_string();
            // For symlinks, re-stat via fs::metadata so size/date describe the
            // link target rather than the link itself.
            let (is_symlink, metadata) = match entry.metadata() {
                Ok(metadata) if metadata.file_type().is_symlink() => {
                    (true, std::fs::metadata(entry.path()))
                }
                res => (false, res),
            };
            let symlink_dest = (is_symlink && conf.show_symlink_info)
                .then(|| entry.path())
                .and_then(|path| std::fs::read_link(path).ok())
                .map(|path| path.to_string_lossy().into_owned());
            let file_url = base
                .join(&utf8_percent_encode(&file_name, PATH_SEGMENT).to_string())
                .to_string_lossy()
                .to_string();

            // Entries whose target metadata is unreadable (e.g. broken symlinks)
            // are skipped entirely via the `else` branch below.
            if let Ok(metadata) = metadata {
                if conf.no_symlinks && is_symlink {
                    continue;
                }
                let last_modification_date = match metadata.modified() {
                    Ok(date) => Some(date),
                    Err(_) => None,
                };

                if metadata.is_dir() {
                    entries.push(Entry::new(
                        file_name,
                        EntryType::Directory,
                        file_url,
                        None,
                        last_modification_date,
                        symlink_dest,
                    ));
                } else if metadata.is_file() {
                    entries.push(Entry::new(
                        file_name.clone(),
                        EntryType::File,
                        file_url,
                        Some(ByteSize::b(metadata.len())),
                        last_modification_date,
                        symlink_dest,
                    ));
                    // Remember a README-like file to render below the listing
                    // (a later match overwrites an earlier one). Markdown is
                    // converted to HTML; anything else is wrapped in <pre>.
                    if conf.readme && readme_rx.is_match(&file_name.to_lowercase()) {
                        let ext = file_name.split('.').last().unwrap().to_lowercase();
                        readme = Some((
                            file_name.to_string(),
                            if ext == "md" {
                                markdown_to_html(
                                    &std::fs::read_to_string(entry.path())?,
                                    &ComrakOptions::default(),
                                )
                            } else {
                                format!("<pre>{}</pre>", &std::fs::read_to_string(entry.path())?)
                            },
                        ));
                    }
                }
            } else {
                continue;
            }
        }
    }

    // Sort by the requested method (default: name). Size and date compare
    // e2 against e1, i.e. they sort descending by default; entries without a
    // size/date are treated as 0 bytes / the Unix epoch.
    match query_params.sort.unwrap_or(SortingMethod::Name) {
        SortingMethod::Name => entries.sort_by(|e1, e2| {
            alphanumeric_sort::compare_str(e1.name.to_lowercase(), e2.name.to_lowercase())
        }),
        SortingMethod::Size => entries.sort_by(|e1, e2| {
            e2.size
                .unwrap_or_else(|| ByteSize::b(0))
                .cmp(&e1.size.unwrap_or_else(|| ByteSize::b(0)))
        }),
        SortingMethod::Date => entries.sort_by(|e1, e2| {
            e2.last_modification_date
                .unwrap_or(SystemTime::UNIX_EPOCH)
                .cmp(&e1.last_modification_date.unwrap_or(SystemTime::UNIX_EPOCH))
        }),
    };

    if let Some(SortingOrder::Descending) = query_params.order {
        entries.reverse()
    }

    // Optionally group directories before files; sort_by_key is stable, so the
    // ordering established above is preserved within each group.
    if conf.dirs_first {
        entries.sort_by_key(|e| !e.is_dir());
    }

    if let Some(archive_method) = query_params.download {
        // Refuse formats that are disabled in the configuration.
        if !archive_method.is_enabled(conf.tar_enabled, conf.tar_gz_enabled, conf.zip_enabled) {
            return Ok(ServiceResponse::new(
                req.clone(),
                HttpResponse::Forbidden()
                    .content_type(mime::TEXT_PLAIN_UTF_8)
                    .body("Archive creation is disabled."),
            ));
        }
        log::info!(
            "Creating an archive ({extension}) of {path}...",
            extension = archive_method.extension(),
            path = &dir.path.display().to_string()
        );

        let file_name = format!(
            "{}.{}",
            dir.path.file_name().unwrap().to_str().unwrap(),
            archive_method.extension()
        );

        // Build the archive on a worker thread and stream it through a bounded
        // channel, so the full archive is never held in memory at once.
        let (tx, rx) = futures::channel::mpsc::channel::<io::Result<actix_web::web::Bytes>>(10);
        let pipe = crate::pipe::Pipe::new(tx);

        let dir = dir.path.to_path_buf();
        let skip_symlinks = conf.no_symlinks;
        std::thread::spawn(move || {
            if let Err(err) = archive_method.create_archive(dir, skip_symlinks, pipe) {
                // The response stream is already underway; all we can do is log.
                log::error!("Error during archive creation: {:?}", err);
            }
        });

        Ok(ServiceResponse::new(
            req.clone(),
            HttpResponse::Ok()
                .content_type(archive_method.content_type())
                .append_header(archive_method.content_encoding())
                .append_header(("Content-Transfer-Encoding", "binary"))
                .append_header((
                    // {:?} quotes the name, yielding filename="...".
                    "Content-Disposition",
                    format!("attachment; filename={:?}", file_name),
                ))
                .body(actix_web::body::BodyStream::new(rx)),
        ))
    } else {
        // Normal case: render the listing page.
        Ok(ServiceResponse::new(
            req.clone(),
            HttpResponse::Ok().content_type(mime::TEXT_HTML_UTF_8).body(
                renderer::page(
                    entries,
                    readme,
                    abs_url,
                    is_root,
                    query_params,
                    &breadcrumbs,
                    &encoded_dir,
                    conf,
                    current_user,
                )
                .into_string(),
            ),
        ))
    }
}
/// Parses the request's query string into `QueryParameters`.
///
/// On a parse failure the error chain is logged and default parameters are
/// returned, so a malformed query never fails the request.
pub fn extract_query_parameters(req: &HttpRequest) -> QueryParameters {
    Query::<QueryParameters>::from_query(req.query_string())
        .map(|Query(params)| params)
        .unwrap_or_else(|e| {
            let err = ContextualError::ParseError("query parameters".to_string(), e.to_string());
            errors::log_error_chain(err.to_string());
            QueryParameters::default()
        })
}