mod file_system;
mod files_map;
mod metadata;
mod realpath;
use crate::{
api::{
app::consts::*,
fetch::Range,
safeurl::{SafeContentType, SafeDataType, SafeUrl, XorUrl},
Safe,
},
Error, Result,
};
use file_system::{file_system_dir_walk, file_system_single_file, normalise_path_separator};
use files_map::add_or_update_file_item;
use log::{debug, info, warn};
use relative_path::RelativePath;
use std::{collections::BTreeMap, fs, path::Path};
pub(crate) use metadata::FileMeta;
pub(crate) use realpath::RealPath;
pub use files_map::{FileItem, FilesMap, GetAttr};
pub type ProcessedFiles = BTreeMap<String, (String, String)>;
const FILES_CONTAINER_TYPE_TAG: u64 = 1_100;
const ERROR_MSG_NO_FILES_CONTAINER_FOUND: &str = "No FilesContainer found at this address";
impl Safe {
/// Create a FilesContainer on the network.
///
/// When `location` is `Some`, the local path is walked (honouring `recursive`
/// and `follow_links`) and the files found are uploaded and indexed into the
/// container's `FilesMap`; when `None`, an empty container is created.
///
/// With `dry_run` set, nothing is written to the network and the returned
/// XOR-URL is an empty string.
///
/// Returns the container's XOR-URL, the report of processed local files and
/// the generated `FilesMap`.
pub async fn files_container_create(
&mut self,
location: Option<&str>,
dest: Option<&str>,
recursive: bool,
follow_links: bool,
dry_run: bool,
) -> Result<(XorUrl, ProcessedFiles, FilesMap)> {
let (processed_files, files_map) = match location {
Some(path) => {
let mut processed_files =
file_system_dir_walk(self, path, recursive, follow_links, dry_run).await?;
// Build the FilesMap from the walked files; `processed_files` is
// updated in place with the per-file outcomes.
let files_map = files_map_create(
self,
&mut processed_files,
path,
dest,
follow_links,
dry_run,
)
.await?;
(processed_files, files_map)
}
None => (ProcessedFiles::default(), FilesMap::default()),
};
let xorurl = if dry_run {
// Dry run: nothing was stored, so there is no address to report.
"".to_string()
} else {
// The FilesMap is stored as its own blob; the Sequence holds only the
// link to it, one entry per container version.
let files_map_xorurl = self.store_files_map(&files_map).await?;
let xorname = self
.safe_client
.store_sequence(
files_map_xorurl.as_bytes(),
None,
FILES_CONTAINER_TYPE_TAG,
None,
false,
)
.await?;
SafeUrl::encode_sequence_data(
xorname,
FILES_CONTAINER_TYPE_TAG,
SafeContentType::FilesContainer,
self.xorurl_base,
false,
)?
};
Ok((xorurl, processed_files, files_map))
}
/// Fetch the FilesContainer at `url` (resolving NRS names/links first),
/// returning its current version and `FilesMap`.
pub async fn files_container_get(&mut self, url: &str) -> Result<(u64, FilesMap)> {
debug!("Getting files container from: {:?}", url);
let (safe_url, _) = self.parse_and_resolve_url(url).await?;
self.fetch_files_container(&safe_url).await
}
/// Fetch the FilesContainer behind an already-resolved `safe_url`.
///
/// Reads the versioned Sequence entry (which holds a link to the serialised
/// FilesMap blob), fetches that blob and deserialises it. An empty container
/// yields version 0 with an empty map; other failures are mapped onto
/// `ContentNotFound`, `VersionNotFound` or `NetDataError`.
pub(crate) async fn fetch_files_container(
&mut self,
safe_url: &SafeUrl,
) -> Result<(u64, FilesMap)> {
match self.fetch_sequence(safe_url).await {
Ok((version, serialised_files_map)) => {
debug!("Files map retrieved.... v{:?}", &version);
// The sequence entry is a UTF-8 XOR-URL pointing at the FilesMap blob.
let files_map_xorurl = SafeUrl::from_url(
&String::from_utf8(serialised_files_map).map_err(|err| {
Error::ContentError(format!(
"Couldn't parse the FilesMap link stored in the FilesContainer: {:?}",
err
))
})?,
)?;
let serialised_files_map = self.fetch_public_blob(&files_map_xorurl, None).await?;
let files_map =
serde_json::from_slice(serialised_files_map.as_slice()).map_err(|err| {
Error::ContentError(format!(
"Couldn't deserialise the FilesMap stored in the FilesContainer: {:?}",
err
))
})?;
Ok((version, files_map))
}
Err(Error::EmptyContent(_)) => {
// A container with no versions yet is not an error: report v0, empty map.
warn!("FilesContainer found at \"{:?}\" was empty", safe_url);
Ok((0, FilesMap::default()))
}
Err(Error::ContentNotFound(_)) => Err(Error::ContentNotFound(
ERROR_MSG_NO_FILES_CONTAINER_FOUND.to_string(),
)),
Err(Error::VersionNotFound(_)) => Err(Error::VersionNotFound(format!(
"Version '{}' is invalid for FilesContainer found at \"{}\"",
safe_url.content_version().unwrap_or(0),
safe_url,
))),
Err(err) => Err(Error::NetDataError(format!(
"Failed to get current version: {}",
err
))),
}
}
/// Sync the local files under `location` with the FilesContainer at `url`,
/// appending a new container version when anything changed.
///
/// * `delete` - remove container entries with no local counterpart (requires
///   `recursive`)
/// * `update_nrs` - also re-point the NRS name at the new version (`url` must
///   then be an NRS URL)
/// * `dry_run` - compute the resulting map/version without writing anything
///
/// Returns the (possibly bumped) version, the report of processed files and
/// the resulting `FilesMap`.
#[allow(clippy::too_many_arguments)]
pub async fn files_container_sync(
&mut self,
location: &str,
url: &str,
recursive: bool,
follow_links: bool,
delete: bool,
update_nrs: bool,
dry_run: bool,
) -> Result<(u64, ProcessedFiles, FilesMap)> {
if delete && !recursive {
return Err(Error::InvalidInput(
"'delete' is not allowed if 'recursive' is not set".to_string(),
));
}
// Validate the raw URL before resolving it: it must be versionless, and
// NRS updates only make sense when the target is an NRS URL.
let safe_url = Safe::parse_url(url)?;
if safe_url.content_version().is_some() {
return Err(Error::InvalidInput(format!(
"The target URL cannot contain a version: {}",
url
)));
};
if update_nrs && safe_url.content_type() != SafeContentType::NrsMapContainer {
return Err(Error::InvalidInput(
"'update-nrs' is not allowed since the URL provided is not an NRS URL".to_string(),
));
}
let (mut safe_url, _) = self.parse_and_resolve_url(url).await?;
safe_url.set_content_version(None);
let (current_version, current_files_map): (u64, FilesMap) =
self.fetch_files_container(&safe_url).await?;
// Walk the local tree in dry-run mode first; actual uploads happen inside
// files_map_sync only for files that are new or modified.
let processed_files =
file_system_dir_walk(self, location, recursive, follow_links, true).await?;
let dest_path = Some(safe_url.path());
let (processed_files, new_files_map, success_count): (ProcessedFiles, FilesMap, u64) =
files_map_sync(
self,
current_files_map,
location,
processed_files,
dest_path,
delete,
dry_run,
false,
true,
follow_links,
)
.await?;
let version = self
.append_version_to_files_container(
success_count,
current_version,
&new_files_map,
url,
safe_url,
dry_run,
update_nrs,
)
.await?;
Ok((version, processed_files, new_files_map))
}
/// Add a single file to the FilesContainer at `url`.
///
/// `source_file` may be a local path (the file is uploaded) or a `safe://`
/// link to an existing public blob (only a link entry is added). The
/// destination file path is taken from the path component of `url`.
pub async fn files_container_add(
&mut self,
source_file: &str,
url: &str,
force: bool,
update_nrs: bool,
follow_links: bool,
dry_run: bool,
) -> Result<(u64, ProcessedFiles, FilesMap)> {
let (safe_url, current_version, current_files_map) =
validate_files_add_params(self, source_file, url, update_nrs).await?;
let dest_path = safe_url.path();
let (processed_files, new_files_map, success_count) = if source_file.starts_with("safe://")
{
files_map_add_link(self, current_files_map, source_file, dest_path, force).await?
} else {
// Local file: process it in dry-run mode, then let files_map_sync do the
// actual upload/compare against the current map.
let processed_files = file_system_single_file(self, source_file, true).await?;
files_map_sync(
self,
current_files_map,
source_file,
processed_files,
Some(dest_path),
false,
dry_run,
force,
false,
follow_links,
)
.await?
};
let version = self
.append_version_to_files_container(
success_count,
current_version,
&new_files_map,
url,
safe_url,
dry_run,
update_nrs,
)
.await?;
Ok((version, processed_files, new_files_map))
}
/// Add raw `data` as a file in the FilesContainer at `url`: the bytes are
/// stored as a public blob and a link entry is added under the path
/// component of `url`.
pub async fn files_container_add_from_raw(
&mut self,
data: &[u8],
url: &str,
force: bool,
update_nrs: bool,
dry_run: bool,
) -> Result<(u64, ProcessedFiles, FilesMap)> {
let (safe_url, current_version, current_files_map) =
validate_files_add_params(self, "", url, update_nrs).await?;
let dest_path = safe_url.path();
let new_file_xorurl = self.files_store_public_blob(data, None, false).await?;
let (processed_files, new_files_map, success_count) =
files_map_add_link(self, current_files_map, &new_file_xorurl, dest_path, force).await?;
let version = self
.append_version_to_files_container(
success_count,
current_version,
&new_files_map,
url,
safe_url,
dry_run,
update_nrs,
)
.await?;
Ok((version, processed_files, new_files_map))
}
/// Remove the file (or, with `recursive`, the whole folder) named by the
/// path component of `url` from the FilesContainer, appending a new version.
pub async fn files_container_remove_path(
&mut self,
url: &str,
recursive: bool,
update_nrs: bool,
dry_run: bool,
) -> Result<(u64, ProcessedFiles, FilesMap)> {
let safe_url = Safe::parse_url(url)?;
if safe_url.content_version().is_some() {
return Err(Error::InvalidInput(format!(
"The target URL cannot contain a version: {}",
url
)));
};
let dest_path = safe_url.path();
if dest_path.is_empty() {
return Err(Error::InvalidInput(
"The destination URL should include a target file path".to_string(),
));
}
if update_nrs && safe_url.content_type() != SafeContentType::NrsMapContainer {
return Err(Error::InvalidInput(
"'update-nrs' is not allowed since the URL provided is not an NRS URL".to_string(),
));
}
let (mut safe_url, _) = self.parse_and_resolve_url(url).await?;
safe_url.set_content_version(None);
let (current_version, files_map): (u64, FilesMap) =
self.fetch_files_container(&safe_url).await?;
let (processed_files, new_files_map, success_count) =
files_map_remove_path(dest_path, files_map, recursive)?;
let version = self
.append_version_to_files_container(
success_count,
current_version,
&new_files_map,
url,
safe_url,
dry_run,
update_nrs,
)
.await?;
Ok((version, processed_files, new_files_map))
}
/// Store the new FilesMap and append a new version entry to the container's
/// Sequence, optionally re-pointing the NRS name (`update_nrs`) at it.
///
/// When `success_count` is 0 nothing changed, so the current version is
/// returned untouched; with `dry_run` the bumped version is returned but
/// nothing is written to the network.
#[allow(clippy::too_many_arguments)]
async fn append_version_to_files_container(
&mut self,
success_count: u64,
current_version: u64,
new_files_map: &FilesMap,
url: &str,
mut safe_url: SafeUrl,
dry_run: bool,
update_nrs: bool,
) -> Result<u64> {
let version = if success_count == 0 {
current_version
} else if dry_run {
current_version + 1
} else {
let files_map_xorurl = self.store_files_map(new_files_map).await?;
let xorname = safe_url.xorname();
let type_tag = safe_url.type_tag();
self.safe_client
.append_to_sequence(files_map_xorurl.as_bytes(), xorname, type_tag, false)
.await?;
let new_version = current_version + 1;
if update_nrs {
// Re-point the NRS name at the freshly appended version.
safe_url.set_content_version(Some(new_version));
let new_link_for_nrs = safe_url.to_string();
let _ = self
.nrs_map_container_add(url, &new_link_for_nrs, false, true, false)
.await?;
}
new_version
};
Ok(version)
}
/// Store `data` on the network as a public blob and return its XOR-URL.
///
/// `media_type`, when provided, must be one supported by `SafeUrl`
/// (an `InvalidMediaType` error is returned otherwise); pass `None` for the
/// content to be treated as raw.
pub async fn files_store_public_blob(
&mut self,
data: &[u8],
media_type: Option<&str>,
dry_run: bool,
) -> Result<XorUrl> {
let content_type = media_type.map_or_else(
|| Ok(SafeContentType::Raw),
|media_type_str| {
if SafeUrl::is_media_type_supported(media_type_str) {
Ok(SafeContentType::MediaType(media_type_str.to_string()))
} else {
Err(Error::InvalidMediaType(format!(
"Media-type '{}' not supported. You can pass 'None' as the 'media_type' for this content to be treated as raw",
media_type_str
)))
}
},
)?;
let xorname = self.safe_client.store_public_blob(&data, dry_run).await?;
SafeUrl::encode_blob(xorname, content_type, self.xorurl_base)
}
/// Fetch the public blob at `url` (resolving NRS names/links first),
/// optionally restricted to a byte `range`.
pub async fn files_get_public_blob(&mut self, url: &str, range: Range) -> Result<Vec<u8>> {
let (safe_url, _) = self.parse_and_resolve_url(url).await?;
self.fetch_public_blob(&safe_url, range).await
}
/// Fetch the public blob behind an already-resolved `safe_url`, optionally
/// restricted to a byte `range`.
pub(crate) async fn fetch_public_blob(
&mut self,
safe_url: &SafeUrl,
range: Range,
) -> Result<Vec<u8>> {
self.safe_client
.get_public_blob(safe_url.xorname(), range)
.await
}
/// Serialise `files_map` as JSON, store it as a public blob, and return the
/// blob's XOR-URL (this is what gets appended to the container's Sequence).
async fn store_files_map(&mut self, files_map: &FilesMap) -> Result<String> {
let serialised_files_map = serde_json::to_string(&files_map).map_err(|err| {
Error::Serialisation(format!(
"Couldn't serialise the FilesMap generated: {:?}",
err
))
})?;
let files_map_xorurl = self
.files_store_public_blob(serialised_files_map.as_bytes(), None, false)
.await?;
Ok(files_map_xorurl)
}
}
/// Validate the common parameters of the `files_container_add*` APIs.
///
/// Checks that the target `url` is versionless, that `update_nrs` is only used
/// with NRS URLs, and — when `source_file` is a `safe://` link — that it
/// targets a `PublicBlob` and that the destination URL names a file path.
///
/// Returns the resolved (versionless) target URL together with the container's
/// current version and `FilesMap`.
async fn validate_files_add_params(
    safe: &mut Safe,
    source_file: &str,
    url: &str,
    update_nrs: bool,
) -> Result<(SafeUrl, u64, FilesMap)> {
    let safe_url = Safe::parse_url(url)?;
    if safe_url.content_version().is_some() {
        return Err(Error::InvalidInput(format!(
            "The target URL cannot contain a version: {}",
            url
        )));
    };
    // `update_nrs` only makes sense when the target is an NRS name we can re-point.
    if update_nrs && safe_url.content_type() != SafeContentType::NrsMapContainer {
        return Err(Error::InvalidInput(
            "'update-nrs' is not allowed since the URL provided is not an NRS URL".to_string(),
        ));
    }
    let (mut safe_url, _) = safe.parse_and_resolve_url(url).await?;
    safe_url.set_content_version(None);
    let (current_version, current_files_map): (u64, FilesMap) =
        safe.fetch_files_container(&safe_url).await?;
    let dest_path = safe_url.path().to_string();
    if source_file.starts_with("safe://") {
        let source_safe_url = Safe::parse_url(source_file)?;
        if source_safe_url.data_type() != SafeDataType::PublicBlob {
            // Report the source's data type: that is what the check above is about
            // (previously this printed the content type, which is a different axis).
            return Err(Error::InvalidInput(format!(
                "The source URL should target a file ('{}'), but the URL provided targets a '{}'",
                SafeDataType::PublicBlob,
                source_safe_url.data_type()
            )));
        }
        if dest_path.is_empty() {
            return Err(Error::InvalidInput(
                "The destination URL should include a target file path since we are adding a link"
                    .to_string(),
            ));
        }
    }
    Ok((safe_url, current_version, current_files_map))
}
/// Compute the normalised `(location_base, dest_base)` path pair used to map
/// local file paths onto FilesContainer paths.
///
/// The destination base always ends with '/'. Following `cp`-style semantics,
/// when `location` does NOT end with '/', its last path component (the folder
/// name itself) is appended to the destination base, so the folder is created
/// inside the destination rather than its contents being spilled into it.
fn get_base_paths(location: &str, dest_path: Option<&str>) -> (String, String) {
    let location_base_path = if location == "." {
        "./".to_string()
    } else {
        normalise_path_separator(location)
    };
    // Default to the container root when no (or an empty) destination is given.
    let new_dest_path = match dest_path {
        None | Some("") => "/".to_string(),
        Some(path) => path.to_string(),
    };
    let dest_base_path = if new_dest_path.ends_with('/') {
        if location_base_path.ends_with('/') {
            new_dest_path
        } else {
            // "<dest>/<last component of location>": rsplit's first item is the
            // final path component (the whole string when there is no '/').
            let dir_name = location_base_path.rsplit('/').next().unwrap_or_default();
            format!("{}{}", new_dest_path, dir_name)
        }
    } else {
        format!("{}/", new_dest_path)
    };
    (location_base_path, dest_base_path)
}
/// Sync a set of locally-processed files (`new_content`) against an existing
/// `FilesMap`, producing the updated map and a report of what happened.
///
/// * `delete` - mark deleted any current entry with no local counterpart
/// * `force` - overwrite existing entries unconditionally
/// * `compare_file_content` - upload-and-compare to detect modified files
///
/// Returns `(processed_files, updated_files_map, success_count)`, where
/// `success_count` is the number of effective changes (callers use it to decide
/// whether a new container version is needed).
#[allow(clippy::too_many_arguments)]
async fn files_map_sync(
    safe: &mut Safe,
    mut current_files_map: FilesMap,
    location: &str,
    new_content: ProcessedFiles,
    dest_path: Option<&str>,
    delete: bool,
    dry_run: bool,
    force: bool,
    compare_file_content: bool,
    follow_links: bool,
) -> Result<(ProcessedFiles, FilesMap, u64)> {
    let (location_base_path, dest_base_path) = get_base_paths(location, dest_path);
    let mut updated_files_map = FilesMap::new();
    let mut processed_files = ProcessedFiles::new();
    let mut success_count = 0;
    for (local_file_name, _) in new_content
        .iter()
        .filter(|(_, (change, _))| change != CONTENT_ERROR_SIGN)
    {
        let file_path = Path::new(&local_file_name);
        // Map the local path onto the destination namespace and normalise it to
        // an absolute, '/'-separated name without a trailing slash.
        let file_name = RelativePath::new(
            &local_file_name
                .to_string()
                .replace(&location_base_path, &dest_base_path),
        )
        .normalize();
        let mut normalised_file_name = format!("/{}", normalise_path_separator(file_name.as_str()))
            .trim_end_matches('/')
            .to_string();
        if normalised_file_name.is_empty() {
            normalised_file_name = "/".to_string();
        }
        match current_files_map.get(&normalised_file_name) {
            None => {
                // Brand-new file: add it, then carry over any ancestor folder items.
                if add_or_update_file_item(
                    safe,
                    &local_file_name,
                    &normalised_file_name,
                    &file_path,
                    &FileMeta::from_path(&local_file_name, follow_links)?,
                    None,
                    false,
                    dry_run,
                    &mut updated_files_map,
                    &mut processed_files,
                )
                .await
                {
                    success_count += 1;
                    promote_ancestors(
                        &normalised_file_name,
                        &mut current_files_map,
                        &mut updated_files_map,
                    );
                }
            }
            Some(file_item) => {
                let is_modified =
                    is_file_item_modified(safe, &Path::new(local_file_name), file_item).await;
                if force || (compare_file_content && is_modified) {
                    // Replace the existing entry with the local file's content.
                    if add_or_update_file_item(
                        safe,
                        &local_file_name,
                        &normalised_file_name,
                        &file_path,
                        &FileMeta::from_path(&local_file_name, follow_links)?,
                        None,
                        true,
                        dry_run,
                        &mut updated_files_map,
                        &mut processed_files,
                    )
                    .await
                    {
                        success_count += 1;
                    }
                } else {
                    // Keep the existing entry; report a skip only in 'add' mode
                    // (in sync mode an unchanged file is silently kept).
                    updated_files_map.insert(normalised_file_name.to_string(), file_item.clone());
                    if !force && !compare_file_content {
                        let comp_str = if is_modified { "different" } else { "same" };
                        processed_files.insert(
                            local_file_name.to_string(),
                            (
                                CONTENT_ERROR_SIGN.to_string(),
                                format!(
                                    "File named \"{}\" with {} content already exists on target. Use the 'force' flag to replace it",
                                    normalised_file_name, comp_str
                                ),
                            ),
                        );
                        info!("Skipping file \"{}\" since a file named \"{}\" with {} content already exists on target. You can use the 'force' flag to replace the existing file with the new one", local_file_name, normalised_file_name, comp_str);
                    }
                }
                // This entry has been handled; drop it from the pending map and
                // carry over its ancestor folder items.
                current_files_map.remove(&normalised_file_name);
                promote_ancestors(
                    &normalised_file_name,
                    &mut current_files_map,
                    &mut updated_files_map,
                );
            }
        }
    }
    // Whatever remains in `current_files_map` has no local counterpart: keep it
    // (the default) or report it deleted when `delete` was requested.
    current_files_map.iter().for_each(|(file_name, file_item)| {
        if !delete {
            updated_files_map.insert(file_name.to_string(), file_item.clone());
        } else {
            processed_files.insert(
                file_name.to_string(),
                (
                    CONTENT_DELETED_SIGN.to_string(),
                    file_item
                        .get(PREDICATE_LINK)
                        .unwrap_or(&String::default())
                        .to_string(),
                ),
            );
            success_count += 1;
        }
    });
    Ok((processed_files, updated_files_map, success_count))
}

/// Move every ancestor (parent-folder) entry of `name` from `current` into
/// `updated`, so folder FileItems survive a sync that touches their children.
fn promote_ancestors(name: &str, current: &mut FilesMap, updated: &mut FilesMap) {
    let mut trail = Vec::<&str>::new();
    for part in name.split('/') {
        trail.push(part);
        let ancestor = if trail.len() > 1 {
            trail.join("/")
        } else {
            "/".to_string()
        };
        if ancestor != name {
            if let Some(fi) = current.get(&ancestor) {
                updated.insert(ancestor.clone(), fi.clone());
                current.remove(&ancestor);
            }
        }
    }
}
/// Check whether the local file differs from the `FileItem` stored on the
/// network.
///
/// Only regular files are ever considered modified: for any other item kind
/// the answer is `false`. Detection works by doing a dry-run upload of the
/// local file and comparing the resulting XOR-URL with the stored link; an
/// upload failure is treated as "not modified".
async fn is_file_item_modified(
    safe: &mut Safe,
    local_filename: &Path,
    file_item: &FileItem,
) -> bool {
    if !FileMeta::filetype_is_file(&file_item[PREDICATE_TYPE]) {
        return false;
    }
    match upload_file_to_net(safe, local_filename, true).await {
        Ok(local_xorurl) => file_item[PREDICATE_LINK] != local_xorurl,
        Err(_) => false,
    }
}
/// Add (or replace) an entry in `files_map` making `file_name` point at the
/// existing network content `file_link`.
///
/// If `file_link` is not a valid Safe URL, the failure is recorded in the
/// returned `ProcessedFiles` and the map is returned unchanged. An existing
/// entry with the same name is replaced only when its link differs and `force`
/// is set; otherwise a skip is reported.
///
/// Returns `(processed_files, files_map, success_count)`.
async fn files_map_add_link(
    safe: &mut Safe,
    mut files_map: FilesMap,
    file_link: &str,
    file_name: &str,
    force: bool,
) -> Result<(ProcessedFiles, FilesMap, u64)> {
    let mut processed_files = ProcessedFiles::new();
    let mut success_count = 0;
    match SafeUrl::from_url(file_link) {
        Err(err) => {
            processed_files.insert(
                file_link.to_string(),
                (CONTENT_ERROR_SIGN.to_string(), format!("<{}>", err)),
            );
            info!("Skipping file \"{}\". {}", file_link, err);
            Ok((processed_files, files_map, success_count))
        }
        Ok(safe_url) => {
            // There is no local file behind a link: the path is empty, the size
            // unknown, and the type is derived from the URL's content type.
            let file_path = Path::new("");
            let file_type = match safe_url.content_type() {
                SafeContentType::MediaType(media_type) => media_type,
                other => format!("{}", other),
            };
            let file_size = "";
            match files_map.get(file_name) {
                Some(current_file_item) => {
                    // Keep the existing item's metadata, overriding type/size.
                    // NOTE: fixed mis-encoded `&` in this call (was garbled as `¤t_file_item`).
                    let mut file_meta = FileMeta::from_file_item(&current_file_item);
                    file_meta.file_type = file_type;
                    file_meta.file_size = file_size.to_string();
                    // Only regular files are compared by link; other kinds are
                    // treated as unmodified.
                    let is_modified = if file_meta.is_file() {
                        current_file_item[PREDICATE_LINK] != file_link
                    } else {
                        false
                    };
                    if is_modified {
                        if force {
                            if add_or_update_file_item(
                                safe,
                                file_name,
                                file_name,
                                &file_path,
                                &file_meta,
                                Some(file_link),
                                true,
                                true,
                                &mut files_map,
                                &mut processed_files,
                            )
                            .await
                            {
                                success_count += 1;
                            }
                        } else {
                            processed_files.insert(file_name.to_string(), (CONTENT_ERROR_SIGN.to_string(), format!("File named \"{}\" already exists on target. Use the 'force' flag to replace it", file_name)));
                            info!("Skipping file \"{}\" since a file with name \"{}\" already exists on target. You can use the 'force' flag to replace the existing file with the new one", file_link, file_name);
                        }
                    } else {
                        processed_files.insert(
                            file_link.to_string(),
                            (
                                CONTENT_ERROR_SIGN.to_string(),
                                format!(
                                    "File named \"{}\" already exists on target with same link",
                                    file_name
                                ),
                            ),
                        );
                        info!("Skipping file \"{}\" since a file with name \"{}\" already exists on target with the same link", file_link, file_name);
                    }
                }
                None => {
                    if add_or_update_file_item(
                        safe,
                        file_name,
                        file_name,
                        &file_path,
                        &FileMeta::from_type_and_size(&file_type, &file_size),
                        Some(file_link),
                        false,
                        true,
                        &mut files_map,
                        &mut processed_files,
                    )
                    .await
                    {
                        success_count += 1;
                    }
                }
            };
            Ok((processed_files, files_map, success_count))
        }
    }
}
/// Remove `dest_path` from `files_map`.
///
/// With `recursive` set, every entry under the folder `dest_path` is removed;
/// otherwise exactly the file at `dest_path` is removed, failing with a
/// `ContentError` when no such entry exists.
///
/// Returns the report of deleted entries, the resulting map, and the number of
/// removals performed.
fn files_map_remove_path(
    dest_path: &str,
    mut files_map: FilesMap,
    recursive: bool,
) -> Result<(ProcessedFiles, FilesMap, u64)> {
    let mut processed_files = ProcessedFiles::default();
    if recursive {
        // Normalise the folder prefix so "/a" and "/a/" behave identically.
        let folder_path = if dest_path.ends_with('/') {
            dest_path.to_string()
        } else {
            format!("{}/", dest_path)
        };
        let mut success_count = 0;
        let mut new_files_map = FilesMap::default();
        for (file_path, file_item) in files_map.iter() {
            if !file_path.starts_with(&folder_path) {
                // Outside the target folder: keep the entry untouched.
                new_files_map.insert(file_path.to_string(), file_item.clone());
                continue;
            }
            let link = file_item
                .get(PREDICATE_LINK)
                .unwrap_or(&String::default())
                .to_string();
            processed_files.insert(
                file_path.to_string(),
                (CONTENT_DELETED_SIGN.to_string(), link),
            );
            success_count += 1;
        }
        Ok((processed_files, new_files_map, success_count))
    } else {
        let file_item = files_map
            .remove(dest_path)
            .ok_or_else(|| Error::ContentError(format!(
                "No content found matching the \"{}\" path on the target FilesContainer. If you are trying to remove a folder rather than a file, you need to pass the 'recursive' flag",
                dest_path
            )))?;
        let link = file_item
            .get(PREDICATE_LINK)
            .unwrap_or(&String::default())
            .to_string();
        processed_files.insert(
            dest_path.to_string(),
            (CONTENT_DELETED_SIGN.to_string(), link),
        );
        Ok((processed_files, files_map, 1))
    }
}
async fn upload_file_to_net(safe: &mut Safe, path: &Path, dry_run: bool) -> Result<XorUrl> {
let data = fs::read(path).map_err(|err| {
Error::InvalidInput(format!("Failed to read file from local location: {}", err))
})?;
let mime_type = mime_guess::from_path(&path);
match safe
.files_store_public_blob(&data, mime_type.first_raw(), dry_run)
.await
{
Ok(xorurl) => Ok(xorurl),
Err(err) => {
if let Error::InvalidMediaType(_) = err {
safe.files_store_public_blob(&data, None, dry_run).await
} else {
Err(err)
}
}
}
}
/// Build a `FilesMap` from the already-processed local files in `content`,
/// mapping each local path under `location` onto its destination path.
///
/// Entries flagged with `CONTENT_ERROR_SIGN` are skipped. `content` is also
/// updated in place by `add_or_update_file_item` with per-file outcomes, which
/// is why the keys are snapshotted before iterating.
async fn files_map_create(
    safe: &mut Safe,
    content: &mut ProcessedFiles,
    location: &str,
    dest_path: Option<&str>,
    follow_links: bool,
    dry_run: bool,
) -> Result<FilesMap> {
    let mut files_map = FilesMap::default();
    let (location_base_path, dest_base_path) = get_base_paths(location, dest_path);
    // Snapshot the keys up front since `content` is mutated during the loop.
    let keys = content.keys().cloned().collect::<Vec<_>>();
    for file_name in keys {
        let (change, link) = &content[&file_name].clone();
        if change == CONTENT_ERROR_SIGN {
            continue;
        }
        // Map the local path onto the destination namespace, then normalise it
        // to an absolute '/'-separated container path without a trailing slash.
        let new_file_name = RelativePath::new(
            &file_name
                .to_string()
                .replace(&location_base_path, &dest_base_path),
        )
        .normalize();
        let final_name = format!("/{}", normalise_path_separator(new_file_name.as_str()))
            .trim_end_matches('/')
            .to_string();
        debug!("FileItem item name: {:?}", &file_name);
        // The walk already uploaded the content (unless errored), so the link
        // is reused here rather than uploading again.
        add_or_update_file_item(
            safe,
            &file_name,
            &final_name,
            &Path::new(&file_name),
            &FileMeta::from_path(&file_name, follow_links)?,
            if link.is_empty() { None } else { Some(&link) },
            false,
            dry_run,
            &mut files_map,
            content,
        )
        .await;
    }
    Ok(files_map)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
api::app::test_helpers::{new_safe_instance, random_nrs_name},
retry_loop, retry_loop_for_pattern,
};
use anyhow::{anyhow, bail, Result};
use rand::{distributions::Alphanumeric, thread_rng, Rng};
const TESTDATA_PUT_FILEITEM_COUNT: usize = 11;
const TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT: usize = 12;
const SUBFOLDER_PUT_FILEITEM_COUNT: usize = 2;
const SUBFOLDER_NO_SLASH_PUT_FILEITEM_COUNT: usize = 3;
#[tokio::test]
async fn test_files_map_create() -> Result<()> {
let mut safe = new_safe_instance().await?;
let mut processed_files = ProcessedFiles::new();
let first_xorurl = SafeUrl::from_url("safe://top_xorurl")?.to_xorurl_string();
let second_xorurl = SafeUrl::from_url("safe://second_xorurl")?.to_xorurl_string();
processed_files.insert(
"../testdata/test.md".to_string(),
(CONTENT_ADDED_SIGN.to_string(), first_xorurl.clone()),
);
processed_files.insert(
"../testdata/subfolder/subexists.md".to_string(),
(CONTENT_ADDED_SIGN.to_string(), second_xorurl.clone()),
);
let files_map = files_map_create(
&mut safe,
&mut processed_files,
"../testdata",
Some(""),
true,
false,
)
.await?;
assert_eq!(files_map.len(), 2);
let file_item1 = &files_map["/testdata/test.md"];
assert_eq!(file_item1[PREDICATE_LINK], first_xorurl);
assert_eq!(file_item1[PREDICATE_TYPE], "text/markdown");
assert_eq!(file_item1[PREDICATE_SIZE], "12");
let file_item2 = &files_map["/testdata/subfolder/subexists.md"];
assert_eq!(file_item2[PREDICATE_LINK], second_xorurl);
assert_eq!(file_item2[PREDICATE_TYPE], "text/markdown");
assert_eq!(file_item2[PREDICATE_SIZE], "23");
Ok(())
}
#[tokio::test]
async fn test_files_container_create_empty() -> Result<()> {
let mut safe = new_safe_instance().await?;
let (xorurl, processed_files, files_map) = safe
.files_container_create(None, None, false, false, false)
.await?;
assert!(xorurl.starts_with("safe://"));
assert_eq!(processed_files.len(), 0);
assert_eq!(files_map.len(), 0);
let _ = retry_loop!(safe.fetch(&xorurl, None));
let (version, new_processed_files, new_files_map) = safe
.files_container_add("../testdata/test.md", &xorurl, false, false, false, false)
.await?;
assert_eq!(version, 1);
assert_eq!(new_processed_files.len(), 1);
assert_eq!(new_files_map.len(), 1);
let filename = "../testdata/test.md";
assert_eq!(new_processed_files[filename].0, CONTENT_ADDED_SIGN);
assert_eq!(
new_processed_files[filename].1,
new_files_map["/test.md"][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_store_pub_blob() -> Result<()> {
let mut safe = new_safe_instance().await?;
let random_blob_content: String =
thread_rng().sample_iter(&Alphanumeric).take(20).collect();
let file_xorurl = safe
.files_store_public_blob(random_blob_content.as_bytes(), None, false)
.await?;
let retrieved = retry_loop!(safe.files_get_public_blob(&file_xorurl, None));
assert_eq!(retrieved, random_blob_content.as_bytes());
Ok(())
}
#[tokio::test]
async fn test_files_container_create_file() -> Result<()> {
let mut safe = new_safe_instance().await?;
let filename = "../testdata/test.md";
let (xorurl, processed_files, files_map) = safe
.files_container_create(Some(filename), None, false, false, false)
.await?;
assert!(xorurl.starts_with("safe://"));
assert_eq!(processed_files.len(), 1);
assert_eq!(files_map.len(), 1);
let file_path = "/test.md";
assert_eq!(processed_files[filename].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename].1,
files_map[file_path][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_container_create_dry_run() -> Result<()> {
let mut safe = new_safe_instance().await?;
let filename = "../testdata/";
let (xorurl, processed_files, files_map) = safe
.files_container_create(Some(filename), None, true, false, true)
.await?;
assert!(xorurl.is_empty());
assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
let filename1 = "../testdata/test.md";
assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
assert!(!processed_files[filename1].1.is_empty());
assert_eq!(
processed_files[filename1].1,
files_map["/test.md"][PREDICATE_LINK]
);
let filename2 = "../testdata/another.md";
assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
assert!(!processed_files[filename2].1.is_empty());
assert_eq!(
processed_files[filename2].1,
files_map["/another.md"][PREDICATE_LINK]
);
let filename3 = "../testdata/subfolder/subexists.md";
assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
assert!(!processed_files[filename3].1.is_empty());
assert_eq!(
processed_files[filename3].1,
files_map["/subfolder/subexists.md"][PREDICATE_LINK]
);
let filename4 = "../testdata/noextension";
assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
assert!(!processed_files[filename4].1.is_empty());
assert_eq!(
processed_files[filename4].1,
files_map["/noextension"][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_container_create_folder_without_trailing_slash() -> Result<()> {
let mut safe = new_safe_instance().await?;
let (xorurl, processed_files, files_map) = safe
.files_container_create(Some("../testdata"), None, true, true, false)
.await?;
assert!(xorurl.starts_with("safe://"));
assert_eq!(processed_files.len(), TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT);
assert_eq!(files_map.len(), TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT);
let filename1 = "../testdata/test.md";
assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename1].1,
files_map["/testdata/test.md"][PREDICATE_LINK]
);
let filename2 = "../testdata/another.md";
assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename2].1,
files_map["/testdata/another.md"][PREDICATE_LINK]
);
let filename3 = "../testdata/subfolder/subexists.md";
assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename3].1,
files_map["/testdata/subfolder/subexists.md"][PREDICATE_LINK]
);
let filename4 = "../testdata/noextension";
assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename4].1,
files_map["/testdata/noextension"][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_container_create_folder_with_trailing_slash() -> Result<()> {
let mut safe = new_safe_instance().await?;
let (xorurl, processed_files, files_map) = safe
.files_container_create(Some("../testdata/"), None, true, true, false)
.await?;
assert!(xorurl.starts_with("safe://"));
assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
let filename1 = "../testdata/test.md";
assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename1].1,
files_map["/test.md"][PREDICATE_LINK]
);
let filename2 = "../testdata/another.md";
assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename2].1,
files_map["/another.md"][PREDICATE_LINK]
);
let filename3 = "../testdata/subfolder/subexists.md";
assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename3].1,
files_map["/subfolder/subexists.md"][PREDICATE_LINK]
);
let filename4 = "../testdata/noextension";
assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename4].1,
files_map["/noextension"][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_container_create_dest_path_without_trailing_slash() -> Result<()> {
let mut safe = new_safe_instance().await?;
let (xorurl, processed_files, files_map) = safe
.files_container_create(Some("../testdata"), Some("/myroot"), true, true, false)
.await?;
assert!(xorurl.starts_with("safe://"));
assert_eq!(processed_files.len(), TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT);
assert_eq!(files_map.len(), TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT);
let filename1 = "../testdata/test.md";
assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename1].1,
files_map["/myroot/test.md"][PREDICATE_LINK]
);
let filename2 = "../testdata/another.md";
assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename2].1,
files_map["/myroot/another.md"][PREDICATE_LINK]
);
let filename3 = "../testdata/subfolder/subexists.md";
assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename3].1,
files_map["/myroot/subfolder/subexists.md"][PREDICATE_LINK]
);
let filename4 = "../testdata/noextension";
assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename4].1,
files_map["/myroot/noextension"][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_container_create_dest_path_with_trailing_slash() -> Result<()> {
let mut safe = new_safe_instance().await?;
let (xorurl, processed_files, files_map) = safe
.files_container_create(Some("../testdata"), Some("/myroot/"), true, true, false)
.await?;
assert!(xorurl.starts_with("safe://"));
assert_eq!(processed_files.len(), TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT);
assert_eq!(files_map.len(), TESTDATA_NO_SLASH_PUT_FILEITEM_COUNT);
let filename1 = "../testdata/test.md";
assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename1].1,
files_map["/myroot/testdata/test.md"][PREDICATE_LINK]
);
let filename2 = "../testdata/another.md";
assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename2].1,
files_map["/myroot/testdata/another.md"][PREDICATE_LINK]
);
let filename3 = "../testdata/subfolder/subexists.md";
assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename3].1,
files_map["/myroot/testdata/subfolder/subexists.md"][PREDICATE_LINK]
);
let filename4 = "../testdata/noextension";
assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
assert_eq!(
processed_files[filename4].1,
files_map["/myroot/testdata/noextension"][PREDICATE_LINK]
);
Ok(())
}
#[tokio::test]
async fn test_files_container_sync() -> Result<()> {
    // Syncing a second local folder onto an existing FilesContainer should
    // bump the container to version 1 and merge the new entries with the
    // existing ones.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (version, new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            true,  // recursive
            true,  // presumably follow_links — TODO confirm against signature
            false, // delete
            false, // update_nrs
            false, // dry_run
        )
        .await?;
    // Only the two subfolder files are processed by the sync.
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 2);
    assert_eq!(
        new_files_map.len(),
        TESTDATA_PUT_FILEITEM_COUNT + SUBFOLDER_PUT_FILEITEM_COUNT
    );
    // Entries uploaded by the initial create are still present and unchanged.
    let filename1 = "../testdata/test.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        new_files_map["/test.md"][PREDICATE_LINK]
    );
    let filename2 = "../testdata/another.md";
    assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename2].1,
        new_files_map["/another.md"][PREDICATE_LINK]
    );
    let filename3 = "../testdata/subfolder/subexists.md";
    assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename3].1,
        new_files_map["/subfolder/subexists.md"][PREDICATE_LINK]
    );
    let filename4 = "../testdata/noextension";
    assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename4].1,
        new_files_map["/noextension"][PREDICATE_LINK]
    );
    // The synced files land at the container root (e.g. "/subexists.md"):
    // the source path ended with a trailing slash.
    let filename5 = "../testdata/subfolder/subexists.md";
    assert_eq!(new_processed_files[filename5].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename5].1,
        new_files_map["/subexists.md"][PREDICATE_LINK]
    );
    let filename6 = "../testdata/subfolder/sub2.md";
    assert_eq!(new_processed_files[filename6].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename6].1,
        new_files_map["/sub2.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_dry_run() -> Result<()> {
    // A dry-run sync (last arg true) must report exactly the same results
    // (version, processed files, files map, computed links) as a real sync
    // would, without committing anything to the network.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (version, new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            true,
            true,
            false,
            false,
            true, // dry_run
        )
        .await?;
    // The dry run still reports the version the sync would have produced.
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 2);
    assert_eq!(
        new_files_map.len(),
        TESTDATA_PUT_FILEITEM_COUNT + SUBFOLDER_PUT_FILEITEM_COUNT
    );
    let filename1 = "../testdata/test.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        new_files_map["/test.md"][PREDICATE_LINK]
    );
    let filename2 = "../testdata/another.md";
    assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename2].1,
        new_files_map["/another.md"][PREDICATE_LINK]
    );
    let filename3 = "../testdata/subfolder/subexists.md";
    assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename3].1,
        new_files_map["/subfolder/subexists.md"][PREDICATE_LINK]
    );
    let filename4 = "../testdata/noextension";
    assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename4].1,
        new_files_map["/noextension"][PREDICATE_LINK]
    );
    // Even in a dry run the new entries get non-empty links computed for them.
    let filename5 = "../testdata/subfolder/subexists.md";
    assert_eq!(new_processed_files[filename5].0, CONTENT_ADDED_SIGN);
    assert!(!new_processed_files[filename5].1.is_empty());
    assert_eq!(
        new_processed_files[filename5].1,
        new_files_map["/subexists.md"][PREDICATE_LINK]
    );
    let filename6 = "../testdata/subfolder/sub2.md";
    assert_eq!(new_processed_files[filename6].0, CONTENT_ADDED_SIGN);
    assert!(!new_processed_files[filename6].1.is_empty());
    assert_eq!(
        new_processed_files[filename6].1,
        new_files_map["/sub2.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_same_size() -> Result<()> {
    // Syncing a file that resolves to the same destination name ("/test.md")
    // with the same size but different content must register as an update:
    // the size predicate stays equal while the link changes.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/test.md"), None, false, false, false)
        .await?;
    assert_eq!(processed_files.len(), 1);
    assert_eq!(files_map.len(), 1);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (version, new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/.subhidden/test.md",
            &xorurl,
            false,
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), 1);
    let filename1 = "../testdata/test.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        files_map["/test.md"][PREDICATE_LINK]
    );
    // The replacement is reported as an update, not an add.
    let filename2 = "../testdata/.subhidden/test.md";
    assert_eq!(new_processed_files[filename2].0, CONTENT_UPDATED_SIGN);
    assert_eq!(
        new_processed_files[filename2].1,
        new_files_map["/test.md"][PREDICATE_LINK]
    );
    // Same size, different content -> same SIZE predicate, different LINK.
    assert_eq!(
        files_map["/test.md"][PREDICATE_SIZE],
        new_files_map["/test.md"][PREDICATE_SIZE]
    );
    assert_ne!(
        files_map["/test.md"][PREDICATE_LINK],
        new_files_map["/test.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_with_versioned_target() -> Result<()> {
    // A sync target URL must not pin a version: syncing always applies to
    // the latest version, so a "?v=N" suffix is rejected as InvalidInput.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, _) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let versioned_xorurl = format!("{}?v=5", xorurl);
    match safe
        .files_container_sync(
            "../testdata/subfolder/",
            &versioned_xorurl,
            false, // recursive
            false,
            false, // delete
            true,  // update_nrs
            false, // dry_run
        )
        .await
    {
        // `anyhow!` accepts a &str directly; dropped the needless String
        // allocation (clippy: useless_conversion).
        Ok(_) => Err(anyhow!("Sync was unexpectedly successful")),
        Err(Error::InvalidInput(msg)) => {
            assert_eq!(
                msg,
                format!(
                    "The target URL cannot contain a version: {}",
                    versioned_xorurl
                )
            );
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_sync_with_delete() -> Result<()> {
    // A recursive sync with delete=true removes every existing entry that is
    // not part of the new source, leaving only the synced subfolder files.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (version, new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            true,  // recursive
            false,
            true,  // delete
            false, // update_nrs
            false, // dry_run
        )
        .await?;
    // Every original entry plus each synced file shows up as processed.
    assert_eq!(version, 1);
    assert_eq!(
        new_processed_files.len(),
        TESTDATA_PUT_FILEITEM_COUNT + SUBFOLDER_PUT_FILEITEM_COUNT
    );
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    // Deleted entries are keyed by their container path and keep the link
    // they had in the previous version of the files map.
    let file_path1 = "/test.md";
    assert_eq!(new_processed_files[file_path1].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[file_path1].1,
        files_map[file_path1][PREDICATE_LINK]
    );
    let file_path2 = "/another.md";
    assert_eq!(new_processed_files[file_path2].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[file_path2].1,
        files_map[file_path2][PREDICATE_LINK]
    );
    let file_path3 = "/subfolder/subexists.md";
    assert_eq!(new_processed_files[file_path3].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[file_path3].1,
        files_map[file_path3][PREDICATE_LINK]
    );
    let file_path4 = "/noextension";
    assert_eq!(new_processed_files[file_path4].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[file_path4].1,
        files_map[file_path4][PREDICATE_LINK]
    );
    // The synced subfolder file is added at the container root.
    let filename5 = "../testdata/subfolder/subexists.md";
    assert_eq!(new_processed_files[filename5].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename5].1,
        new_files_map["/subexists.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_delete_without_recursive() -> Result<()> {
    // delete=true requires recursive=true; the invalid combination is
    // rejected up front, so the target URL doesn't even need to resolve.
    let mut safe = new_safe_instance().await?;
    match safe
        .files_container_sync(
            "../testdata/subfolder/",
            "some-url",
            false, // recursive
            false,
            true,  // delete
            false, // update_nrs
            false, // dry_run
        )
        .await
    {
        // `anyhow!` and `assert_eq!` accept &str directly; dropped the
        // needless String allocations (clippy: useless_conversion).
        Ok(_) => Err(anyhow!("Sync was unexpectedly successful")),
        Err(Error::InvalidInput(msg)) => {
            assert_eq!(msg, "'delete' is not allowed if 'recursive' is not set");
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_sync_update_nrs_unversioned_link() -> Result<()> {
    // An NRS entry that links to versionable content (a FilesContainer) must
    // pin an explicit version; creating one from an unversioned link fails.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, _) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let nrsurl = random_nrs_name();
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_content_version(None); // strip any version from the link
    let unversioned_link = safe_url.to_string();
    match safe
        .nrs_map_container_create(&nrsurl, &unversioned_link, false, true, false)
        .await
    {
        // `anyhow!` accepts a &str directly; dropped the needless String
        // allocation (clippy: useless_conversion).
        Ok(_) => Err(anyhow!("NRS create was unexpectedly successful")),
        Err(Error::InvalidInput(msg)) => {
            assert_eq!(
                msg,
                format!(
                    "The linked content (FilesContainer) is versionable, therefore NRS requires the link to specify a version: \"{}\"",
                    unversioned_link
                )
            );
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_sync_update_nrs_with_xorurl() -> Result<()> {
    // update_nrs=true only makes sense when the target is an NRS URL;
    // passing a raw XOR-URL must be rejected as InvalidInput.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, _) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    match safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            false, // recursive
            false,
            false, // delete
            true,  // update_nrs
            false, // dry_run
        )
        .await
    {
        // `anyhow!` and `assert_eq!` accept &str directly; dropped the
        // needless String allocations (clippy: useless_conversion).
        Ok(_) => Err(anyhow!("Sync was unexpectedly successful")),
        Err(Error::InvalidInput(msg)) => {
            assert_eq!(
                msg,
                "'update-nrs' is not allowed since the URL provided is not an NRS URL"
            );
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_sync_update_nrs_versioned_link() -> Result<()> {
    // Syncing through an NRS name with update_nrs=true should advance the
    // NRS entry from the pinned v0 link to the new container version (v1).
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, _) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let nrsurl = random_nrs_name();
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_content_version(Some(0)); // NRS requires a versioned link
    let (nrs_xorurl, _, _) = safe
        .nrs_map_container_create(&nrsurl, &safe_url.to_string(), false, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&nrs_xorurl, None));
    let _ = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &nrsurl,
            false,
            false,
            false,
            true, // update_nrs: bump the NRS link to the new version
            false,
        )
        .await?;
    // The NRS name should now resolve to the container at version 1.
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_content_version(Some(1));
    let (new_link, _) = safe.parse_and_resolve_url(&nrsurl).await?;
    assert_eq!(new_link.to_string(), safe_url.to_string());
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_target_path_without_trailing_slash() -> Result<()> {
    // Sync into a sub-path of the container ("path/when/sync", no trailing
    // slash): the synced files are placed directly under that path.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_path("path/when/sync");
    let (version, new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/subfolder",
            &safe_url.to_string(),
            true,
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(
        new_processed_files.len(),
        SUBFOLDER_NO_SLASH_PUT_FILEITEM_COUNT
    );
    assert_eq!(
        new_files_map.len(),
        TESTDATA_PUT_FILEITEM_COUNT + SUBFOLDER_NO_SLASH_PUT_FILEITEM_COUNT
    );
    // Original entries are untouched by the sync into a sub-path.
    let filename1 = "../testdata/test.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        new_files_map["/test.md"][PREDICATE_LINK]
    );
    let filename2 = "../testdata/another.md";
    assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename2].1,
        new_files_map["/another.md"][PREDICATE_LINK]
    );
    let filename3 = "../testdata/subfolder/subexists.md";
    assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename3].1,
        new_files_map["/subfolder/subexists.md"][PREDICATE_LINK]
    );
    let filename4 = "../testdata/noextension";
    assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename4].1,
        new_files_map["/noextension"][PREDICATE_LINK]
    );
    // Without a trailing slash on the target path the file maps to
    // "/path/when/sync/subexists.md" (no "subfolder" segment).
    let filename5 = "../testdata/subfolder/subexists.md";
    assert_eq!(new_processed_files[filename5].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename5].1,
        new_files_map["/path/when/sync/subexists.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_target_path_with_trailing_slash() -> Result<()> {
    // Sync into a sub-path ending with '/': the source folder name is kept,
    // so files land under "/path/when/sync/subfolder/..." (contrast with the
    // no-trailing-slash test above).
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_path("/path/when/sync/");
    let (version, new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/subfolder",
            &safe_url.to_string(),
            true,
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(
        new_processed_files.len(),
        SUBFOLDER_NO_SLASH_PUT_FILEITEM_COUNT
    );
    assert_eq!(
        new_files_map.len(),
        TESTDATA_PUT_FILEITEM_COUNT + SUBFOLDER_NO_SLASH_PUT_FILEITEM_COUNT
    );
    // Original entries are untouched by the sync into a sub-path.
    let filename1 = "../testdata/test.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        new_files_map["/test.md"][PREDICATE_LINK]
    );
    let filename2 = "../testdata/another.md";
    assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename2].1,
        new_files_map["/another.md"][PREDICATE_LINK]
    );
    let filename3 = "../testdata/subfolder/subexists.md";
    assert_eq!(processed_files[filename3].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename3].1,
        new_files_map["/subfolder/subexists.md"][PREDICATE_LINK]
    );
    let filename4 = "../testdata/noextension";
    assert_eq!(processed_files[filename4].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename4].1,
        new_files_map["/noextension"][PREDICATE_LINK]
    );
    // With a trailing slash the "subfolder" path segment is preserved.
    let filename5 = "../testdata/subfolder/subexists.md";
    assert_eq!(new_processed_files[filename5].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename5].1,
        new_files_map["/path/when/sync/subfolder/subexists.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_get() -> Result<()> {
    // files_container_get on a freshly created container returns version 0
    // and a files map identical to the one produced at creation time.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    // Retried: the container may not be retrievable immediately after the put.
    let (version, fetched_files_map) = retry_loop!(safe.files_container_get(&xorurl));
    assert_eq!(version, 0);
    assert_eq!(fetched_files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), fetched_files_map.len());
    // Spot-check that individual FileItems round-trip unchanged.
    assert_eq!(files_map["/test.md"], fetched_files_map["/test.md"]);
    assert_eq!(files_map["/another.md"], fetched_files_map["/another.md"]);
    assert_eq!(
        files_map["/subfolder/subexists.md"],
        fetched_files_map["/subfolder/subexists.md"]
    );
    assert_eq!(files_map["/noextension"], fetched_files_map["/noextension"]);
    Ok(())
}
#[tokio::test]
async fn test_files_container_version() -> Result<()> {
    // The container starts at version 0, a sync bumps it to 1, and an
    // unversioned URL resolves to that latest version.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, _) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let (version, _) = retry_loop!(safe.files_container_get(&xorurl));
    assert_eq!(version, 0);
    let (version, _, _) = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            true,
            false,
            true, // delete
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    // Fetch with no explicit version: must resolve to the latest (1).
    // Retried until the network reflects the new version.
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_content_version(None);
    let (version, _) = retry_loop_for_pattern!(safe
        .files_container_get(&safe_url.to_string()), Ok((version, _)) if *version == 1)?;
    assert_eq!(version, 1);
    Ok(())
}
#[tokio::test]
async fn test_files_container_get_with_version() -> Result<()> {
    // Every written version of a FilesContainer stays retrievable: v0 holds
    // the original map, v1 the post-sync map (sync ran with delete=true), and
    // requesting a version that was never written fails with VersionNotFound.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (_version, _new_processed_files, new_files_map) = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            true,
            false,
            true, // delete: removes all the original entries
            false,
            false,
        )
        .await?;
    // v0 must return the original files map unchanged.
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_content_version(Some(0));
    let (version, v0_files_map) = safe.files_container_get(&safe_url.to_string()).await?;
    assert_eq!(version, 0);
    assert_eq!(files_map, v0_files_map);
    let file_path1 = "/test.md";
    assert_eq!(
        files_map[file_path1][PREDICATE_LINK],
        v0_files_map[file_path1][PREDICATE_LINK]
    );
    // v1 must return the post-sync map; the deleted entries are gone.
    safe_url.set_content_version(Some(1));
    let (version, v1_files_map) = retry_loop_for_pattern!(safe
        .files_container_get(&safe_url.to_string()), Ok((version, _)) if *version == 1)?;
    assert_eq!(version, 1);
    assert_eq!(new_files_map, v1_files_map);
    let file_path2 = "/another.md";
    let file_path3 = "/subfolder/subexists.md";
    let file_path4 = "/noextension";
    assert!(v1_files_map.get(file_path1).is_none());
    assert!(v1_files_map.get(file_path2).is_none());
    assert!(v1_files_map.get(file_path3).is_none());
    assert!(v1_files_map.get(file_path4).is_none());
    // v2 was never written, so fetching it must fail.
    safe_url.set_content_version(Some(2));
    match safe.files_container_get(&safe_url.to_string()).await {
        // Fixed typo in the failure message ("verion" -> "version") and
        // dropped the needless String allocation in `anyhow!`.
        Ok(_) => Err(anyhow!("Unexpectedly retrieved version 2 of container")),
        Err(Error::VersionNotFound(msg)) => {
            assert_eq!(
                msg,
                format!(
                    "Version '2' is invalid for FilesContainer found at \"{}\"",
                    safe_url
                )
            );
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_create_get_empty_folder() -> Result<()> {
    // An empty directory in the source tree gets its own FileItem whose
    // "type" is the filesystem-directory mimetype, and it round-trips
    // through create -> get unchanged.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    let (_, files_map_get) = retry_loop!(safe.files_container_get(&xorurl.to_string()));
    assert_eq!(files_map, files_map_get);
    assert_eq!(files_map_get["/emptyfolder"], files_map["/emptyfolder"]);
    assert_eq!(
        files_map_get["/emptyfolder"]["type"],
        MIMETYPE_FILESYSTEM_DIR
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_sync_with_nrs_url() -> Result<()> {
    // Syncing through an NRS name (whose entry pins v0) applies on top of the
    // container's *latest* version, not the pinned one: after a direct sync
    // (v1) and an NRS sync, the container ends at v2 with all files merged.
    let mut safe = new_safe_instance().await?;
    let (xorurl, _, _) = safe
        .files_container_create(Some("../testdata/test.md"), None, false, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let nrsurl = random_nrs_name();
    let mut safe_url = SafeUrl::from_url(&xorurl)?;
    safe_url.set_content_version(Some(0)); // NRS requires a versioned link
    let (nrs_xorurl, _, _) = safe
        .nrs_map_container_create(&nrsurl, &safe_url.to_string(), false, true, false)
        .await?;
    let _ = retry_loop!(safe.fetch(&nrs_xorurl, None));
    // First sync via the XOR-URL bumps the container to version 1.
    let _ = safe
        .files_container_sync(
            "../testdata/subfolder/",
            &xorurl,
            false,
            false,
            false,
            false,
            false,
        )
        .await?;
    // Second sync via the NRS name (update_nrs=true) bumps it to version 2.
    let _ = safe
        .files_container_sync(
            "../testdata/",
            &nrsurl,
            false,
            false,
            false,
            true,
            false,
        )
        .await?;
    let (version, fetched_files_map) = retry_loop_for_pattern!(safe.files_container_get(&xorurl), Ok((version, _)) if *version == 2)?;
    assert_eq!(version, 2);
    assert_eq!(fetched_files_map.len(), 6);
    Ok(())
}
#[tokio::test]
async fn test_files_container_add() -> Result<()> {
    // Adding a single local file under a new name in the container bumps the
    // version and extends the files map without touching existing entries.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/subfolder/"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            "../testdata/test.md",
            &format!("{}/new_filename_test.md", xorurl),
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT + 1);
    // Pre-existing entries keep their original links.
    let filename1 = "../testdata/subfolder/subexists.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        new_files_map["/subexists.md"][PREDICATE_LINK]
    );
    let filename2 = "../testdata/subfolder/sub2.md";
    assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename2].1,
        new_files_map["/sub2.md"][PREDICATE_LINK]
    );
    // The added file is stored under the destination name from the URL.
    let filename3 = "../testdata/test.md";
    assert_eq!(new_processed_files[filename3].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename3].1,
        new_files_map["/new_filename_test.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_add_dry_run() -> Result<()> {
    // A dry-run add (last arg true) reports the same results a real add
    // would, and because nothing is committed, repeating the same dry run
    // yields identical results again.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/subfolder/"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            "../testdata/test.md",
            &format!("{}/new_filename_test.md", xorurl),
            false,
            false,
            false,
            true, // dry_run
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT + 1);
    // Second, identical dry run: results must match the first one exactly,
    // proving the first dry run left the container untouched.
    let (version2, new_processed_files2, new_files_map2) = safe
        .files_container_add(
            "../testdata/test.md",
            &format!("{}/new_filename_test.md", xorurl),
            false,
            false,
            false,
            true, // dry_run
        )
        .await?;
    assert_eq!(version, version2);
    assert_eq!(new_processed_files.len(), new_processed_files2.len());
    assert_eq!(new_files_map.len(), new_files_map2.len());
    let filename = "../testdata/test.md";
    assert_eq!(new_processed_files[filename].0, CONTENT_ADDED_SIGN);
    assert_eq!(new_processed_files2[filename].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[filename].1,
        new_files_map["/new_filename_test.md"][PREDICATE_LINK]
    );
    assert_eq!(
        new_processed_files2[filename].1,
        new_files_map2["/new_filename_test.md"][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_add_dir() -> Result<()> {
    // files_container_add only accepts individual files; passing a directory
    // must fail with a hint to use the files sync operation instead.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/subfolder/"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    match safe
        .files_container_add("../testdata", &xorurl, false, false, false, false)
        .await
    {
        // `anyhow!` and `assert_eq!` accept &str directly; dropped the
        // needless String allocations (clippy: useless_conversion).
        Ok(_) => Err(anyhow!("Unexpectedly added a folder to files container")),
        Err(Error::InvalidInput(msg)) => {
            assert_eq!(
                msg,
                "'../testdata' is a directory, only individual files can be added. Use files sync operation for uploading folders",
            );
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_add_existing_name() -> Result<()> {
    // Adding to an already-taken destination name without force=true is a
    // no-op (version stays at 0) whether the content matches or not; with
    // force=true the existing entry is replaced and the version bumps.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/subfolder/"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    // Case 1: same name, same content, no force -> rejected, version unchanged.
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            "../testdata/subfolder/sub2.md",
            &format!("{}/sub2.md", xorurl),
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 0);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(
        new_processed_files["../testdata/subfolder/sub2.md"].1,
        "File named \"/sub2.md\" with same content already exists on target. Use the \'force\' flag to replace it"
    );
    assert_eq!(files_map, new_files_map);
    // Case 2: same name, different content, no force -> also rejected.
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            "../testdata/test.md",
            &format!("{}/sub2.md", xorurl),
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 0);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(
        new_processed_files["../testdata/test.md"].1,
        "File named \"/sub2.md\" with different content already exists on target. Use the \'force\' flag to replace it"
    );
    assert_eq!(files_map, new_files_map);
    // Case 3: same name with force=true -> entry replaced, version bumps to 1.
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            "../testdata/test.md",
            &format!("{}/sub2.md", xorurl),
            true, // force
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(
        new_processed_files["../testdata/test.md"].0,
        CONTENT_UPDATED_SIGN
    );
    assert_eq!(
        new_processed_files["../testdata/test.md"].1,
        new_files_map["/sub2.md"]["link"]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_fail_add_or_sync_invalid_path() -> Result<()> {
    // Both sync and add must fail with a FileSystemError when the local
    // source path does not exist.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/test.md"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), 1);
    assert_eq!(files_map.len(), 1);
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    match safe
        .files_container_sync(
            "/non-existing-path",
            &xorurl,
            false,
            false,
            false,
            false,
            false,
        )
        .await
    {
        // Fixed copy-pasted failure message (this branch is a sync, not an
        // add) and dropped the needless String allocations in bail!/anyhow!.
        Ok(_) => {
            bail!("Sync of an invalid path was unexpectedly successful")
        }
        Err(Error::FileSystemError(msg)) => {
            assert!(msg
                .starts_with("Couldn't read metadata from source path ('/non-existing-path')"))
        }
        other => {
            bail!("Error returned is not the expected one: {:?}", other)
        }
    }
    match safe
        .files_container_add(
            "/non-existing-path",
            &format!("{}/test.md", xorurl),
            true,
            false,
            false,
            false,
        )
        .await
    {
        Ok(_) => Err(anyhow!(
            "Unexpectedly added an invalid path to files container"
        )),
        Err(Error::FileSystemError(msg)) => {
            assert!(msg
                .starts_with("Couldn't read metadata from source path ('/non-existing-path')"));
            Ok(())
        }
        other => Err(anyhow!(
            "Error returned is not the expected one: {:?}",
            other
        )),
    }
}
#[tokio::test]
async fn test_files_container_add_a_url() -> Result<()> {
    // The source of an add can be an already-stored blob's XOR-URL instead
    // of a local path: the container entry then links directly to that blob,
    // and with force=true the link can later be repointed at another blob.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/subfolder/"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    // Store a raw blob and add it to the container by its XOR-URL.
    let data = b"0123456789";
    let file_xorurl = safe.files_store_public_blob(data, None, false).await?;
    let new_filename = "/new_filename_test.md";
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            &file_xorurl,
            &format!("{}{}", xorurl, new_filename),
            false,
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT + 1);
    // Pre-existing entries keep their original links.
    let filename1 = "../testdata/subfolder/subexists.md";
    assert_eq!(processed_files[filename1].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename1].1,
        new_files_map["/subexists.md"][PREDICATE_LINK]
    );
    let filename2 = "../testdata/subfolder/sub2.md";
    assert_eq!(processed_files[filename2].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        processed_files[filename2].1,
        new_files_map["/sub2.md"][PREDICATE_LINK]
    );
    // The new entry links to the exact blob URL that was supplied.
    assert_eq!(new_processed_files[new_filename].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[new_filename].1,
        new_files_map[new_filename][PREDICATE_LINK]
    );
    assert_eq!(new_files_map[new_filename][PREDICATE_LINK], file_xorurl);
    // Replace the entry's target with a different blob using force=true.
    let data = b"9876543210";
    let other_file_xorurl = safe.files_store_public_blob(data, None, false).await?;
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add(
            &other_file_xorurl,
            &format!("{}{}", xorurl, new_filename),
            true, // force
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 2);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT + 1);
    assert_eq!(new_processed_files[new_filename].0, CONTENT_UPDATED_SIGN);
    assert_eq!(
        new_processed_files[new_filename].1,
        new_files_map[new_filename][PREDICATE_LINK]
    );
    assert_eq!(
        new_files_map[new_filename][PREDICATE_LINK],
        other_file_xorurl
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_add_from_raw() -> Result<()> {
    // files_container_add_from_raw uploads in-memory bytes straight into the
    // container; a second call on the same name with force=true replaces the
    // entry and bumps the version again.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/subfolder/"), None, false, true, false)
        .await?;
    assert_eq!(processed_files.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    let data = b"0123456789";
    let new_filename = "/new_filename_test.md";
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add_from_raw(
            data,
            &format!("{}{}", xorurl, new_filename),
            false,
            false,
            false,
        )
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT + 1);
    assert_eq!(new_processed_files[new_filename].0, CONTENT_ADDED_SIGN);
    assert_eq!(
        new_processed_files[new_filename].1,
        new_files_map[new_filename][PREDICATE_LINK]
    );
    // Overwrite the same destination with different bytes (force=true).
    let data = b"9876543210";
    let (version, new_processed_files, new_files_map) = safe
        .files_container_add_from_raw(
            data,
            &format!("{}{}", xorurl, new_filename),
            true, // force
            false,
            false,
        )
        .await?;
    assert_eq!(version, 2);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), SUBFOLDER_PUT_FILEITEM_COUNT + 1);
    assert_eq!(new_processed_files[new_filename].0, CONTENT_UPDATED_SIGN);
    assert_eq!(
        new_processed_files[new_filename].1,
        new_files_map[new_filename][PREDICATE_LINK]
    );
    Ok(())
}
#[tokio::test]
async fn test_files_container_remove_path() -> Result<()> {
    // Removing a single file bumps the version and drops one entry; removing
    // a directory path (recursive=true) drops all entries under it.
    let mut safe = new_safe_instance().await?;
    let (xorurl, processed_files, files_map) = safe
        .files_container_create(Some("../testdata/"), None, true, true, false)
        .await?;
    assert_eq!(processed_files.len(), TESTDATA_PUT_FILEITEM_COUNT);
    assert_eq!(files_map.len(), TESTDATA_PUT_FILEITEM_COUNT);
    // Wait until the freshly created container is retrievable from the network.
    let _ = retry_loop!(safe.fetch(&xorurl, None));
    // Remove one file: version 0 -> 1, one entry gone.
    let (version, new_processed_files, new_files_map) = safe
        .files_container_remove_path(&format!("{}/test.md", xorurl), false, false, false)
        .await?;
    assert_eq!(version, 1);
    assert_eq!(new_processed_files.len(), 1);
    assert_eq!(new_files_map.len(), TESTDATA_PUT_FILEITEM_COUNT - 1);
    // The removal record keeps the link the entry had before deletion.
    let filepath = "/test.md";
    assert_eq!(new_processed_files[filepath].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[filepath].1,
        files_map[filepath][PREDICATE_LINK]
    );
    // Remove a whole folder recursively: version 1 -> 2, both files gone.
    let (version, new_processed_files, new_files_map) = safe
        .files_container_remove_path(&format!("{}/subfolder", xorurl), true, false, false)
        .await?;
    assert_eq!(version, 2);
    assert_eq!(new_processed_files.len(), 2);
    assert_eq!(
        new_files_map.len(),
        TESTDATA_PUT_FILEITEM_COUNT - SUBFOLDER_PUT_FILEITEM_COUNT - 1
    );
    let filename1 = "/subfolder/subexists.md";
    assert_eq!(new_processed_files[filename1].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[filename1].1,
        files_map[filename1][PREDICATE_LINK]
    );
    let filename2 = "/subfolder/sub2.md";
    assert_eq!(new_processed_files[filename2].0, CONTENT_DELETED_SIGN);
    assert_eq!(
        new_processed_files[filename2].1,
        files_map[filename2][PREDICATE_LINK]
    );
    Ok(())
}
}