mod change_tracking;
use change_tracking::*;
use super::{
files::{download_file, FilesUploader},
ChunkManager,
};
use sn_client::{
acc_packet::load_account_wallet_or_create_with_mnemonic,
protocol::storage::{Chunk, RegisterAddress, RetryStrategy},
registers::EntryHash,
transfers::{DerivationIndex, MainSecretKey},
Client, FilesApi, FolderEntry, FoldersApi, Metadata, UploadCfg, WalletClient,
};
use bls::PublicKey;
use color_eyre::{
eyre::{bail, eyre},
Result,
};
use std::{
collections::{
btree_map::{Entry, OccupiedEntry},
BTreeMap,
},
ffi::OsString,
fs::{create_dir_all, remove_dir_all, remove_file, File},
io::Write,
path::{Path, PathBuf},
};
use tokio::task::JoinSet;
use tracing::trace;
use walkdir::{DirEntry, WalkDir};
use xor_name::XorName;
const ACC_PACKET_ADDR_DERIVATION_INDEX: DerivationIndex = DerivationIndex([0x0; 32]);
const ACC_PACKET_OWNER_DERIVATION_INDEX: DerivationIndex = DerivationIndex([0x1; 32]);
/// Manages a local directory of files/folders kept in sync with a hierarchy of
/// Folders stored on the network, rooted at a Folder whose address is derived
/// deterministically from the account's root secret key.
pub struct AccountPacket {
    /// Network client; its signer key is derived from the root SK
    /// (see `derive_keys_and_address`).
    client: Client,
    /// Directory holding the local wallet used to pay for uploads.
    wallet_dir: PathBuf,
    /// Root of the user's tracked files hierarchy.
    files_dir: PathBuf,
    /// Directory where per-item tracking metadata is stored, one file per item
    /// named after the hex-encoded metadata xorname.
    meta_dir: PathBuf,
    /// Directory holding tracking state (chunking artifacts, stored root SK).
    tracking_info_dir: PathBuf,
    /// Tracking info loaded from disk, keyed by path relative to `files_dir`.
    curr_tracking_info: BTreeMap<PathBuf, MetadataTrackingInfo>,
    /// Address of the root Folder register on the network.
    root_folder_addr: RegisterAddress,
    /// Whether the root Folder has already been created/synced on the network.
    root_folder_created: bool,
}
impl AccountPacket {
/// Initialise `path` as a fresh account packet tracked under the given root
/// secret key.
///
/// Fails if the path is already being tracked. Records the derived root Folder
/// address (flagged as not yet created on the network) and stores the root SK
/// (encrypted with `password` if one is provided), then loads the packet via
/// [`Self::from_path`].
pub fn init(
    client: Client,
    wallet_dir: &Path,
    path: &Path,
    root_sk: &MainSecretKey,
    password: Option<&[u8]>,
) -> Result<Self> {
    let (_, tracking_info_dir, meta_dir) = build_tracking_info_paths(path)?;

    // If a root Folder address can already be read, this path is an acc-packet.
    if let Ok((addr, _)) = read_root_folder_addr(&meta_dir) {
        bail!(
            "The local path {path:?} is already being tracked with Folder address: {}",
            addr.to_hex()
        );
    }

    // Derive signer key and root Folder address deterministically from `root_sk`.
    let (client, root_folder_addr) = derive_keys_and_address(client, root_sk);
    // `false`: the root Folder hasn't been created on the network yet.
    store_root_folder_tracking_info(&meta_dir, root_folder_addr, false)?;
    store_root_sk(&tracking_info_dir, root_sk, password)?;
    Self::from_path(client, wallet_dir, path, password)
}
/// Load an existing `AccountPacket` from `path`, which must have been
/// initialised with [`Self::init`] (or populated by [`Self::retrieve_folders`]).
///
/// Reads the stored root SK (decrypted with `password` if one was used),
/// re-derives the keys and root Folder address, and verifies the address
/// recorded on disk matches the derived one.
pub fn from_path(
    client: Client,
    wallet_dir: &Path,
    path: &Path,
    password: Option<&[u8]>,
) -> Result<Self> {
    let (files_dir, tracking_info_dir, meta_dir) = build_tracking_info_paths(path)?;
    let root_sk = read_root_sk(&tracking_info_dir, password)?;
    let (client, root_folder_addr) = derive_keys_and_address(client, &root_sk);

    // Load the current tracking info so in-memory state matches disk.
    let curr_tracking_info = read_tracking_info_from_disk(&meta_dir)?;

    let (read_folder_addr, root_folder_created) = read_root_folder_addr(&meta_dir)
        .map_err(|_| eyre!("Root Folder address not found, make sure the directory {path:?} is initialised."))?;
    // The address stored on disk must match the one derived from the root SK.
    if read_folder_addr != root_folder_addr {
        bail!(
            "The path is already tracking another Folder with address: {}",
            read_folder_addr.to_hex()
        );
    }

    Ok(Self {
        client,
        wallet_dir: wallet_dir.to_path_buf(),
        files_dir,
        meta_dir,
        tracking_info_dir,
        curr_tracking_info,
        root_folder_addr,
        root_folder_created,
    })
}
/// Returns the `RegisterAddress` of the root Folder tracking this account
/// packet's files hierarchy on the network.
pub fn root_folder_addr(&self) -> RegisterAddress {
    self.root_folder_addr
}
/// Retrieve an account packet's Folders hierarchy from the network and
/// download it into `download_path`, which becomes a newly tracked dir.
///
/// Fails if `download_path` already tracks a Folder (this one or another).
/// The root Folder is flagged as created since it was fetched from the network.
pub async fn retrieve_folders(
    client: &Client,
    wallet_dir: &Path,
    root_sk: &MainSecretKey,
    password: Option<&[u8]>,
    download_path: &Path,
    batch_size: usize,
    retry_strategy: RetryStrategy,
) -> Result<Self> {
    create_dir_all(download_path)?;
    let (files_dir, tracking_info_dir, meta_dir) = build_tracking_info_paths(download_path)?;

    let (client, root_folder_addr) = derive_keys_and_address(client.clone(), root_sk);

    // Refuse to re-initialise a path which already holds tracking info.
    if let Ok((addr, _)) = read_root_folder_addr(&meta_dir) {
        if addr == root_folder_addr {
            bail!("The download path is already tracking that Folder, use 'sync' instead.");
        } else {
            bail!(
                "The download path is already tracking another Folder with address: {}",
                addr.to_hex()
            );
        }
    } else {
        // `true`: the root Folder already exists on the network. Also store
        // the (optionally password-encrypted) root SK for later loads.
        store_root_folder_tracking_info(&meta_dir, root_folder_addr, true)?;
        store_root_sk(&tracking_info_dir, root_sk, password)?;
    }

    let mut acc_packet = Self {
        client: client.clone(),
        wallet_dir: wallet_dir.to_path_buf(),
        files_dir,
        meta_dir,
        tracking_info_dir,
        curr_tracking_info: BTreeMap::default(),
        root_folder_addr,
        root_folder_created: true,
    };

    let folder_name: OsString = download_path.file_name().unwrap_or_default().into();
    let folders_api =
        FoldersApi::retrieve(client.clone(), wallet_dir, root_folder_addr).await?;
    let folders_to_download = vec![(folder_name, folders_api, download_path.to_path_buf())];

    let _ = acc_packet
        .download_folders_and_files(folders_to_download, batch_size, retry_strategy)
        .await?;
    // Pick up the tracking info just written by the download pass.
    acc_packet.curr_tracking_info = read_tracking_info_from_disk(&acc_packet.meta_dir)?;

    Ok(acc_packet)
}
pub fn status(&self) -> Result<()> {
println!("Looking for local changes made to files/folders compared to version on network at: {} ...", self.root_folder_addr().to_hex());
let changes = self.scan_files_and_folders_for_changes(false)?;
if changes.mutations.is_empty() {
println!("No local changes made to files/folders.");
} else {
println!("Local changes made to files/folders:");
changes.mutations.iter().for_each(|m| println!("{m}"));
let num_of_changes = changes.mutations.len();
println!("\nChanges found to local files/folders: {num_of_changes}");
}
Ok(())
}
/// Push all local changes to the network and pull down the resulting state.
///
/// Stages: scan for local mutations; pay for and upload the affected Folders;
/// update on-disk tracking info per mutation; download the synced Folders back
/// (which may include changes made remotely); finally prune tracking info for
/// items no longer present in the updated Folders.
pub async fn sync(&mut self, upload_cfg: UploadCfg, make_data_public: bool) -> Result<()> {
    let ChangesToApply { folders, mutations } =
        self.scan_files_and_folders_for_changes(make_data_public)?;

    if mutations.is_empty() {
        println!("No local changes made to files/folders to be pushed to network.");
    } else {
        println!("Local changes made to files/folders to be synced with network:");
        mutations.iter().for_each(|m| println!("{m}"));
    }

    println!("Paying for folders hierarchy and uploading...");
    let synced_folders = self
        .pay_and_sync_folders(folders, upload_cfg, make_data_public)
        .await?;

    // After the first successful sync, record that the root Folder now exists
    // on the network so later runs treat it as unchanged unless mutated.
    if !self.root_folder_created {
        self.root_folder_created = true;
        store_root_folder_tracking_info(
            &self.meta_dir,
            self.root_folder_addr,
            self.root_folder_created,
        )?;
    }

    // Reflect each pushed mutation in the tracking info kept on disk.
    for mutation in mutations {
        match mutation {
            Mutation::NewFile(tracking_info) | Mutation::NewFolder(tracking_info) => {
                self.store_tracking_info(tracking_info)?;
            }
            Mutation::FileRemoved((_, meta_xorname))
            | Mutation::FolderRemoved((_, meta_xorname)) => {
                self.remove_tracking_info(meta_xorname);
            }
            Mutation::FileContentChanged((meta_xorname, tracking_info)) => {
                // Store the new version's info and drop the old one.
                self.store_tracking_info(tracking_info)?;
                self.remove_tracking_info(meta_xorname);
            }
        }
    }

    // Download the state of every synced Folder back onto disk.
    let folders_to_download: Vec<_> = synced_folders
        .iter()
        .map(|(path, (folders_api, _))| {
            let folder_name: OsString = path.file_name().unwrap_or_default().into();
            (folder_name, folders_api.clone(), path.clone())
        })
        .collect();

    let mut updated_folders = self
        .download_folders_and_files(
            folders_to_download,
            upload_cfg.batch_size,
            upload_cfg.retry_strategy,
        )
        .await?;

    // Prune tracking info for items no longer found in the updated Folders:
    // folders in a first pass (removing a stale dir also deletes the files
    // under it), then files in a second pass.
    let mut curr_tracking_info = read_tracking_info_from_disk(&self.meta_dir)?;
    curr_tracking_info.retain(|_, tracking_info| {
        if let FolderEntry::Folder(_) = tracking_info.metadata.content {
            !self.remove_tracking_if_not_found_in_folders(tracking_info, &mut updated_folders)
        } else {
            true
        }
    });
    curr_tracking_info.retain(|_, tracking_info| {
        if let FolderEntry::File(_) = tracking_info.metadata.content {
            !self.remove_tracking_if_not_found_in_folders(tracking_info, &mut updated_folders)
        } else {
            true
        }
    });

    self.curr_tracking_info = curr_tracking_info;

    Ok(())
}
/// Translate an absolute path under `files_dir` into the relative path used
/// as key in the tracking-info map. The path is canonicalised first, so it
/// must exist on disk.
fn get_relative_path(&self, path: &Path) -> Result<PathBuf> {
    let canonical = path.canonicalize()?;
    let relative = canonical.strip_prefix(&self.files_dir)?;
    Ok(relative.to_path_buf())
}
/// Store the given `MetadataTrackingInfo` in the metadata dir, in a file named
/// after the hex-encoded metadata xorname. The stored `file_path` is made
/// relative to `files_dir`, so tracking survives moving the whole directory.
///
/// The record is built and serialised (MessagePack via `rmp_serde`) *before*
/// the file is created, so a failure to compute the relative path or to
/// serialise cannot leave an empty metadata file behind (the original created
/// the file first).
fn store_tracking_info(
    &self,
    MetadataTrackingInfo {
        file_path,
        meta_xorname,
        metadata,
        entry_hash,
    }: MetadataTrackingInfo,
) -> Result<()> {
    // Relativise the path and serialise while nothing has touched the fs yet.
    let tracking_info = MetadataTrackingInfo {
        file_path: self.get_relative_path(&file_path)?,
        meta_xorname,
        metadata,
        entry_hash,
    };
    let serialised = rmp_serde::to_vec(&tracking_info)?;

    // Only create the metadata file once all fallible preparation succeeded.
    let metadata_file_path = self.meta_dir.join(hex::encode(meta_xorname));
    let mut meta_file = File::create(metadata_file_path)?;
    meta_file.write_all(&serialised)?;

    Ok(())
}
/// Delete the tracking-info file for the given metadata xorname from the
/// metadata dir. Removal is best-effort: a failure is printed to stdout but
/// not propagated.
fn remove_tracking_info(&self, meta_xorname: XorName) {
    let metadata_file_path = self.meta_dir.join(hex::encode(meta_xorname));
    match remove_file(&metadata_file_path) {
        Ok(()) => {}
        Err(err) => {
            println!("Failed to remove tracking info file {metadata_file_path:?}: {err}");
        }
    }
}
/// Check whether the tracked item still exists, with the same metadata
/// xorname, in its parent Folder; if not, remove the local file/dir
/// (best-effort) and its tracking info. Returns `true` when the tracking info
/// was removed.
fn remove_tracking_if_not_found_in_folders(
    &self,
    tracking_info: &MetadataTrackingInfo,
    folders: &mut Folders,
) -> bool {
    let mut removed = false;
    let abs_path = self.files_dir.join(&tracking_info.file_path);
    match tracking_info.metadata.content {
        FolderEntry::Folder(_) => {
            match find_by_name_in_parent_folder(
                &tracking_info.metadata.name,
                &abs_path,
                folders,
            ) {
                Some(meta_xorname) => {
                    // An entry with this name exists but with different
                    // metadata: the tracked version is stale.
                    if meta_xorname != tracking_info.meta_xorname {
                        self.remove_tracking_info(tracking_info.meta_xorname);
                        removed = true;
                    }
                }
                None => {
                    // Folder no longer present in its parent: remove the local
                    // dir tree, its tracking info, and drop it from `folders`.
                    if let Err(err) = remove_dir_all(&abs_path) {
                        trace!("Failed to remove directory {abs_path:?}: {err:?}");
                    }
                    self.remove_tracking_info(tracking_info.meta_xorname);
                    folders.remove(&abs_path);
                    removed = true;
                }
            }
        }
        FolderEntry::File(_) => {
            match find_by_name_in_parent_folder(
                &tracking_info.metadata.name,
                &abs_path,
                folders,
            ) {
                Some(meta_xorname) => {
                    // Same name but different metadata: stale tracking info.
                    if meta_xorname != tracking_info.meta_xorname {
                        self.remove_tracking_info(tracking_info.meta_xorname);
                        removed = true;
                    }
                }
                None => {
                    // File no longer present in its parent Folder: remove the
                    // local file and its tracking info.
                    if let Err(err) = remove_file(&abs_path) {
                        trace!("Failed to remove file {abs_path:?}: {err:?}");
                    }
                    self.remove_tracking_info(tracking_info.meta_xorname);
                    removed = true;
                }
            }
        }
    }
    removed
}
/// Compare the current content of `files_dir` against the in-memory tracking
/// info, building the Folders hierarchy plus the set of mutations that would
/// bring the network state in line with the local one.
///
/// When `make_data_public` is false, entry metadata is encrypted to the
/// client's signer public key.
fn scan_files_and_folders_for_changes(&self, make_data_public: bool) -> Result<ChangesToApply> {
    // Chunk all local files so their head chunk addresses can be compared
    // against the previously tracked versions.
    let mut chunk_manager = ChunkManager::new(&self.tracking_info_dir);
    chunk_manager.chunk_with_iter(self.iter_only_files(), false, false)?;

    let encryption_pk = if make_data_public {
        None
    } else {
        // Encrypt entries metadata to our own signer public key.
        Some(self.client.signer_pk())
    };

    // Start from the folders hierarchy found on disk (records new folders).
    let mut changes = self.read_folders_hierarchy_from_disk(encryption_pk)?;
    let folders = &mut changes.folders;

    for chunked_file in chunk_manager.iter_chunked_files() {
        let file_path = &chunked_file.file_path;
        if let Some(Entry::Occupied(mut parent_folder)) = file_path
            .parent()
            .map(|parent| folders.entry(parent.to_path_buf()))
        {
            match self.get_tracking_info(file_path) {
                Ok(Some(tracking_info)) => match &tracking_info.metadata.content {
                    FolderEntry::File(chunk) => {
                        // Tracked as a file: a different head chunk address
                        // means the content changed since last sync.
                        if chunk.address() != &chunked_file.head_chunk_address {
                            let (entry_hash, meta_xorname, metadata) = replace_item_in_folder(
                                &mut parent_folder,
                                tracking_info.entry_hash,
                                chunked_file.file_name.clone(),
                                chunked_file.data_map.clone(),
                                encryption_pk,
                            )?;
                            changes.mutations.push(Mutation::FileContentChanged((
                                tracking_info.meta_xorname,
                                MetadataTrackingInfo {
                                    file_path: file_path.to_path_buf(),
                                    meta_xorname,
                                    metadata,
                                    entry_hash,
                                },
                            )));
                        }
                    }
                    FolderEntry::Folder(_) => {
                        // The path was tracked as a folder but now holds a
                        // file: replace the entry and record it as a new file.
                        let (entry_hash, meta_xorname, metadata) = replace_item_in_folder(
                            &mut parent_folder,
                            tracking_info.entry_hash,
                            chunked_file.file_name.clone(),
                            chunked_file.data_map.clone(),
                            encryption_pk,
                        )?;
                        changes
                            .mutations
                            .push(Mutation::NewFile(MetadataTrackingInfo {
                                file_path: file_path.to_path_buf(),
                                meta_xorname,
                                metadata,
                                entry_hash,
                            }));
                    }
                },
                // Untracked file: add it to its parent Folder as a new entry.
                Ok(None) => {
                    let (entry_hash, meta_xorname, metadata) =
                        parent_folder.get_mut().0.add_file(
                            chunked_file.file_name.clone(),
                            chunked_file.data_map.clone(),
                            encryption_pk,
                        )?;
                    parent_folder.get_mut().1.has_new_entries();
                    changes
                        .mutations
                        .push(Mutation::NewFile(MetadataTrackingInfo {
                            file_path: file_path.to_path_buf(),
                            meta_xorname,
                            metadata,
                            entry_hash,
                        }));
                }
                Err(err) => {
                    println!("Skipping file {file_path:?}: {err:?}");
                }
            }
        }
    }

    // Detect local removals: tracked items which no longer exist on disk.
    for (item_path, tracking_info) in self.curr_tracking_info.iter() {
        let abs_path = self.files_dir.join(item_path);
        match tracking_info.metadata.content {
            FolderEntry::Folder(_) => {
                // Tracked folder no longer found among on-disk folders.
                if !folders.contains_key(&abs_path) {
                    remove_from_parent(folders, &abs_path, tracking_info.entry_hash)?;
                    changes.mutations.push(Mutation::FolderRemoved((
                        abs_path,
                        tracking_info.meta_xorname,
                    )));
                }
            }
            FolderEntry::File(_) => {
                // Tracked file no longer among the chunked (on-disk) files.
                if chunk_manager
                    .iter_chunked_files()
                    .all(|chunked_file| chunked_file.file_path != abs_path)
                {
                    remove_from_parent(folders, &abs_path, tracking_info.entry_hash)?;
                    changes.mutations.push(Mutation::FileRemoved((
                        abs_path,
                        tracking_info.meta_xorname,
                    )));
                }
            }
        }
    }

    Ok(changes)
}
/// Build a `ChangesToApply` from the directory hierarchy found on disk: walk
/// every dir under `files_dir`, resolve (or create) a `FoldersApi` for each,
/// and for any dir not previously tracked, add it as an entry to its parent
/// Folder and record a `NewFolder` mutation.
fn read_folders_hierarchy_from_disk(
    &self,
    encryption_pk: Option<PublicKey>,
) -> Result<ChangesToApply> {
    let mut changes = ChangesToApply::default();
    for (dir_path, depth, parent, dir_name) in self.iter_only_dirs().filter_map(|entry| {
        // Only entries with a parent are considered; collect the pieces we
        // need as owned values so the walkdir entry can be dropped.
        entry.path().parent().map(|parent| {
            (
                entry.path().to_path_buf(),
                entry.depth(),
                parent.to_owned(),
                entry.file_name().to_owned(),
            )
        })
    }) {
        // Get (or create from tracking info) the FoldersApi for this dir.
        let (folder, folder_change) = changes
            .folders
            .entry(dir_path.clone())
            .or_insert(self.find_folder_in_tracking_info(&dir_path)?)
            .clone();
        let curr_folder_addr = *folder.address();

        // depth == 0 is `files_dir` itself: the root Folder has no parent entry.
        if depth > 0 {
            let (parent_folder, parent_folder_change) = changes
                .folders
                .entry(parent.clone())
                .or_insert(self.find_folder_in_tracking_info(&parent)?);

            if folder_change.is_new_folder() {
                // Link the new Folder into its parent and record the mutation.
                let (entry_hash, meta_xorname, metadata) =
                    parent_folder.add_folder(dir_name, curr_folder_addr, encryption_pk)?;
                parent_folder_change.has_new_entries();
                changes
                    .mutations
                    .push(Mutation::NewFolder(MetadataTrackingInfo {
                        file_path: dir_path,
                        meta_xorname,
                        metadata,
                        entry_hash,
                    }));
            }
        }
    }
    Ok(changes)
}
/// Look up the tracking info for an absolute path; the tracking map is keyed
/// by paths relative to `files_dir`, so the path is relativised first.
fn get_tracking_info(&self, path: &Path) -> Result<Option<&MetadataTrackingInfo>> {
    self.get_relative_path(path)
        .map(|relative| self.curr_tracking_info.get(&relative))
}
/// Resolve the `FoldersApi` (and whether it is new or unchanged) for a local
/// directory path, based on tracking info:
/// - `files_dir` itself maps to the root Folder address (`NoChange` once the
///   root Folder has been created on the network);
/// - any other dir tracked as a Folder maps to its tracked address (`NoChange`);
/// - an untracked dir (or one tracked as a file) yields no known address and
///   is treated as a `NewFolder`.
fn find_folder_in_tracking_info(&self, path: &Path) -> Result<(FoldersApi, FolderChange)> {
    // Default to NewFolder; the lookups below downgrade to NoChange when the
    // dir is already known.
    let mut folder_change = FolderChange::NewFolder;
    let address = if path == self.files_dir {
        if self.root_folder_created {
            folder_change = FolderChange::NoChange;
        }
        Some(self.root_folder_addr)
    } else {
        self.get_tracking_info(path)?.and_then(|tracking_info| {
            match tracking_info.metadata.content {
                FolderEntry::Folder(addr) => {
                    folder_change = FolderChange::NoChange;
                    Some(addr)
                }
                // Tracked, but as a file: no Folder address to reuse.
                FolderEntry::File(_) => None,
            }
        })
    };

    let folders_api = FoldersApi::new(self.client.clone(), &self.wallet_dir, address)?;
    Ok((folders_api, folder_change))
}
/// Iterate all directories under `files_dir`. Only directory entries pass the
/// `filter_entry` predicate, so files are skipped entirely and WalkDir never
/// descends into the tracking-changes dir.
fn iter_only_dirs(&self) -> impl Iterator<Item = DirEntry> {
    WalkDir::new(&self.files_dir)
        .into_iter()
        .filter_entry(|e| e.file_type().is_dir() && e.file_name() != SAFE_TRACKING_CHANGES_DIR)
        .flatten()
}
/// Iterate all files under `files_dir`. The `filter_entry` predicate keeps
/// every file AND every directory except the tracking-changes dir — keeping a
/// dir is what lets WalkDir descend into it — then the final `filter` narrows
/// the yielded entries down to files only.
fn iter_only_files(&self) -> impl Iterator<Item = DirEntry> {
    WalkDir::new(&self.files_dir)
        .into_iter()
        .filter_entry(|e| e.file_type().is_file() || e.file_name() != SAFE_TRACKING_CHANGES_DIR)
        .flatten()
        .filter(|e| e.file_type().is_file())
}
/// Pay for and upload all local files, then pay for the Folder registers and
/// metadata, and finally sync every Folder with the network concurrently.
/// Returns the map of Folders whose sync task completed (individual sync
/// errors are printed; only tasks which failed to join are dropped).
async fn pay_and_sync_folders(
    &self,
    folders: Folders,
    upload_cfg: UploadCfg,
    make_data_public: bool,
) -> Result<Folders> {
    // Upload all file content first so Folder entries reference stored data.
    let files_uploader = FilesUploader::new(self.client.clone(), self.wallet_dir.clone())
        .set_upload_cfg(upload_cfg)
        .set_make_data_public(make_data_public)
        .insert_entries(self.iter_only_files());
    let _summary = files_uploader.start_upload().await?;

    let wallet = load_account_wallet_or_create_with_mnemonic(&self.wallet_dir, None)?;
    let mut wallet_client = WalletClient::new(self.client.clone(), wallet);

    // Collect addresses to pay for: the register of each new Folder, plus any
    // metadata addresses each Folder still needs paid.
    let mut net_addresses = vec![];
    let mut new_folders = 0;
    folders.iter().for_each(|(_, (folder, folder_change))| {
        if folder_change.is_new_folder() {
            net_addresses.push(folder.as_net_addr());
            new_folders += 1;
        }
        net_addresses.extend(folder.meta_addrs_to_pay());
    });

    let payment_result = wallet_client
        .pay_for_storage(net_addresses.into_iter())
        .await?;
    // Total cost = storage + royalties; checked_add guards against overflow.
    match payment_result
        .storage_cost
        .checked_add(payment_result.royalty_fees)
    {
        Some(cost) => {
            let balance = wallet_client.balance();
            println!("Made payment of {cost} for {new_folders} Folders. New balance: {balance}",)
        }
        None => bail!("Failed to calculate total payment cost"),
    }

    // Sync all Folders concurrently; each task reports its own outcome and
    // returns the Folder regardless of sync success.
    let mut tasks = JoinSet::new();
    for (path, (mut folder, folder_change)) in folders {
        let op = if folder_change.is_new_folder() {
            "Creation"
        } else {
            "Syncing"
        };

        tasks.spawn(async move {
            match folder.sync(upload_cfg).await {
                Ok(()) => {
                    println!(
                        "{op} of Folder (for {path:?}) succeeded. Address: {}",
                        folder.address().to_hex()
                    );
                }
                Err(err) => {
                    println!("{op} of Folder (for {path:?}) failed: {err}")
                }
            }
            (path, folder, folder_change)
        });
    }

    let mut synced_folders = Folders::new();
    while let Some(res) = tasks.join_next().await {
        match res {
            Ok((path, folder, c)) => {
                synced_folders.insert(path, (folder, c));
            }
            Err(err) => {
                // A task failed to join (e.g. panicked); its Folder is dropped.
                println!("Failed to sync/create a Folder with/on the network: {err:?}");
            }
        }
    }

    Ok(synced_folders)
}
/// Download the given Folders — and, transitively, the subfolders they
/// contain — onto disk, collecting the files they reference, then download
/// the content of all of those files. Returns the map of Folders visited.
async fn download_folders_and_files(
    &self,
    mut folders_to_download: Vec<(OsString, FoldersApi, PathBuf)>,
    batch_size: usize,
    retry_strategy: RetryStrategy,
) -> Result<Folders> {
    let mut files_to_download = vec![];
    let mut updated_folders = Folders::new();
    // Work the vec as a stack; subfolders found get pushed back onto it.
    while let Some((name, mut folders_api, target_path)) = folders_to_download.pop() {
        if updated_folders.contains_key(&target_path) {
            // Already processed this target dir; skip duplicates.
            continue;
        }

        println!(
            "Downloading Folder {name:?} from {}",
            folders_api.address().to_hex()
        );
        // Pushes subfolders onto `folders_to_download` and files onto
        // `files_to_download`.
        self.download_folder_from_network(
            &target_path,
            &mut folders_api,
            &mut files_to_download,
            &mut folders_to_download,
        )
        .await?;
        updated_folders.insert(target_path, (folders_api, FolderChange::NoChange));
    }

    // Now fetch the content of every file discovered above.
    let files_api: FilesApi = FilesApi::new(self.client.clone(), self.files_dir.clone());
    for (file_name, data_map_chunk, path) in files_to_download {
        download_file(
            files_api.clone(),
            *data_map_chunk.name(),
            (file_name, Some(data_map_chunk)),
            &path,
            false,
            batch_size,
            retry_strategy,
        )
        .await;
    }

    Ok(updated_folders)
}
/// Process a single Folder's entries into `target_path`:
/// - file entries are queued in `files_to_download` (an empty placeholder file
///   is created immediately);
/// - subfolder entries are retrieved and queued in `folders_to_download`;
/// - tracking info is stored for every entry processed.
/// Entries whose local tracking info already has the same metadata xorname are
/// skipped as up to date.
async fn download_folder_from_network(
    &self,
    target_path: &Path,
    folders_api: &mut FoldersApi,
    files_to_download: &mut Vec<(OsString, Chunk, PathBuf)>,
    folders_to_download: &mut Vec<(OsString, FoldersApi, PathBuf)>,
) -> Result<()> {
    for (entry_hash, (meta_xorname, metadata)) in folders_api.entries().await?.into_iter() {
        let name = metadata.name.clone();
        let item_path = target_path.join(name.clone());
        if let Ok(Some(tracking_info)) = self.get_tracking_info(&item_path) {
            if tracking_info.meta_xorname == meta_xorname {
                // Local item already matches this entry; nothing to download.
                continue;
            }
        }

        match &metadata.content {
            FolderEntry::File(data_map_chunk) => {
                files_to_download.push((
                    name.clone().into(),
                    data_map_chunk.clone(),
                    target_path.to_path_buf(),
                ));
                // Create an empty placeholder so the file exists on disk even
                // before its content is fetched.
                let _ = File::create(&item_path)?;
            }
            FolderEntry::Folder(subfolder_addr) => {
                // Retrieve the subfolder and queue it for the caller's loop.
                let folders_api = FoldersApi::retrieve(
                    self.client.clone(),
                    &self.wallet_dir,
                    *subfolder_addr,
                )
                .await?;

                folders_to_download.push((name.clone().into(), folders_api, item_path.clone()));
                create_dir_all(&item_path)?;
            }
        };

        self.store_tracking_info(MetadataTrackingInfo {
            file_path: item_path,
            meta_xorname,
            metadata,
            entry_hash,
        })?;
    }

    Ok(())
}
}
/// Remove the entry with `entry_hash` from the Folder corresponding to the
/// parent directory of `path`, marking that parent Folder as changed. A no-op
/// when the parent isn't tracked in `folders`.
fn remove_from_parent(folders: &mut Folders, path: &Path, entry_hash: EntryHash) -> Result<()> {
    let parent_entry = path.parent().and_then(|parent| folders.get_mut(parent));
    match parent_entry {
        Some((parent_folder, folder_change)) => {
            folder_change.has_new_entries();
            parent_folder.remove_item(entry_hash)?;
            Ok(())
        }
        None => Ok(()),
    }
}
/// Replace the entry with `entry_hash` in the given Folder by a new file entry
/// (name + data map), marking the Folder as mutated. Returns the new entry
/// hash, metadata xorname and metadata.
fn replace_item_in_folder(
    folder: &mut OccupiedEntry<'_, PathBuf, (FoldersApi, FolderChange)>,
    entry_hash: EntryHash,
    file_name: OsString,
    data_map: Chunk,
    encryption_pk: Option<PublicKey>,
) -> Result<(EntryHash, XorName, Metadata)> {
    let (folders_api, folder_change) = folder.get_mut();
    // The parent Folder will need to be synced with the network.
    folder_change.has_new_entries();
    // `file_name` and `data_map` are owned and not used again, so move them
    // into the call (the original cloned both redundantly).
    let res = folders_api.replace_file(entry_hash, file_name, data_map, encryption_pk)?;
    Ok(res)
}
/// Search the Folder tracked for `path`'s parent directory for an entry named
/// `name`, returning its metadata xorname when found.
fn find_by_name_in_parent_folder(name: &str, path: &Path, folders: &Folders) -> Option<XorName> {
    let parent = path.parent()?;
    let (folder, _) = folders.get(parent)?;
    let (meta_xorname, _) = folder.find_by_name(name)?;
    Some(*meta_xorname)
}
/// Derive the client's signer key and the root Folder address from the root
/// secret key: the signer key comes from the OWNER derivation index and is
/// installed on the client; the Folder register's xorname is derived from the
/// public key at the ADDR derivation index.
fn derive_keys_and_address(
    mut client: Client,
    root_sk: &MainSecretKey,
) -> (Client, RegisterAddress) {
    // Install the derived owner key as the client's signer key.
    client.set_signer_key(
        root_sk
            .derive_key(&ACC_PACKET_OWNER_DERIVATION_INDEX)
            .secret_key(),
    );

    // Deterministic xorname from a second, address-only derived key.
    let addr_pk = root_sk
        .derive_key(&ACC_PACKET_ADDR_DERIVATION_INDEX)
        .secret_key()
        .public_key();
    let xorname = XorName::from_content(&addr_pk.to_bytes());

    let root_folder_addr = RegisterAddress::new(xorname, client.signer_pk());
    (client, root_folder_addr)
}
#[cfg(test)]
mod tests {
use crate::acc_packet::{
derive_keys_and_address, RECOVERY_SEED_FILENAME, SAFE_TRACKING_CHANGES_DIR,
};
use super::{
read_root_folder_addr, read_tracking_info_from_disk, AccountPacket, Metadata,
MetadataTrackingInfo, Mutation, ACC_PACKET_ADDR_DERIVATION_INDEX,
ACC_PACKET_OWNER_DERIVATION_INDEX,
};
use rand::{thread_rng, Rng};
use sn_client::{
protocol::storage::{Chunk, RetryStrategy},
registers::{EntryHash, RegisterAddress},
test_utils::{get_funded_wallet, get_new_client, random_file_chunk},
transfers::MainSecretKey,
FolderEntry, UploadCfg, BATCH_SIZE,
};
use bls::SecretKey;
use bytes::Bytes;
use eyre::{bail, eyre, Result};
use std::{
collections::{BTreeMap, BTreeSet},
fs::{create_dir_all, remove_dir_all, remove_file, File, OpenOptions},
io::{Read, Write},
path::{Path, PathBuf},
};
use xor_name::XorName;
// Common sync options used by the tests below: store verification on, default
// batch size, quick retries, and `make_data_public == false` (entry metadata
// kept encrypted).
const SYNC_OPTS: (UploadCfg, bool) = {
    let cfg = UploadCfg {
        verify_store: true,
        batch_size: BATCH_SIZE,
        retry_strategy: RetryStrategy::Quick,
        show_holders: false,
        max_repayments_for_failed_data: 1,
        collect_registers: false,
    };
    let make_data_public = false;
    (cfg, make_data_public)
};
/// Exercises the private helpers: address derivation, `get_relative_path`,
/// `store_tracking_info` and `remove_tracking_info`.
#[tokio::test]
async fn test_acc_packet_private_helpers() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let files_path = tmp_dir.path().join("myfiles");
    create_dir_all(&files_path)?;

    // Compute the expected root Folder address by hand from the derivation
    // indexes, to compare with what the helpers produce.
    let owner_pk = root_sk
        .derive_key(&ACC_PACKET_OWNER_DERIVATION_INDEX)
        .secret_key()
        .public_key();
    let xorname = XorName::from_content(
        &root_sk
            .derive_key(&ACC_PACKET_ADDR_DERIVATION_INDEX)
            .secret_key()
            .public_key()
            .to_bytes(),
    );
    let expected_folder_addr = RegisterAddress::new(xorname, owner_pk);

    let acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &files_path, &root_sk, None)?;
    assert_eq!(
        derive_keys_and_address(client, &root_sk).1,
        expected_folder_addr
    );
    assert_eq!(acc_packet.root_folder_addr(), expected_folder_addr);

    let mut test_files = create_test_files_on_disk(&files_path)?;
    let mut rng = rand::thread_rng();
    let dummy_metadata = Metadata {
        name: "dummy".to_string(),
        content: FolderEntry::File(Chunk::new(Bytes::new())),
    };
    for (relative_path, _) in test_files.iter() {
        let abs_path = files_path.join(relative_path);
        assert!(
            matches!(acc_packet.get_relative_path(&abs_path), Ok(p) if &p == relative_path),
            "AccountPacket::get_relative_path helper returned invalid path"
        );

        // Storing tracking info must create a file named after the xorname.
        let meta_xorname = XorName::random(&mut rng);
        acc_packet.store_tracking_info(MetadataTrackingInfo {
            file_path: abs_path,
            meta_xorname,
            metadata: dummy_metadata.clone(),
            entry_hash: EntryHash::default(),
        })?;
        assert!(acc_packet.meta_dir.join(hex::encode(meta_xorname)).exists());
    }

    // All stored info should be readable back from disk, and removable.
    let tracking_info = read_tracking_info_from_disk(&acc_packet.meta_dir)?;
    assert_eq!(tracking_info.len(), test_files.len());
    for (abs_path, info) in tracking_info.iter() {
        assert!(test_files.remove(abs_path).is_some());
        acc_packet.remove_tracking_info(info.meta_xorname);
        assert!(!acc_packet
            .meta_dir
            .join(hex::encode(info.meta_xorname))
            .exists());
    }

    Ok(())
}
/// Sync an empty dir as an account packet, retrieve it into a clone path, and
/// check both packets match (files, dirs and tracking info — all empty here).
#[tokio::test]
async fn test_acc_packet_from_empty_dir() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let _ = get_funded_wallet(&client, wallet_dir).await?;

    let src_files_path = tmp_dir.path().join("myaccpacketempty");
    create_dir_all(&src_files_path)?;

    let mut acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &src_files_path, &root_sk, None)?;
    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    // Retrieve the same (empty) hierarchy into a separate path.
    let clone_files_path = tmp_dir.path().join("myaccpacketempty-clone");
    let cloned_acc_packet = AccountPacket::retrieve_folders(
        &client,
        wallet_dir,
        &root_sk,
        None,
        &clone_files_path,
        BATCH_SIZE,
        RetryStrategy::Quick,
    )
    .await?;

    check_files_and_dirs_match(&acc_packet, &cloned_acc_packet, BTreeMap::new())?;
    check_tracking_info_match(&acc_packet, &cloned_acc_packet, BTreeMap::new())?;

    Ok(())
}
/// Upload a small files/folders hierarchy, then download it into a fresh path
/// and verify content and tracking info match the original packet.
#[tokio::test]
async fn test_acc_packet_upload_download() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let _ = get_funded_wallet(&client, wallet_dir).await?;

    let src_files_path = tmp_dir.path().join("myaccpacket");
    let expected_files = create_test_files_on_disk(&src_files_path)?;

    let mut acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &src_files_path, &root_sk, None)?;
    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    let download_files_path = tmp_dir.path().join("myaccpacket-downloaded");
    let downloaded_acc_packet = AccountPacket::retrieve_folders(
        &client,
        wallet_dir,
        &root_sk,
        None,
        &download_files_path,
        BATCH_SIZE,
        RetryStrategy::Quick,
    )
    .await?;

    check_files_and_dirs_match(&acc_packet, &downloaded_acc_packet, expected_files.clone())?;
    check_tracking_info_match(&acc_packet, &downloaded_acc_packet, expected_files)?;

    Ok(())
}
/// Verify the exact set of mutations detected by the scanner: four NewFile /
/// NewFolder mutations before the first sync, then eight mutations after
/// mutating files/folders on disk.
#[tokio::test]
async fn test_acc_packet_scan_files_and_folders_changes() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let _ = get_funded_wallet(&client, wallet_dir).await?;

    let files_path = tmp_dir.path().join("myaccpacket-to-scan");
    let mut test_files = create_test_files_on_disk(&files_path)?;
    // Canonicalise so mutation paths compare equal to what the scanner reports.
    let files_path = files_path.canonicalize()?;

    let mut acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &files_path, &root_sk, None)?;

    // Before the first sync everything counts as new: 2 files + 2 folders.
    let changes = acc_packet.scan_files_and_folders_for_changes(false)?;
    assert_eq!(changes.mutations.len(), 4);
    assert!(changes.mutations.iter().all(|mutation| {
        matches!(mutation, Mutation::NewFile(i) if i.file_path == files_path.join("file0.txt"))
        || matches!(mutation, Mutation::NewFile(i) if i.file_path == files_path.join("dir1").join("file1.txt"))
        || matches!(mutation, Mutation::NewFolder(i) if i.file_path == files_path.join("dir1"))
        || matches!(mutation, Mutation::NewFolder(i) if i.file_path == files_path.join("dir2"))
    }), "at least one of the mutations detected was unexpected/incorrect");

    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    // After mutating on disk: one content change, two removals, and five new
    // files/folders.
    mutate_test_files_on_disk(&files_path, &mut test_files)?;

    let changes = acc_packet.scan_files_and_folders_for_changes(false)?;
    assert_eq!(changes.mutations.len(), 8);
    assert!(changes.mutations.iter().all(|mutation| {
        matches!(mutation, Mutation::FileContentChanged((_,i)) if i.file_path == files_path.join("file0.txt"))
        || matches!(mutation, Mutation::FileRemoved((p, _)) if p == &files_path.join("dir1").join("file1.txt"))
        || matches!(mutation, Mutation::FolderRemoved((p,_)) if p == &files_path.join("dir2"))
        || matches!(mutation, Mutation::NewFolder(i) if i.file_path == files_path.join("dir3"))
        || matches!(mutation, Mutation::NewFolder(i) if i.file_path == files_path.join("dir3").join("dir3_1"))
        || matches!(mutation, Mutation::NewFile(i) if i.file_path == files_path.join("dir3").join("dir3_1").join("file3.txt"))
        || matches!(mutation, Mutation::NewFolder(i) if i.file_path == files_path.join("dir4"))
        || matches!(mutation, Mutation::NewFolder(i) if i.file_path == files_path.join("dir4").join("dir4_1"))
    }), "at least one of the mutations detected was unexpected/incorrect");

    Ok(())
}
/// Two packets tracking the same root Folder: mutate the clone on disk, sync
/// it, then sync the original and verify both converge to the same state.
#[tokio::test]
async fn test_acc_packet_sync_mutations() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let _ = get_funded_wallet(&client, wallet_dir).await?;

    let src_files_path = tmp_dir.path().join("myaccpackettosync");
    let mut expected_files = create_test_files_on_disk(&src_files_path)?;

    let mut acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &src_files_path, &root_sk, None)?;
    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    // Retrieve the same hierarchy into a second tracked path.
    let clone_files_path = tmp_dir.path().join("myaccpackettosync-clone");
    let mut cloned_acc_packet = AccountPacket::retrieve_folders(
        &client,
        wallet_dir,
        &root_sk,
        None,
        &clone_files_path,
        BATCH_SIZE,
        RetryStrategy::Quick,
    )
    .await?;

    // Mutate the clone and push; the original's next sync should pull the
    // remote changes down.
    mutate_test_files_on_disk(&clone_files_path, &mut expected_files)?;
    cloned_acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    check_files_and_dirs_match(&acc_packet, &cloned_acc_packet, expected_files.clone())?;
    check_tracking_info_match(&acc_packet, &cloned_acc_packet, expected_files)?;

    Ok(())
}
/// Verify an account packet keeps working after its whole directory is moved
/// elsewhere on disk, still detecting a file modified before the move.
// Runs only on Linux. The original attribute was
// `#[cfg(any(target_os = "linux", target_os = "linux"))]`, which listed the
// same predicate twice; a single `target_os = "linux"` check is equivalent.
#[cfg(target_os = "linux")]
#[tokio::test]
async fn test_acc_packet_moved_folder() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let _ = get_funded_wallet(&client, wallet_dir).await?;

    let src_files_path = tmp_dir.path().join("myaccpacket-to-move");
    let mut test_files = create_test_files_on_disk(&src_files_path)?;

    let mut acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &src_files_path, &root_sk, None)?;
    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    // Modify a file before moving the dir, so a change is pending afterwards.
    let new_chunk = random_file_chunk();
    let file2modify = Path::new("dir1").join("file1.txt");
    OpenOptions::new()
        .write(true)
        .open(src_files_path.join(&file2modify))?
        .write_all(new_chunk.value())?;
    test_files.insert(file2modify, Some(new_chunk));

    // Move the whole tracked directory to a new location.
    let moved_files_path = tmp_dir.path().join("myaccpacket-moved");
    create_dir_all(&moved_files_path)?;
    std::fs::rename(src_files_path, &moved_files_path)?;
    let moved_files_path = moved_files_path.canonicalize()?;

    let moved_acc_packet =
        AccountPacket::from_path(client.clone(), wallet_dir, &moved_files_path, None)?;

    // The only pending change should be the file modified before the move.
    let changes = moved_acc_packet.scan_files_and_folders_for_changes(false)?;
    assert_eq!(changes.mutations.len(), 1);
    assert_eq!(changes.mutations.first().map(|mutation| {
        matches!(mutation, Mutation::FileContentChanged((_,i)) if i.file_path == moved_files_path.join("dir1").join("file1.txt"))
    }), Some(true));

    check_tracking_info_match(&moved_acc_packet, &moved_acc_packet, test_files)?;

    Ok(())
}
/// Retrieval with a different root SK must fail: it derives a different
/// signer key and root Folder address than the one the packet was synced under.
#[tokio::test]
async fn test_acc_packet_derived_address() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();
    let _ = get_funded_wallet(&client, wallet_dir).await?;

    let files_path = tmp_dir.path().join("myaccpacket-unencrypted-metadata");
    let _ = create_test_files_on_disk(&files_path)?;

    let mut acc_packet =
        AccountPacket::init(client.clone(), wallet_dir, &files_path, &root_sk, None)?;
    acc_packet.sync(SYNC_OPTS.0, SYNC_OPTS.1).await?;

    // Attempt retrieval with an unrelated root SK: must not succeed.
    let download_files_path = tmp_dir.path().join("myaccpacket-downloaded");
    let other_root_sk = MainSecretKey::random();

    if AccountPacket::retrieve_folders(
        &client,
        wallet_dir,
        &other_root_sk,
        None,
        &download_files_path,
        BATCH_SIZE,
        RetryStrategy::Quick,
    )
    .await
    .is_ok()
    {
        bail!("acc-packet retrieval succeeded unexpectedly");
    }

    Ok(())
}
/// Without a password the recovery seed is stored in plaintext; with a
/// password it is encrypted and can only be loaded with that same password.
#[tokio::test]
async fn test_acc_packet_recovery_seed_encryption() -> Result<()> {
    let client = get_new_client(SecretKey::random()).await?;
    let root_sk = MainSecretKey::random();
    let tmp_dir = tempfile::tempdir()?;
    let wallet_dir = tmp_dir.path();

    // Plaintext case: init and load without a password.
    let src_files_path = tmp_dir.path().join("myaccpacket_unencrypted_seed");
    create_dir_all(&src_files_path)?;

    let _ = AccountPacket::init(client.clone(), wallet_dir, &src_files_path, &root_sk, None)?;
    let _ = AccountPacket::from_path(client.clone(), wallet_dir, &src_files_path, None)?;
    // The stored seed bytes are exactly the root SK: stored in plaintext.
    let bytes = std::fs::read(
        src_files_path
            .join(SAFE_TRACKING_CHANGES_DIR)
            .join(RECOVERY_SEED_FILENAME),
    )?;
    assert_eq!(bytes, root_sk.to_bytes());

    // Supplying a password for a plaintext seed must fail to load.
    if AccountPacket::from_path(
        client.clone(),
        wallet_dir,
        &src_files_path,
        Some(b"123456789"),
    )
    .is_ok()
    {
        bail!("acc-packet loading with a password succeeded unexpectedly");
    }

    // Encrypted case: init with a random password.
    let src_files_path = tmp_dir.path().join("myaccpacket_encrypted_seed");
    create_dir_all(&src_files_path)?;

    let mut rng = thread_rng();
    let password: [u8; 32] = rng.gen();
    let incorrect_password: [u8; 32] = rng.gen();
    let _ = AccountPacket::init(
        client.clone(),
        wallet_dir,
        &src_files_path,
        &root_sk,
        Some(&password),
    )?;

    // Loading without a password, or with a wrong one, must fail.
    if AccountPacket::from_path(client.clone(), wallet_dir, &src_files_path, None).is_ok() {
        bail!("acc-packet loading without a password succeeded unexpectedly");
    }

    if AccountPacket::from_path(
        client.clone(),
        wallet_dir,
        &src_files_path,
        Some(&incorrect_password),
    )
    .is_ok()
    {
        bail!("acc-packet loading with incorrect password succeeded unexpectedly");
    }

    let _ =
        AccountPacket::from_path(client.clone(), wallet_dir, &src_files_path, Some(&password))?;

    // The stored seed is non-empty and no longer the raw root SK bytes.
    let bytes = std::fs::read(
        src_files_path
            .join(SAFE_TRACKING_CHANGES_DIR)
            .join(RECOVERY_SEED_FILENAME),
    )?;
    assert!(!bytes.is_empty());
    assert_ne!(bytes, root_sk.to_bytes());

    Ok(())
}
// Creates a small fixture tree under `base_path`: two files filled with
// random chunk content plus one empty directory. Returns the map of
// relative path -> expected chunk, where `None` marks an empty directory.
fn create_test_files_on_disk(base_path: &Path) -> Result<BTreeMap<PathBuf, Option<Chunk>>> {
    let mut expected = BTreeMap::new();
    expected.insert(
        Path::new("file0.txt").to_path_buf(),
        Some(random_file_chunk()),
    );
    expected.insert(
        Path::new("dir1").join("file1.txt"),
        Some(random_file_chunk()),
    );
    expected.insert(Path::new("dir2").to_path_buf(), None);

    // Materialise every planned entry on disk.
    for (relative_path, content) in expected.iter() {
        let abs_path = base_path.join(relative_path);
        match content {
            Some(chunk) => {
                create_dir_all(abs_path.parent().expect("invalid path for test file"))?;
                File::create(abs_path)?.write_all(chunk.value())?;
            }
            None => create_dir_all(abs_path)?,
        }
    }

    Ok(expected)
}
// Applies a fixed set of mutations to the fixture tree rooted at `path`,
// keeping `test_files` (relative path -> expected chunk) in sync:
// one file is overwritten, one file and one dir are removed, and a new
// nested file plus a new empty nested directory are created.
fn mutate_test_files_on_disk(
    path: &Path,
    test_files: &mut BTreeMap<PathBuf, Option<Chunk>>,
) -> Result<()> {
    // Overwrite an existing file with fresh random content.
    let modified_chunk = random_file_chunk();
    let modified_file = Path::new("file0.txt");
    OpenOptions::new()
        .write(true)
        .open(path.join(modified_file))?
        .write_all(modified_chunk.value())?;
    test_files.insert(modified_file.to_path_buf(), Some(modified_chunk));

    // Delete a file; its parent dir becomes empty and thus expected as a dir.
    let removed_file = Path::new("dir1").join("file1.txt");
    remove_file(path.join(&removed_file))?;
    test_files.remove(&removed_file);
    test_files.insert(Path::new("dir1").to_path_buf(), None);

    // Delete an entire directory.
    let removed_dir = Path::new("dir2");
    remove_dir_all(path.join(removed_dir))?;
    test_files.remove(removed_dir);

    // Create a brand new file inside freshly created nested directories.
    create_dir_all(path.join("dir3").join("dir3_1"))?;
    let created_file = Path::new("dir3").join("dir3_1").join("file3.txt");
    let created_chunk = random_file_chunk();
    File::create(path.join(&created_file))?.write_all(created_chunk.value())?;
    test_files.insert(created_file, Some(created_chunk));

    // Create a new empty nested directory.
    let created_dir = Path::new("dir4").join("dir4_1");
    create_dir_all(path.join(&created_dir))?;
    test_files.insert(created_dir.to_path_buf(), None);

    Ok(())
}
// Returns true only when `path` is a readable directory containing no
// entries; unreadable paths and non-directories yield false.
fn is_empty_dir(path: &Path) -> bool {
    match path.read_dir() {
        Ok(mut entries) => entries.next().is_none(),
        Err(_) => false,
    }
}
// Builds the set of relative paths for all files plus all *empty*
// directories tracked by `acc_packet`. Non-empty directories are skipped
// (they are represented by the files they contain); the `bail!` inside
// the closure yields an `Err`, which `flat_map` simply drops.
fn list_of_files_and_empty_dirs(acc_packet: &AccountPacket) -> BTreeSet<PathBuf> {
    let files = acc_packet.iter_only_files();
    let dirs = acc_packet.iter_only_dirs();
    files
        .chain(dirs)
        .flat_map(|entry| {
            let entry_path = entry.path();
            if entry_path.is_dir() && !is_empty_dir(entry_path) {
                bail!("we skip non empty dirs");
            }
            acc_packet.get_relative_path(entry_path)
        })
        .collect()
}
// Verifies that the on-disk tracking info kept by `src_packet` and
// `target_packet` is identical, and that together it accounts exactly for
// `expected_files` (relative path -> expected chunk; `None` marks an
// empty directory). Consumes entries from `expected_files` as they are
// matched and fails if anything is left over or unaccounted for.
fn check_tracking_info_match(
src_packet: &AccountPacket,
target_packet: &AccountPacket,
mut expected_files: BTreeMap<PathBuf, Option<Chunk>>,
) -> Result<()> {
// Both packets must track the same root folder address, with the
// `created` flag set to true.
let root_addr = src_packet.root_folder_addr();
assert_eq!(
read_root_folder_addr(&src_packet.meta_dir)?,
(root_addr, true),
"Root folder address doesn't match in source directory tracking info."
);
assert_eq!(
read_root_folder_addr(&target_packet.meta_dir)?,
(root_addr, true),
"Root folder address doesn't match in target directory tracking info."
);
let src_tracking_info = read_tracking_info_from_disk(&src_packet.meta_dir)?;
let mut target_tracking_info = read_tracking_info_from_disk(&target_packet.meta_dir)?;
// Every entry tracked by the source must exist, with equal content, in
// the target; matched target entries are removed so leftovers can be
// detected afterwards.
for (path, src_tracking_info) in src_tracking_info {
match target_tracking_info.remove(&path) {
None => {
bail!("Tracking info found in source is missing in target directory for file/dir: {path:?}")
}
Some(info) => {
if info != src_tracking_info {
bail!("Different tracking info kept in source and target for file/dir: {path:?}");
}
}
}
let abs_path = src_packet.files_dir.join(&path);
if abs_path.is_dir() {
// Directories must be tracked with their own path and as Folder
// metadata. Only *empty* dirs appear in `expected_files` (non-empty
// dirs are represented there by the files they contain), so only
// those are consumed from the expected map.
assert_eq!(src_tracking_info.file_path, path,
"Incorrect path in tracking info found in source and target directories for dir: {path:?}");
assert!(matches!(src_tracking_info.metadata.content, FolderEntry::Folder(_)),
"Incorrect tracking info found in source and target directories for dir: {path:?}");
if is_empty_dir(&abs_path) {
let _ = expected_files.remove(&path).ok_or_else(|| {
eyre!(
"Unexpected tracking info found on source and target directories for dir: {path:?}"
)
})?;
}
} else {
// Non-dir paths must have a matching expected entry: `Some(chunk)`
// entries must be tracked as files, `None` entries as folders
// (e.g. a dir that was removed from disk but expected as empty).
let chunk = expected_files.remove(&path).ok_or_else(|| {
eyre!(
"Unexpected tracking info found on source and target directories for file: {path:?}"
)
})?;
if chunk.is_some() {
assert!(matches!(src_tracking_info.metadata.content, FolderEntry::File(_)),
"Tracking info found in source and target directories don't match the file: {path:?}");
} else {
assert!(matches!(src_tracking_info.metadata.content, FolderEntry::Folder(_)),
"Tracking info found in source and target directories don't match the dir: {path:?}");
}
}
}
// Anything still left in the target map has no source counterpart.
if !target_tracking_info.is_empty() {
bail!("Tracking info found in target directory but missing in source directory: {target_tracking_info:?}");
}
// Anything still left in the expected map was never seen in the
// tracking info of either packet.
if !expected_files.is_empty() {
bail!("Some expected file/dir/s are lacking their tracking info in source or target directories: {expected_files:?}");
}
Ok(())
}
// Verifies that the actual files and empty dirs on disk under the source
// and target packets' file dirs are identical to each other and match
// `expected_files` (relative path -> expected chunk content; `None` marks
// an empty directory). Consumes matched entries from both the target set
// and `expected_files` so any leftovers can be reported at the end.
fn check_files_and_dirs_match(
src_packet: &AccountPacket,
target_packet: &AccountPacket,
mut expected_files: BTreeMap<PathBuf, Option<Chunk>>,
) -> Result<()> {
let mut target_packet_files: BTreeSet<PathBuf> =
list_of_files_and_empty_dirs(target_packet);
for relative_path in list_of_files_and_empty_dirs(src_packet) {
// Each source entry must exist in the target set; remove it so
// target-only leftovers can be detected after the loop.
if !target_packet_files.remove(&relative_path) {
bail!("File/dir found in source is missing in target directory: {relative_path:?}");
}
let src_path = src_packet.files_dir.join(&relative_path);
let target_path = target_packet.files_dir.join(&relative_path);
let chunk = expected_files.remove(&relative_path).ok_or_else(|| {
eyre!("Unexpected file/dir found on source and target directories: {src_path:?}")
})?;
if let Some(chunk) = chunk {
// Expected a file: both copies must exist and their full content
// must equal the expected chunk's bytes.
let mut src_file = File::open(&src_path)
.map_err(|err| eyre!("couldn't open source file {src_path:?}: {err:?}"))?;
let mut target_file = File::open(&target_path)
.map_err(|err| eyre!("couldn't open target file {target_path:?}: {err:?}"))?;
let mut src_content = Vec::new();
src_file
.read_to_end(&mut src_content)
.expect("couldn't read source file");
let mut target_content = Vec::new();
target_file
.read_to_end(&mut target_content)
.expect("couldn't read target file");
assert_eq!(
src_content,
chunk.value().slice(..),
"source file content doesn't match with expected"
);
assert_eq!(
target_content,
chunk.value().slice(..),
"target file content doesn't match with expected"
);
} else {
// Expected an empty directory: both paths must be directories.
assert!(src_path.is_dir(), "source path is not a dir {src_path:?}");
assert!(
target_path.is_dir(),
"target path is not a dir {target_path:?}"
);
}
}
// Entries remaining in the target set exist only in the target.
if !target_packet_files.is_empty() {
bail!("File/dir/s found in target directory but missing in source directory: {target_packet_files:?}");
}
// Entries remaining in the expected map were found in neither packet.
if !expected_files.is_empty() {
bail!("Some expected file/dir/s were not found in source or target directories: {expected_files:?}");
}
Ok(())
}
}