use ic_agent::{Agent, identity::Secp256k1Identity};
use ic_agent::agent::http_transport::ReqwestHttpReplicaV2Transport;
use garcon::Delay;
use std::fs;
use std::path::Path;
use std::ffi::OsStr;
use sha256::digest_bytes;
use rayon::prelude::*;
use candid::{Encode, Decode, Nat, Principal};
mod databox_did;
pub use databox_did::{ClearAllResult, DeleteKeyResult, UploadResult, Avatar, PUT, Chunk, FilePut, PutResult, DataErr, FileExt, GetAssetExtKeyResult, GET, GetPlainResult, CanisterStateResult, CycleBalanceResult, AvlSMResult, GetAssetExtsResult};
// Chunk payload size in bytes (~1.99 MB) used when slicing files for upload.
// NOTE(review): presumably chosen to stay below the IC's ~2 MB ingress
// message limit so each chunk fits in a single update call — confirm.
const UPDATE_SIZE: usize = 1992288;
/// Outcome of uploading one plain file to a data box canister.
#[derive(Debug)]
pub enum UploadStatus {
    /// Every chunk was accepted by the canister.
    Ok,
    /// The canister rejected a chunk; remaining chunks were not sent.
    Err(DataErr),
}
/// Per-file report returned by `put_plain_file` / `put_plain_files`.
#[derive(Debug)]
pub struct PutPlainFileResult {
    /// File name without its extension (the path's stem).
    pub file_name: String,
    /// MIME type derived from the extension (e.g. "application/pdf").
    pub file_extension: String,
    /// Key identifying the file inside the canister (derived from chunk digests).
    pub file_key: String,
    /// Whether every chunk uploaded, or the first canister-side error.
    pub upload_status: UploadStatus,
    /// Canister the file was uploaded to.
    pub databox_canister_id: Principal,
    /// Total file size in bytes.
    pub total_size: u64,
    /// Number of chunks the file was split into.
    pub chunk_number: u64,
}
pub async fn put_plain_files(pem_identity_path: &str, folder_path: &str, data_box_canister_id_text: &str,) -> Vec<PutPlainFileResult> {
let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
let mut ans: Vec<PutPlainFileResult> = Vec::new();
let paths = fs::read_dir(&folder_path).unwrap();
for path in paths {
let file_path = path.unwrap().file_name().into_string().unwrap();
let pos: Vec<&str> = file_path.split(".").collect();
let file_name = String::from(pos[0]);
let file_extension = String::from(get_file_type(&String::from(pos[1])));
let s = folder_path.to_owned() + &file_path;
let (file_size, slice_size, data_slice) = get_file_from_source(&s);
let puts = build_put_plain_args(
file_name.clone(),
file_extension.clone(),
file_size.try_into().unwrap(),
slice_size.try_into().unwrap(),
&data_slice,
);
let file_key = match &puts[0] {
FilePut::PlainFilePut(put) => {
match put {
PUT::segment {file_extension, order, chunk_number, chunk, aes_pub_key, file_name, file_key, total_size} => {
file_key.clone()
}
_ => {"".to_string()}
}
}
_ => {"".to_string()}
};
let mut flag = false;
for put in &puts {
let _response_blob = build_agent(pem_identity_path)
.update(&canister_id, "put")
.with_arg(Encode!(&put).expect("encode piece failed"))
.call_and_wait(get_waiter())
.await
.expect("response error");
let _response = Decode!(&_response_blob, PutResult).unwrap();
match _response {
PutResult::ok(..) => {
},
PutResult::err(data_err) => {
ans.push(PutPlainFileResult {
file_name: file_name.clone(),
file_extension: file_extension.clone(),
file_key: file_key.clone(),
upload_status: UploadStatus::Err(data_err),
databox_canister_id: canister_id,
total_size: file_size.try_into().unwrap(),
chunk_number: slice_size.try_into().unwrap(),
});
flag = true;
break;
}
}
}
if !flag { ans.push(PutPlainFileResult {
file_name: file_name.clone(),
file_extension: file_extension.clone(),
file_key: file_key.clone(),
upload_status: UploadStatus::Ok,
databox_canister_id: canister_id,
total_size: file_size.try_into().unwrap(),
chunk_number: slice_size.try_into().unwrap(),
}); }
}
ans
}
/// Uploads a single file as a plain (unencrypted) asset, chunk by chunk.
///
/// * `pem_identity_path` - path to the secp256k1 PEM file used to sign calls.
/// * `file_path_str` - path of the file to upload; must have an extension.
/// * `data_box_canister_id_text` - textual principal of the target canister.
///
/// Panics if the canister id is invalid, the file has no stem/extension, the
/// file cannot be read, or a transport-level call fails. A canister-side
/// rejection aborts the upload and is reported via `UploadStatus::Err`.
pub async fn put_plain_file(pem_identity_path: &str, file_path_str: &str, data_box_canister_id_text: &str,) -> PutPlainFileResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let file_path = Path::new(file_path_str);
    let file_name = file_path.file_stem().unwrap().to_str().unwrap().to_owned();
    let file_extension =
        String::from(get_file_type(file_path.extension().unwrap().to_str().unwrap()));
    let (file_size, slice_size, data_slice) = get_file_from_source(file_path_str);
    let puts = build_put_plain_args(
        file_name.clone(),
        file_extension.clone(),
        file_size.try_into().unwrap(),
        slice_size.try_into().unwrap(),
        &data_slice,
    );
    // All segments share one key; read it from the first segment.
    // `first()` (instead of `[0]`) avoids a panic on empty files.
    let file_key = match puts.first() {
        Some(FilePut::PlainFilePut(PUT::segment { file_key, .. })) => file_key.clone(),
        _ => String::new(),
    };
    // Build the agent once, not once per chunk as before.
    let agent = build_agent(pem_identity_path);
    for put in &puts {
        let response_blob = agent
            .update(&canister_id, "put")
            .with_arg(Encode!(&put).expect("encode piece failed"))
            .call_and_wait(get_waiter())
            .await
            .expect("response error");
        if let PutResult::err(data_err) = Decode!(&response_blob, PutResult).unwrap() {
            // Abort on the first rejected chunk and report the error.
            return PutPlainFileResult {
                file_name,
                file_extension,
                file_key,
                upload_status: UploadStatus::Err(data_err),
                databox_canister_id: canister_id,
                total_size: file_size.try_into().unwrap(),
                chunk_number: slice_size.try_into().unwrap(),
            };
        }
    }
    PutPlainFileResult {
        file_name,
        file_extension,
        file_key,
        upload_status: UploadStatus::Ok,
        databox_canister_id: canister_id,
        total_size: file_size.try_into().unwrap(),
        chunk_number: slice_size.try_into().unwrap(),
    }
}
/// Uploads an avatar image to the data box canister via its `upload` method.
/// The MIME type is derived from the file's extension.
/// Panics on an invalid canister id, unreadable file, missing extension, or
/// transport failure; the canister-side outcome is returned as `UploadResult`.
pub async fn upload_avatar(pem_identity_path: &str, data_box_canister_id_text: &str, avatar_file_path: &str) -> UploadResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let data = fs::read(avatar_file_path).expect("read file failed");
    let data_type = get_file_type(
        Path::new(avatar_file_path).extension().unwrap().to_str().unwrap(),
    )
    .to_string();
    let avatar = Avatar { data, data_type };
    let arg = Encode!(&avatar).expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .update(&canister_id, "upload")
        .with_arg(arg)
        .call_and_wait(get_waiter())
        .await
        .expect("response error");
    Decode!(&blob, UploadResult).unwrap()
}
/// Deletes the asset stored under `file_key` via the canister's `deletekey`
/// update method. Panics on an invalid canister id or transport failure; the
/// canister-side outcome is returned as `DeleteKeyResult`.
pub async fn delete_file(pem_identity_path: &str, data_box_canister_id_text: &str, file_key: String) -> DeleteKeyResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!(&file_key).expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .update(&canister_id, "deletekey")
        .with_arg(arg)
        .call_and_wait(get_waiter())
        .await
        .expect("response error");
    Decode!(&blob, DeleteKeyResult).unwrap()
}
/// Removes every asset from the data box via the canister's `clearall` update
/// method. Panics on an invalid canister id or transport failure; the
/// canister-side outcome is returned as `ClearAllResult`.
pub async fn clear_data_box(pem_identity_path: &str, data_box_canister_id_text: &str,) -> ClearAllResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .update(&canister_id, "clearall")
        .with_arg(arg)
        .call_and_wait(get_waiter())
        .await
        .expect("response error");
    Decode!(&blob, ClearAllResult).unwrap()
}
/// Downloads a plain (unencrypted) asset from the data box, reassembling it
/// from as many `getPlain` round trips as the asset's metadata prescribes.
///
/// Returns the file's raw bytes, or the canister-side `DataErr`. Returns
/// `DataErr::FileKeyErr` when the key does not refer to a plain asset.
/// Panics if the metadata lookup itself fails or a transport call errors.
pub async fn get_plain_file(pem_identity_path: &str, data_box_canister_id_text: &str, file_key: &str) -> Result<Vec<u8>, DataErr> {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let agent = build_agent(pem_identity_path);
    // The asset's metadata tells us how many fetches are required.
    let file_ext = get_file_info(pem_identity_path, data_box_canister_id_text, file_key)
        .await
        .unwrap();
    match file_ext {
        FileExt::PlainFileExt(asset_ext) => {
            let waiter = get_waiter();
            let need_query_times = asset_ext.need_query_times;
            let mut bytes: Vec<u8> = Vec::new();
            let mut i = 0;
            // Fetch chunk 0, 1, ... until the prescribed count is reached.
            while Nat::from(i) < need_query_times {
                let arg = GET {
                    flag: Nat::from(i),
                    file_key: file_key.to_string(),
                };
                let blob = agent
                    .update(&canister_id, "getPlain")
                    .with_arg(Encode!(&arg).expect("encode piece failed"))
                    .call_and_wait(waiter.clone())
                    .await
                    .expect("response error");
                i += 1;
                match Decode!(&blob, GetPlainResult).unwrap() {
                    // Chunks arrive in order; append each payload as it lands.
                    GetPlainResult::ok(mut payload) => bytes.append(&mut payload),
                    GetPlainResult::err(data_err) => return Err(data_err),
                }
            }
            Ok(bytes)
        }
        _ => Err(DataErr::FileKeyErr),
    }
}
/// Fetches the metadata record (`FileExt`) stored for `file_key` via the
/// canister's `getAssetextkey` query method.
/// Panics on an invalid canister id or transport failure; a canister-side
/// lookup failure is returned as `Err(DataErr)`.
pub async fn get_file_info(pem_identity_path: &str, data_box_canister_id_text: &str, file_key: &str) -> Result<FileExt, DataErr> {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!(&file_key).expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "getAssetextkey")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    match Decode!(&blob, GetAssetExtKeyResult).unwrap() {
        GetAssetExtKeyResult::ok(file_ext) => Ok(file_ext),
        GetAssetExtKeyResult::err(data_err) => Err(data_err),
    }
}
/// Lists metadata for the plain assets on the data box via the canister's
/// `getAssetexts` query method. Only the plain-asset portion of the response
/// is returned; any additional fields of the `ok` variant are ignored.
/// Panics on an invalid canister id or transport failure.
pub async fn get_all_plain_files_info(pem_identity_path: &str, data_box_canister_id_text: &str) -> Result<Vec<FileExt>, DataErr> {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "getAssetexts")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    match Decode!(&blob, GetAssetExtsResult).unwrap() {
        GetAssetExtsResult::ok(plain_assets, ..) => Ok(plain_assets),
        GetAssetExtsResult::err(data_err) => Err(data_err),
    }
}
/// Reads the canister's version number via its `getVersion` query method.
/// Panics on an invalid canister id or transport failure.
pub async fn get_version(pem_identity_path: &str, data_box_canister_id_text: &str,) -> Nat {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "getVersion")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    Decode!(&blob, Nat).unwrap()
}
/// Reads the canister's self-reported state via its `canisterState` query
/// method. Panics on an invalid canister id or transport failure.
pub async fn get_canister_state(pem_identity_path: &str, data_box_canister_id_text: &str,) -> CanisterStateResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "canisterState")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    Decode!(&blob, CanisterStateResult).unwrap()
}
/// Reads the canister's cycle balance via its `cycleBalance` query method.
/// Panics on an invalid canister id or transport failure.
pub async fn get_cycle_balance(pem_identity_path: &str, data_box_canister_id_text: &str,) -> CycleBalanceResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "cycleBalance")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    Decode!(&blob, CycleBalanceResult).unwrap()
}
/// Calls the canister's `avlSM` query method and returns its result.
/// Panics on an invalid canister id or transport failure.
pub async fn get_avl_sm(pem_identity_path: &str, data_box_canister_id_text: &str,) -> AvlSMResult {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "avlSM")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    Decode!(&blob, AvlSMResult).unwrap()
}
/// Reads the canister's owner principal via its `getOwner` query method.
/// Panics on an invalid canister id or transport failure.
pub async fn get_owner(pem_identity_path: &str, data_box_canister_id_text: &str,) -> candid::Principal {
    let canister_id = Principal::from_text(data_box_canister_id_text).unwrap();
    let arg = Encode!().expect("encode piece failed");
    let blob = build_agent(pem_identity_path)
        .query(&canister_id, "getOwner")
        .with_arg(arg)
        .call()
        .await
        .expect("response error");
    Decode!(&blob, candid::Principal).unwrap()
}
fn get_file_from_source(path: &str) -> (usize, usize, Vec<Vec<u8>>) {
let context = fs::read(path).expect("read file failed");
let size = context.len();
let slice_size = if context.len() % UPDATE_SIZE == 0 {
context.len() / UPDATE_SIZE
} else {
context.len() / UPDATE_SIZE + 1
};
let mut res = Vec::new();
for index in 0..slice_size {
if index == slice_size - 1 {
res.push(context[index * UPDATE_SIZE..context.len()].to_owned())
} else {
res.push(context[index * UPDATE_SIZE..(index + 1) * UPDATE_SIZE].to_owned())
}
}
(size, slice_size, res)
}
/// Builds one `FilePut::PlainFilePut` segment per data chunk.
///
/// Every segment carries the same metadata (key, name, extension, chunk count,
/// total size) plus its own payload and zero-based `order`. The shared
/// `file_key` is derived from the SHA-256 digests of all chunks.
fn build_put_plain_args(
    file_name: String,
    file_extension: String,
    total_size: u64,
    chunk_number: u64,
    data_slice: &Vec<Vec<u8>>,
) -> Vec<FilePut> {
    let file_key = get_file_key(&get_file_sha256_digest(data_slice));
    data_slice
        .iter()
        .enumerate() // replaces the manual `order` counter
        .map(|(order, data)| {
            FilePut::PlainFilePut(PUT::segment {
                aes_pub_key: None,
                file_key: file_key.clone(),
                file_name: file_name.clone(),
                file_extension: file_extension.clone(),
                chunk: Chunk { data: data.clone() },
                chunk_number: Nat::from(chunk_number),
                order: Nat::from(order),
                // u64 is Copy; the original's `.clone()` here was a no-op.
                total_size,
            })
        })
        .collect()
}
fn get_file_sha256_digest(context: &Vec<Vec<u8>>) -> Vec<Vec<u8>> {
let mut digests = vec![vec![0x00 as u8]; context.len()];
let mut contents = digests.iter_mut().zip(context.iter()).collect::<Vec<_>>();
contents
.par_iter_mut()
.for_each(|(d, text)| **d = digest_bytes(*text).into_bytes()[..32].to_vec());
digests
}
/// Derives the file key as the hex SHA-256 of the concatenated chunk digests.
///
/// NOTE(review): `vec![0x00; 32 * digests.len()]` pre-fills the buffer with
/// zeros and the loop then *pushes* after them, so the hashed input is
/// `32 * n` zero bytes followed by the digest bytes. This looks like it was
/// meant to be `Vec::with_capacity`, but changing it now would change every
/// generated file key and orphan previously uploaded data — confirm with the
/// canister's key expectations before "fixing".
fn get_file_key(digests: &Vec<Vec<u8>>) -> String {
    // Buffer starts as 32*n zero bytes; digest bytes are appended after them.
    let mut digest = vec![0x00 as u8; 32 * digests.len()];
    let mut _index = 0; // counts appended bytes; never read afterwards
    for bytes in digests {
        for byte in bytes {
            digest.push(*byte);
            _index += 1;
        }
    }
    // Hex-encoded SHA-256 of the (zero-prefixed) concatenation.
    digest_bytes(&digest)
}
/// Maps a lowercase file extension to the MIME type stored with the asset.
/// Unknown extensions fall back to `application/octet-stream`.
fn get_file_type(file_type: &str) -> &str {
    // A match is clearer and exhaustively checked compared to the previous
    // if/else-if chain; string values are unchanged.
    match file_type {
        "pdf" => "application/pdf",
        "jpg" | "jpeg" => "image/jpg",
        "png" => "image/png",
        "mp4" => "video/mp4",
        "mp3" => "audio/mp3",
        "gif" => "image/gif",
        "txt" => "text/plain",
        "ppt" | "pptx" => "application/vnd.ms-powerpoint",
        "html" | "xhtml" => "text/html",
        "doc" | "docx" => "application/msword",
        "xls" => "application/x-xls",
        "apk" => "application/vnd.android.package-archive",
        "svg" => "text/xml",
        "wmv" => "video/x-ms-wmv",
        _ => "application/octet-stream",
    }
}
/// Polling strategy for update calls: poll every 500 ms, give up after
/// 5 minutes (300 s).
fn get_waiter() -> Delay {
    garcon::Delay::builder()
        .throttle(std::time::Duration::from_millis(500))
        .timeout(std::time::Duration::from_secs(300))
        .build()
}
/// Builds an agent targeting IC mainnet (`https://ic0.app`), authenticated
/// with the secp256k1 identity loaded from the given PEM file.
/// Panics if the PEM file cannot be read or the agent cannot be constructed.
fn build_agent(pem_identity_path: &str) -> Agent {
    let identity =
        Secp256k1Identity::from_pem_file(pem_identity_path).unwrap();
    let transport = ReqwestHttpReplicaV2Transport::create("https://ic0.app".to_string())
        .expect("transport error");
    Agent::builder()
        .with_transport(transport)
        .with_identity(identity)
        .build()
        .expect("build agent error")
}