use crate::agent::AGENT_AGREEMENT_FIELDNAME;
use crate::agent::Agent;
use crate::agent::DOCUMENT_AGENT_SIGNATURE_FIELDNAME;
use crate::agent::SHA256_FIELDNAME;
use crate::agent::agreement::subtract_vecs;
use crate::agent::boilerplate::BoilerPlate;
use crate::agent::loaders::{FileLoader, fetch_public_key_from_hai};
use crate::agent::security::SecurityTraits;
use crate::config::{KeyResolutionSource, get_key_resolution_order};
use crate::error::JacsError;
use crate::storage::StorageDocumentTraits;
use base64::{Engine as _, engine::general_purpose::STANDARD};
use crate::crypt::hash::{hash_bytes, hash_string};
use crate::schema::utils::ValueExt;
use crate::time_utils;
use chrono::Local;
use difference::{Changeset, Difference};
use flate2::read::GzDecoder;
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_json::json;
use std::collections::HashMap;
use std::error::Error;
use std::fmt;
use std::io::Read;
use std::path::Path;
use tracing::{debug, error, info, warn};
use uuid::Uuid;
/// In-memory representation of a stored JACS document: the parsed JSON value
/// plus identity fields (mirroring `jacsId`, `jacsVersion`, `jacsType`)
/// lifted out for keying and dispatch.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct JACSDocument {
// Document id (mirrors the `jacsId` field of `value`).
pub id: String,
// Document version (mirrors `jacsVersion`; a fresh UUID is minted on update/copy).
pub version: String,
// The full document JSON, including signature and hash fields.
pub value: Value,
// Document type (mirrors `jacsType`).
pub jacs_type: String,
}
/// `jacsLevel` values for which `update_document` permits edits.
pub const EDITABLE_JACS_DOCS: &[&str] = &["config", "artifact"];
/// Fallback `jacsLevel` when a document does not specify one.
pub const DEFAULT_JACS_DOC_LEVEL: &str = "raw";
impl JACSDocument {
    /// Returns the storage lookup key for this document: `"id:version"`.
    pub fn getkey(&self) -> String {
        format!("{}:{}", &self.id, &self.version)
    }

    /// Borrows the document's underlying JSON value.
    pub fn getvalue(&self) -> &Value {
        &self.value
    }

    /// Returns the document's `$schema` URL.
    ///
    /// # Errors
    /// Fails when the field is absent or not a JSON string.
    pub fn getschema(&self) -> Result<String, Box<dyn Error>> {
        self.value
            .get("$schema")
            .and_then(|schema| schema.as_str())
            .map(|schema_str| schema_str.to_string())
            .ok_or_else(|| {
                "Schema extraction failed: no schema in doc or schema is not a string".into()
            })
    }

    /// Extracts the short schema name (`foo` from `.../foo.schema.json`)
    /// out of the full `$schema` URL.
    pub fn getshortschema(&self) -> Result<String, Box<dyn Error>> {
        let longschema = self.getschema()?;
        let re = Regex::new(r"/([^/]+)\.schema\.json$")
            .map_err(|e| format!("Invalid regex pattern: {}", e))?;
        re.captures(&longschema)
            .and_then(|caps| caps.get(1))
            .map(|matched| matched.as_str().to_string())
            .ok_or_else(|| "Failed to extract schema name from URL".into())
    }

    /// Strips an optional `:version` suffix, leaving the bare agent id.
    fn normalize_agent_id(id: &str) -> String {
        match id.find(':') {
            Some(pos) => id[..pos].to_string(),
            None => id.to_string(),
        }
    }

    /// Returns the requested agents (by bare id) that have not yet signed:
    /// the set difference requested − signed, with `id:version` suffixes
    /// normalized away on both sides.
    pub fn agreement_unsigned_agents(
        &self,
        agreement_fieldname: Option<String>,
    ) -> Result<Vec<String>, Box<dyn Error>> {
        let requested = self.agreement_requested_agents(agreement_fieldname.clone())?;
        let signed = self.agreement_signed_agents(agreement_fieldname)?;
        let normalized_requested: Vec<String> = requested
            .iter()
            .map(|id| Self::normalize_agent_id(id))
            .collect();
        let normalized_signed: Vec<String> = signed
            .iter()
            .map(|id| Self::normalize_agent_id(id))
            .collect();
        Ok(subtract_vecs(&normalized_requested, &normalized_signed))
    }

    /// Lists the agent ids named in the agreement's `agentIDs` array.
    /// `agreement_fieldname` defaults to `AGENT_AGREEMENT_FIELDNAME`.
    pub fn agreement_requested_agents(
        &self,
        agreement_fieldname: Option<String>,
    ) -> Result<Vec<String>, Box<dyn Error>> {
        let agreement_fieldname_key =
            agreement_fieldname.unwrap_or_else(|| AGENT_AGREEMENT_FIELDNAME.to_string());
        self.value
            .get(agreement_fieldname_key.as_str())
            .and_then(|agreement| agreement.get("agentIDs"))
            .and_then(|agents| agents.as_array())
            .map(|agents_array| {
                agents_array
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect()
            })
            .ok_or_else(|| "Agreement lookup failed: no agreement or agents in agreement".into())
    }

    /// Returns the id of the agent that signed this document (owned).
    pub fn signing_agent(&self) -> Result<String, Box<dyn Error>> {
        // Delegate to the borrowing variant instead of duplicating the lookup.
        Ok(self.signing_agent_str()?.to_string())
    }

    /// Borrowing variant of [`Self::signing_agent`].
    pub fn signing_agent_str(&self) -> Result<&str, Box<dyn Error>> {
        let jacs_signature = self
            .value
            .get(DOCUMENT_AGENT_SIGNATURE_FIELDNAME)
            // Message corrected: this is a document-signature lookup, not an
            // agreement lookup as the previous text claimed.
            .ok_or("Signature lookup failed: document has no signature field")?;
        jacs_signature
            .get("agentID")
            .ok_or_else(|| "Missing 'agentID' in signature".to_string())?
            .as_str()
            .ok_or_else(|| "'agentID' in signature is not a string".into())
    }

    /// Lists the `agentID` of every signature recorded on the agreement.
    pub fn agreement_signed_agents(
        &self,
        agreement_fieldname: Option<String>,
    ) -> Result<Vec<String>, Box<dyn Error>> {
        let agreement_fieldname_key =
            agreement_fieldname.unwrap_or_else(|| AGENT_AGREEMENT_FIELDNAME.to_string());
        let signatures_array = self
            .value
            .get(agreement_fieldname_key.as_str())
            .and_then(|agreement| agreement.get("signatures"))
            .and_then(|signatures| signatures.as_array())
            .ok_or("Agreement lookup failed: no agreement or signatures in agreement")?;
        let mut signed_agents: Vec<String> = Vec::new();
        for signature in signatures_array {
            let agentid = signature["agentID"]
                .as_str()
                .ok_or_else(|| format!("'agentID' in signature {:?} is not a string", signature))?;
            signed_agents.push(agentid.to_string());
        }
        Ok(signed_agents)
    }
}
impl fmt::Display for JACSDocument {
    /// Renders the document as pretty-printed JSON; serialization failures
    /// surface as `fmt::Error`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match serde_json::to_string_pretty(&self.value) {
            Ok(pretty) => write!(f, "{}", pretty),
            Err(_) => Err(fmt::Error),
        }
    }
}
/// Document lifecycle operations for an agent: create, load, store, update,
/// copy, sign/verify, diff, and export JACS documents.
pub trait DocumentTraits {
/// Verifies a document's signature, optionally against an explicit public
/// key and encryption type, restricted to `fields` when given.
fn verify_document_signature(
&mut self,
document_key: &str,
signature_key_from: Option<&str>,
fields: Option<&[String]>,
public_key: Option<Vec<u8>>,
public_key_enc_type: Option<String>,
) -> Result<(), Box<dyn Error>>;
/// Removes the old version of a document from storage.
fn archive_old_version(
&mut self,
original_document: &JACSDocument,
) -> Result<(), Box<dyn Error>>;
/// Validates `json` against a previously registered custom schema.
fn validate_document_with_custom_schema(
&self,
schema_path: &str,
json: &Value,
) -> Result<(), String>;
/// Creates a document from raw JSON (optionally attaching files), signs,
/// hashes, and stores it.
fn create_document_and_load(
&mut self,
json: &str,
attachments: Option<Vec<String>>,
embed: Option<bool>,
) -> Result<JACSDocument, Box<dyn std::error::Error + 'static>>;
/// Loads all documents from storage, optionally keeping only the most
/// recent version of each and optionally returning them.
fn load_all(
&mut self,
store: bool,
load_only_recent: bool,
) -> Result<Vec<JACSDocument>, Vec<Box<dyn Error>>>;
/// Validates a document header string and stores the document.
fn load_document(&mut self, document_string: &str) -> Result<JACSDocument, Box<dyn Error>>;
/// Removes a document from storage by key, returning it.
fn remove_document(&mut self, document_key: &str) -> Result<JACSDocument, Box<dyn Error>>;
/// Creates a new, re-signed version of an existing document.
fn copy_document(&mut self, document_key: &str) -> Result<JACSDocument, Box<dyn Error>>;
/// Wraps a validated JSON value in a `JACSDocument` and persists it.
fn store_jacs_document(&mut self, value: &Value) -> Result<JACSDocument, Box<dyn Error>>;
/// Hashes a document's JSON with the hash field itself excluded.
fn hash_doc(&self, doc: &Value) -> Result<String, Box<dyn Error>>;
/// Fetches a document from storage by its `"id:version"` key.
fn get_document(&self, document_key: &str) -> Result<JACSDocument, Box<dyn Error>>;
/// Lists all document keys known to storage.
fn get_document_keys(&mut self) -> Vec<String>;
/// Returns the signing date recorded in a document's signature block.
fn get_document_signature_date(&mut self, document_key: &str)
-> Result<String, Box<dyn Error>>;
/// Returns the signing agent as `"agentID:agentVersion"`.
fn get_document_signature_agent_id(
&mut self,
document_key: &str,
) -> Result<String, Box<dyn Error>>;
/// Verifies a document signed by another agent, resolving that agent's
/// public key from the configured sources.
fn verify_external_document_signature(
&mut self,
document_key: &str,
) -> Result<(), Box<dyn Error>>;
/// Line-based diff of two JSON strings; returns (same, diffs).
fn diff_json_strings(
&self,
json1: &str,
json2: &str,
) -> Result<(String, String), Box<dyn Error>>;
/// Saves a document to the filesystem and optionally re-exports its
/// embedded attachments.
fn save_document(
&mut self,
document_key: &str,
output_filename: Option<String>,
export_embedded: Option<bool>,
extract_only: Option<bool>,
) -> Result<(), Box<dyn Error>>;
/// Replaces a document with a new, re-versioned and re-signed revision.
fn update_document(
&mut self,
document_key: &str,
new_document_string: &str,
attachments: Option<Vec<String>>,
embed: Option<bool>,
) -> Result<JACSDocument, Box<dyn Error>>;
/// Builds the JSON metadata object for a file attachment.
fn create_file_json(
&mut self,
filepath: &str,
embed: bool,
) -> Result<serde_json::Value, Box<dyn Error>>;
/// Re-hashes each file referenced by `jacsFiles` and checks the hashes.
fn verify_document_files(&mut self, document: &Value) -> Result<(), Box<dyn Error>>;
/// Expands an attachment argument into concrete file paths.
/// (Name keeps the historical "attachement" spelling for compatibility.)
fn parse_attachement_arg(&mut self, attachments: Option<&str>) -> Option<Vec<String>>;
/// Word-based diff of two strings; returns (same, added, removed).
fn diff_strings(&self, string_one: &str, string_two: &str) -> (String, String, String);
/// Creates, signs, hashes, and stores several documents; fails fast on
/// the first error.
fn create_documents_batch(
&mut self,
documents: &[&str],
) -> Result<Vec<JACSDocument>, Box<dyn std::error::Error + 'static>>;
}
impl DocumentTraits for Agent {
fn validate_document_with_custom_schema(
&self,
schema_path: &str,
json: &Value,
) -> Result<(), String> {
let schemas = self
.document_schemas
.lock()
.map_err(|e| format!("Failed to acquire schema lock: {}", e))?;
let validator = schemas.get(schema_path).ok_or_else(|| {
format!(
"Validator not found for schema path: '{}'. Ensure the schema is registered.",
schema_path
)
})?;
let validation_result = validator.validate(json);
validation_result.map_err(|error| {
let doc_id = json.get("jacsId").and_then(|v| v.as_str()).unwrap_or("<unknown>");
let doc_type = json.get("jacsType").and_then(|v| v.as_str()).unwrap_or("<unknown>");
format!(
"Custom schema validation failed for document '{}' (type: '{}') against schema '{}': {}",
doc_id, doc_type, schema_path, error
)
})?;
Ok(())
}
fn create_file_json(
&mut self,
filepath: &str,
embed: bool,
) -> Result<serde_json::Value, Box<dyn Error>> {
let base64_contents = self.fs_get_document_content(filepath.to_string())?;
let mime_type = mime_guess::from_path(filepath)
.first_or_octet_stream()
.to_string();
let sha256_hash = hash_bytes(base64_contents.as_bytes());
let file_json = json!({
"mimetype": mime_type,
"path": filepath,
"embed": embed,
"sha256": sha256_hash
});
let file_json = if embed {
match file_json.as_object() {
Some(obj) => obj
.clone()
.into_iter()
.chain(vec![("contents".to_string(), json!(base64_contents))])
.collect(),
None => file_json, }
} else {
file_json
};
Ok(file_json)
}
fn verify_document_files(&mut self, document: &Value) -> Result<(), Box<dyn Error>> {
if let Some(files_array) = document.get("jacsFiles").and_then(|files| files.as_array()) {
for file_obj in files_array {
let file_path = file_obj
.get("path")
.and_then(|path| path.as_str())
.ok_or("Missing file path")?;
let expected_hash = file_obj
.get("sha256")
.and_then(|hash| hash.as_str())
.ok_or("Missing SHA256 hash")?;
let base64_contents = self.fs_get_document_content(file_path.to_string())?;
let actual_hash = hash_bytes(base64_contents.as_bytes());
if actual_hash != expected_hash {
return Err(JacsError::HashMismatch {
expected: expected_hash.to_string(),
got: actual_hash,
}
.into());
}
}
}
Ok(())
}
fn create_document_and_load(
&mut self,
json: &str,
attachments: Option<Vec<String>>,
embed: Option<bool>,
) -> Result<JACSDocument, Box<dyn std::error::Error + 'static>> {
let mut instance = self.schema.create(json)?;
if let Some(attachment_list) = attachments {
let mut files_array: Vec<Value> = Vec::new();
for attachment_string in &attachment_list {
if let Some(file_paths) = self.parse_attachement_arg(Some(attachment_string)) {
for file in &file_paths {
let final_embed = embed.unwrap_or(false);
let file_json = self.create_file_json(file, final_embed)?;
files_array.push(file_json);
}
}
}
let instance_map = instance.as_object_mut()
.ok_or("Invalid document structure: expected a JSON object but got a different type. \
Ensure your document JSON is a valid object (starts with '{' and ends with '}').")?;
instance_map.insert("jacsFiles".to_string(), Value::Array(files_array));
}
instance[DOCUMENT_AGENT_SIGNATURE_FIELDNAME] =
self.signing_procedure(&instance, None, DOCUMENT_AGENT_SIGNATURE_FIELDNAME)?;
let document_hash = self.hash_doc(&instance)?;
instance[SHA256_FIELDNAME] = json!(format!("{}", document_hash));
self.store_jacs_document(&instance)
}
fn load_document(&mut self, document_string: &str) -> Result<JACSDocument, Box<dyn Error>> {
match &self.validate_header(document_string) {
Ok(value) => self.store_jacs_document(value),
Err(e) => {
error!("ERROR document ERROR {}", e);
Err(e.to_string().into())
}
}
}
/// Loads every document string from storage, validates each header, and
/// re-stores the documents. Returns the stored documents when `store` is
/// true (otherwise an empty vec). Per-document failures are collected and
/// logged rather than aborting the whole load.
///
/// When `load_only_recent` is true, documents sharing a `jacsId` are
/// deduplicated down to the one with the newest `jacsVersionDate`.
fn load_all(
&mut self,
store: bool,
load_only_recent: bool,
) -> Result<Vec<JACSDocument>, Vec<Box<dyn Error>>> {
let mut errors: Vec<Box<dyn Error>> = Vec::new();
let mut documents: Vec<JACSDocument> = Vec::new();
let mut doc_strings = self.fs_docs_load_all()?;
// jacsId -> (version timestamp, raw doc string) used for deduplication.
let mut most_recent_docs = HashMap::new();
if load_only_recent {
for doc_string in &doc_strings {
if let Ok(doc) = serde_json::from_str::<Value>(doc_string)
&& let (Some(jacs_id), Some(jacs_version_date)) =
(doc["jacsId"].as_str(), doc["jacsVersionDate"].as_str())
{
// An unparseable date falls back to "now", which effectively makes
// that document the most-recent candidate for its jacsId.
let timestamp = time_utils::parse_rfc3339_to_timestamp(jacs_version_date)
.unwrap_or_else(|e| {
println!("Failed to parse timestamp: {}", e);
time_utils::now_timestamp()
});
let entry = most_recent_docs
.entry(jacs_id.to_string())
.or_insert_with(|| (timestamp, doc_string));
if timestamp > entry.0 {
*entry = (timestamp, doc_string);
}
}
}
doc_strings = most_recent_docs
.values()
.map(|&(_, doc)| doc.clone())
.collect();
}
for doc_string in doc_strings {
match self.validate_header(&doc_string) {
Ok(doc) => {
// Every valid document is stored; `store` only controls whether it
// is also returned to the caller.
let document = self.store_jacs_document(&doc);
match document {
Ok(document) => {
if store {
documents.push(document);
}
}
Err(e) => {
errors.push(e);
}
}
}
Err(e) => {
errors.push(e);
}
}
}
if !errors.is_empty() {
error!("errors loading documents {:?}", errors);
}
Ok(documents)
}
fn hash_doc(&self, doc: &Value) -> Result<String, Box<dyn Error>> {
let mut doc_copy = doc.clone();
doc_copy
.as_object_mut()
.map(|obj| obj.remove(SHA256_FIELDNAME));
let doc_string = serde_json::to_string(&doc_copy)?;
Ok(hash_string(&doc_string))
}
fn store_jacs_document(&mut self, value: &Value) -> Result<JACSDocument, Box<dyn Error>> {
let id = value
.get_str("jacsId")
.ok_or_else(|| {
"Invalid document: missing required field 'jacsId'. \
Documents must have jacsId, jacsVersion, and jacsType fields. \
Use create_document_and_load() to create a properly structured document."
.to_string()
})?
.to_string();
let version = value
.get_str("jacsVersion")
.ok_or_else(|| {
"Invalid document: missing required field 'jacsVersion'. \
Documents must have jacsId, jacsVersion, and jacsType fields. \
Use create_document_and_load() to create a properly structured document."
.to_string()
})?
.to_string();
let jacs_type = value
.get_str("jacsType")
.ok_or_else(|| {
"Invalid document: missing required field 'jacsType'. \
Documents must have jacsId, jacsVersion, and jacsType fields. \
Use create_document_and_load() to create a properly structured document."
.to_string()
})?
.to_string();
let doc = JACSDocument {
id,
version,
value: value.clone(), jacs_type,
};
self.storage.store_document(&doc)?;
Ok(doc)
}
/// Fetches a document from storage by its `"id:version"` key.
fn get_document(&self, document_key: &str) -> Result<JACSDocument, Box<dyn Error>> {
self.storage.get_document(document_key)
}
/// Removes a document from storage by key, returning the removed document.
fn remove_document(&mut self, document_key: &str) -> Result<JACSDocument, Box<dyn Error>> {
self.storage.remove_document(document_key)
}
fn get_document_keys(&mut self) -> Vec<String> {
self.storage
.list_documents("")
.unwrap_or_else(|_| Vec::new())
}
/// Replaces a document with a new revision: validates the replacement,
/// checks editability (`jacsLevel`), verifies and merges file attachments,
/// confirms the replacement targets the stored id+version, then mints a new
/// version, re-signs, re-hashes, and stores the result.
fn update_document(
&mut self,
document_key: &str,
new_document_string: &str,
attachments: Option<Vec<String>>,
embed: Option<bool>,
) -> Result<JACSDocument, Box<dyn Error>> {
let mut new_document: Value = self.schema.validate_header(new_document_string)?;
let original_document = self.get_document(document_key)?;
let value = original_document.value.clone();
// Only documents whose jacsLevel appears in EDITABLE_JACS_DOCS may be edited.
let jacs_level = new_document
.get_str("jacsLevel")
.unwrap_or(DEFAULT_JACS_DOC_LEVEL.to_string())
;
if !EDITABLE_JACS_DOCS.contains(&jacs_level.as_str()) {
return Err(JacsError::DocumentError(format!(
"JACS docs of type {} are not editable",
jacs_level
))
.into());
};
// Start from the replacement's existing file list and append any newly
// supplied attachments below.
let mut files_array: Vec<Value> = new_document
.get("jacsFiles")
.and_then(|files| files.as_array())
.cloned()
.unwrap_or_else(Vec::new);
// Re-verify hashes of files already referenced by the replacement.
self.verify_document_files(&new_document)?;
if let Some(attachment_list) = attachments {
for attachment_path in attachment_list {
let final_embed = embed.unwrap_or(false);
let file_json = self.create_file_json(&attachment_path, final_embed)?;
files_array.push(file_json);
}
}
if let Some(instance_map) = new_document.as_object_mut() {
instance_map.insert("jacsFiles".to_string(), Value::Array(files_array));
}
// The replacement must target exactly the stored id and version.
let orginal_id = &value.get_str("jacsId");
let orginal_version = &value.get_str("jacsVersion");
let new_doc_orginal_id = &new_document.get_str("jacsId");
let new_doc_orginal_version = &new_document.get_str("jacsVersion");
if (orginal_id != new_doc_orginal_id) || (orginal_version != new_doc_orginal_version) {
return Err(JacsError::DocumentMalformed {
field: "jacsId/jacsVersion".to_string(),
reason: format!(
"The id/versions do not match found for key: {}. {:?}{:?}",
document_key, new_doc_orginal_id, new_doc_orginal_version
),
}
.into());
}
// Mint a fresh version id and chain it to the previous one.
let new_version = Uuid::new_v4().to_string();
let last_version = &value["jacsVersion"];
let versioncreated = time_utils::now_rfc3339();
new_document["jacsPreviousVersion"] = last_version.clone();
new_document["jacsVersion"] = json!(format!("{}", new_version));
new_document["jacsVersionDate"] = json!(format!("{}", versioncreated));
// Sign first, then hash over the signed content.
new_document[DOCUMENT_AGENT_SIGNATURE_FIELDNAME] =
self.signing_procedure(&new_document, None, DOCUMENT_AGENT_SIGNATURE_FIELDNAME)?;
let document_hash = self.hash_doc(&new_document)?;
new_document[SHA256_FIELDNAME] = json!(format!("{}", document_hash));
self.store_jacs_document(&new_document)
}
fn archive_old_version(
&mut self,
original_document: &JACSDocument,
) -> Result<(), Box<dyn Error>> {
let lookup_key = original_document.getkey();
self.storage.remove_document(&lookup_key)?;
Ok(())
}
fn copy_document(&mut self, document_key: &str) -> Result<JACSDocument, Box<dyn Error>> {
let original_document = self.get_document(document_key)?;
let mut value = original_document.value;
let new_version = Uuid::new_v4().to_string();
let last_version = &value["jacsVersion"];
let versioncreated = time_utils::now_rfc3339();
value["jacsPreviousVersion"] = last_version.clone();
value["jacsVersion"] = json!(format!("{}", new_version));
value["jacsVersionDate"] = json!(format!("{}", versioncreated));
value[DOCUMENT_AGENT_SIGNATURE_FIELDNAME] =
self.signing_procedure(&value, None, DOCUMENT_AGENT_SIGNATURE_FIELDNAME)?;
let document_hash = self.hash_doc(&value)?;
value[SHA256_FIELDNAME] = json!(format!("{}", document_hash));
self.store_jacs_document(&value)
}
/// Writes a document's pretty-printed JSON back to the filesystem and
/// optionally re-exports embedded file attachments to their original paths.
///
/// `extract_only` skips writing the document JSON itself; `export_embedded`
/// base64-decodes and gunzips each embedded attachment and writes it out,
/// backing up any file already present at the target path.
fn save_document(
&mut self,
document_key: &str,
output_filename: Option<String>,
export_embedded: Option<bool>,
extract_only: Option<bool>,
) -> Result<(), Box<dyn Error>> {
let original_document = self.get_document(document_key)?;
let document_string: String = serde_json::to_string_pretty(&original_document.value)?;
let is_extract_only = extract_only.unwrap_or_default();
if !is_extract_only {
let _ = self.fs_document_save(document_key, &document_string, output_filename)?;
}
let do_export = export_embedded.unwrap_or_default();
if do_export && let Some(jacs_files) = original_document.value["jacsFiles"].as_array() {
// A failing data-directory check is logged but not fatal.
if let Err(e) = self.check_data_directory() {
error!("Failed to check data directory: {}", e);
}
for item in jacs_files {
if item["embed"].as_bool().unwrap_or(false) {
let contents = item["contents"].as_str().ok_or("Contents not found")?;
let path = item["path"].as_str().ok_or("Path not found")?;
// Embedded contents are base64(gzip(bytes)): decode, then inflate.
let decoded_contents = STANDARD.decode(contents)?;
let mut gz_decoder = GzDecoder::new(std::io::Cursor::new(decoded_contents));
let mut inflated_contents = Vec::new();
gz_decoder.read_to_end(&mut inflated_contents)?;
let storage = self.storage.clone();
// Never clobber an existing file: rename it to a timestamped backup.
if storage.file_exists(path, None)? {
let backup_path =
format!("{}.{}.bkp", path, Local::now().format("%Y%m%d_%H%M%S"));
storage.rename_file(path, &backup_path)?;
}
storage.save_file(path, &inflated_contents)?;
// NOTE(review): clears the executable bit only when the agent is NOT
// in filesystem mode — confirm this inverted-looking condition.
#[cfg(not(target_arch = "wasm32"))]
if !self.use_filesystem() {
self.mark_file_not_executable(Path::new(path))?;
}
}
}
}
Ok(())
}
/// Verifies a document signed by another agent. The signer's public key is
/// resolved by trying each configured source in order (local cache, DNS,
/// HAI key service); the first resolved key is used for verification.
fn verify_external_document_signature(
&mut self,
document_key: &str,
) -> Result<(), Box<dyn Error>> {
let document = self.get_document(document_key)?;
let json_value = document.getvalue();
let signature_key_from = &DOCUMENT_AGENT_SIGNATURE_FIELDNAME.to_string();
// Pull the key hash and signer identity out of the signature block.
// Missing fields degrade to empty strings rather than erroring here.
let public_key_hash: String = json_value[signature_key_from]["publicKeyHash"]
.as_str()
.unwrap_or("")
.trim_matches('"')
.to_string();
let agent_id: String = json_value[signature_key_from]["agentID"]
.as_str()
.unwrap_or("")
.trim_matches('"')
.to_string();
let agent_version: String = json_value[signature_key_from]["agentVersion"]
.as_str()
.unwrap_or("")
.trim_matches('"')
.to_string();
let resolution_order = get_key_resolution_order();
info!(
"Verifying external document signature for {} using resolution order: {:?}",
document_key, resolution_order
);
let mut last_error: Option<Box<dyn Error>> = None;
let mut public_key: Option<Vec<u8>> = None;
let mut public_key_enc_type: Option<String> = None;
// Try each source in order; the first one that yields both a key and its
// encryption type wins (break). Failures are remembered in last_error.
for source in &resolution_order {
debug!("Trying key resolution source: {:?}", source);
match source {
KeyResolutionSource::Local => match self.fs_load_public_key(&public_key_hash) {
Ok(key) => match self.fs_load_public_key_type(&public_key_hash) {
Ok(enc_type) => {
info!(
"Found public key locally for hash: {}...",
&public_key_hash[..public_key_hash.len().min(16)]
);
public_key = Some(key);
public_key_enc_type = Some(enc_type);
break;
}
Err(e) => {
debug!("Local key found but enc_type missing: {}", e);
last_error = Some(e);
}
},
Err(e) => {
debug!("Local key not found: {}", e);
last_error = Some(e);
}
},
KeyResolutionSource::Dns => {
// DNS only verifies key hashes; it cannot supply key material.
debug!(
"DNS source configured but DNS verifies key hashes, not fetches keys. \
Skipping to next source."
);
continue;
}
KeyResolutionSource::Hai => {
if agent_id.is_empty() {
debug!("Cannot fetch from HAI: agent_id is empty");
continue;
}
let version = if agent_version.is_empty() {
"latest".to_string()
} else {
agent_version.clone()
};
match fetch_public_key_from_hai(&agent_id, &version) {
Ok(key_info) => {
info!(
"Found public key from HAI for agent {} version {}: algorithm={}",
agent_id, version, key_info.algorithm
);
// Reject a HAI key whose hash disagrees with the one the
// document's signature claims.
if !key_info.hash.is_empty() && key_info.hash != public_key_hash {
warn!(
"HAI key hash mismatch: expected {}..., got {}...",
&public_key_hash[..public_key_hash.len().min(16)],
&key_info.hash[..key_info.hash.len().min(16)]
);
last_error = Some(format!(
"HAI key hash mismatch for agent {}: document expects {}..., HAI returned {}...",
agent_id,
&public_key_hash[..public_key_hash.len().min(16)],
&key_info.hash[..key_info.hash.len().min(16)]
).into());
continue;
}
public_key = Some(key_info.public_key.clone());
public_key_enc_type = Some(key_info.algorithm.clone());
// Cache the fetched key locally; a cache failure is non-fatal.
if let Err(e) = self.fs_save_remote_public_key(
&public_key_hash,
&key_info.public_key,
key_info.algorithm.as_bytes(),
) {
debug!("Failed to cache HAI key locally (non-fatal): {}", e);
}
break;
}
Err(e) => {
debug!("HAI key fetch failed: {}", e);
last_error = Some(format!("HAI key service: {}", e).into());
}
}
}
}
}
// Both the key bytes and encryption type are required for verification.
let (final_key, final_enc_type) = match (public_key, public_key_enc_type) {
(Some(k), Some(e)) => (k, e),
_ => {
let err_msg = format!(
"Could not resolve public key for hash '{}...' from any configured source ({:?}). Last error: {}",
&public_key_hash[..public_key_hash.len().min(16)],
resolution_order,
last_error
.map(|e| e.to_string())
.unwrap_or_else(|| "unknown".to_string())
);
error!("{}", err_msg);
return Err(err_msg.into());
}
};
self.verify_document_signature(
document_key,
Some(signature_key_from),
None,
Some(final_key),
Some(final_enc_type),
)
}
fn get_document_signature_agent_id(
&mut self,
document_key: &str,
) -> Result<String, Box<dyn Error>> {
let document = self.get_document(document_key)?;
let json_value = document.getvalue();
let signature_key_from = &DOCUMENT_AGENT_SIGNATURE_FIELDNAME.to_string();
let angent_id: String = json_value[signature_key_from]["agentID"]
.as_str()
.unwrap_or("")
.trim_matches('"')
.to_string();
let angent_version: String = json_value[signature_key_from]["agentVersion"]
.as_str()
.unwrap_or("")
.trim_matches('"')
.to_string();
let agent_id_version = format!("{}:{}", angent_id, angent_version);
Ok(agent_id_version)
}
fn get_document_signature_date(
&mut self,
document_key: &str,
) -> Result<String, Box<dyn Error>> {
let document = self.get_document(document_key)?;
let json_value = document.getvalue();
let signature_key_from = &DOCUMENT_AGENT_SIGNATURE_FIELDNAME.to_string();
let date: String = json_value[signature_key_from]["date"]
.as_str()
.unwrap_or("")
.trim_matches('"')
.to_string();
Ok(date)
}
fn verify_document_signature(
&mut self,
document_key: &str,
signature_key_from: Option<&str>,
fields: Option<&[String]>,
public_key: Option<Vec<u8>>,
public_key_enc_type: Option<String>,
) -> Result<(), Box<dyn Error>> {
let document = self.get_document(document_key)?;
let document_value = document.getvalue();
self.verify_document_files(document_value)?;
let used_public_key = match public_key {
Some(public_key) => public_key,
None => self.get_public_key()?,
};
let binding = &DOCUMENT_AGENT_SIGNATURE_FIELDNAME.to_string();
let signature_key_from_final = match signature_key_from {
Some(signature_key_from) => signature_key_from,
None => binding,
};
let result = self.signature_verification_procedure(
document_value,
fields,
signature_key_from_final,
used_public_key,
public_key_enc_type,
None,
None,
);
match result {
Ok(_) => Ok(()),
Err(err) => {
let error_message =
format!("Signatures not verifiable {} {:?}! ", document_key, err);
error!("{}", error_message);
Err(error_message.into())
}
}
}
fn parse_attachement_arg(&mut self, attachments: Option<&str>) -> Option<Vec<String>> {
match attachments {
Some(path_str) => {
let storage = self.storage.clone();
match storage.list(path_str, None) {
Ok(file_paths) => {
if !file_paths.is_empty() {
Some(file_paths)
} else {
match storage.file_exists(path_str, None) {
Ok(true) => Some(vec![path_str.to_string()]),
_ => {
eprintln!("Invalid path: {}", path_str);
None
}
}
}
}
Err(_) => {
eprintln!("Failed to read path: {}", path_str);
None
}
}
}
None => None,
}
}
fn diff_json_strings(
&self,
json1: &str,
json2: &str,
) -> Result<(String, String), Box<dyn Error>> {
let changeset = Changeset::new(json1, json2, "\n");
let mut same = String::new();
let mut diffs = String::new();
for diff in changeset.diffs {
match diff {
Difference::Same(ref x) => same.push_str(format!(" {}", x).as_str()),
Difference::Add(ref x) => diffs.push_str(format!("+{}", x).as_str()),
Difference::Rem(ref x) => diffs.push_str(format!("-{}", x).as_str()),
}
}
Ok((same, diffs))
}
fn diff_strings(&self, string_one: &str, string_two: &str) -> (String, String, String) {
let changeset = Changeset::new(string_one, string_two, " ");
let mut same = String::new();
let mut add = String::new();
let mut rem = String::new();
for diff in &changeset.diffs {
match diff {
Difference::Same(x) => same.push_str(x),
Difference::Add(x) => add.push_str(x),
Difference::Rem(x) => rem.push_str(x),
}
}
(same, add, rem)
}
fn create_documents_batch(
&mut self,
documents: &[&str],
) -> Result<Vec<JACSDocument>, Box<dyn std::error::Error + 'static>> {
use tracing::info;
if documents.is_empty() {
return Ok(Vec::new());
}
info!(batch_size = documents.len(), "Creating batch of documents");
let mut results = Vec::with_capacity(documents.len());
for (index, json) in documents.iter().enumerate() {
let mut instance = self.schema.create(json)?;
instance[DOCUMENT_AGENT_SIGNATURE_FIELDNAME] =
self.signing_procedure(&instance, None, DOCUMENT_AGENT_SIGNATURE_FIELDNAME)?;
let document_hash = self.hash_doc(&instance)?;
instance[SHA256_FIELDNAME] = json!(format!("{}", document_hash));
let doc = self.store_jacs_document(&instance)?;
results.push(doc);
tracing::trace!(batch_index = index, "Batch document created");
}
info!(
batch_size = documents.len(),
"Batch document creation completed successfully"
);
Ok(results)
}
}