use cdx_core::{Document, DocumentState, Result};
/// End-to-end round trip: build a document, persist it to disk, reopen it,
/// and confirm that metadata and content survive unchanged.
#[test]
fn test_create_save_reopen() -> Result<()> {
    let workdir = tempfile::tempdir().unwrap();
    let path = workdir.path().join("test.cdx");

    let original = Document::builder()
        .title("Integration Test Document")
        .creator("Test Runner")
        .add_heading(1, "Test Heading")
        .add_paragraph("This is a test paragraph.")
        .add_heading(2, "Subheading")
        .add_paragraph("More content here.")
        .build()?;

    // Newly built documents always start life as drafts.
    assert_eq!(original.state(), DocumentState::Draft);

    original.save(&path)?;
    let loaded = Document::open(&path)?;

    // Dublin Core metadata must survive the round trip...
    assert_eq!(
        loaded.dublin_core().terms.title,
        original.dublin_core().terms.title
    );
    assert_eq!(
        loaded.dublin_core().terms.creator,
        original.dublin_core().terms.creator
    );
    // ...and so must the block structure.
    assert_eq!(loaded.content().blocks.len(), original.content().blocks.len());
    Ok(())
}
/// A freshly built and persisted document must pass its own integrity check.
#[test]
fn test_verification_fresh_document() -> Result<()> {
    let workdir = tempfile::tempdir().unwrap();
    let path = workdir.path().join("verify.cdx");

    Document::builder()
        .title("Verification Test")
        .creator("Test")
        .add_paragraph("Content to verify.")
        .build()?
        .save(&path)?;

    let report = Document::open(&path)?.verify()?;
    assert!(
        report.is_valid(),
        "Fresh document should verify: {:?}",
        report.errors
    );
    Ok(())
}
/// Two documents built from identical inputs must hash to the same ID.
#[test]
fn test_document_id_deterministic() -> Result<()> {
    // Build the same document twice via a shared closure so the inputs
    // cannot accidentally drift apart.
    let build = || {
        Document::builder()
            .title("Same Title")
            .creator("Same Creator")
            .add_paragraph("Same content.")
            .build()
    };
    let id_first = build()?.compute_id()?;
    let id_second = build()?.compute_id()?;
    assert_eq!(id_first, id_second, "Same content should produce same ID");
    Ok(())
}
/// Documents that differ only in body text must receive distinct IDs.
#[test]
fn test_document_id_changes_with_content() -> Result<()> {
    // Identical metadata, varying paragraph body.
    let build = |body: &str| {
        Document::builder()
            .title("Title")
            .creator("Creator")
            .add_paragraph(body)
            .build()
    };
    let id_a = build("Content A")?.compute_id()?;
    let id_b = build("Content B")?.compute_id()?;
    assert_ne!(id_a, id_b, "Different content should produce different IDs");
    Ok(())
}
/// Parsing a document from an in-memory byte buffer must yield the same
/// metadata as the file it was saved to.
#[test]
fn test_open_from_bytes() -> Result<()> {
    let workdir = tempfile::tempdir().unwrap();
    let path = workdir.path().join("bytes.cdx");

    let saved = Document::builder()
        .title("Bytes Test")
        .creator("Test")
        .add_paragraph("In-memory test.")
        .build()?;
    saved.save(&path)?;

    // Read the raw archive back and parse it without the filesystem API.
    let raw = std::fs::read(&path)?;
    let parsed = Document::from_bytes(raw)?;
    assert_eq!(
        parsed.dublin_core().terms.title,
        saved.dublin_core().terms.title
    );
    Ok(())
}
/// Interleaved headings and paragraphs must all survive a save/open cycle.
#[test]
fn test_multiple_block_types() -> Result<()> {
    let workdir = tempfile::tempdir().unwrap();
    let path = workdir.path().join("blocks.cdx");

    // Each entry contributes one heading and one paragraph block.
    let sections = [
        (1, "Main Title", "Introduction paragraph."),
        (2, "Section 1", "First section content."),
        (2, "Section 2", "Second section content."),
        (3, "Subsection 2.1", "Subsection content."),
    ];
    let mut builder = Document::builder().title("Block Types Test").creator("Test");
    for (level, heading, body) in sections {
        builder = builder.add_heading(level, heading).add_paragraph(body);
    }
    let doc = builder.build()?;
    doc.save(&path)?;

    // 4 headings + 4 paragraphs = 8 blocks.
    let reloaded = Document::open(&path)?;
    assert_eq!(reloaded.content().blocks.len(), 8);
    Ok(())
}
/// A builder with metadata but no blocks must yield an empty content body.
#[test]
fn test_empty_content() -> Result<()> {
    let doc = Document::builder()
        .title("Empty Document")
        .creator("Test")
        .build()?;
    assert!(doc.content().is_empty());
    Ok(())
}
#[cfg(feature = "signatures")]
// ECDSA signing over the document's deterministic ID (`compute_id`):
// verification with the exported public key, cross-document rejection, and
// persistence of signatures through a save/open cycle.
mod signature_tests {
use super::*;
use cdx_core::security::{EcdsaSigner, EcdsaVerifier, Signer, SignerInfo, Verifier};
// Sign a document ID with a freshly generated keypair and verify the
// signature via the PEM-exported public key.
#[test]
fn test_sign_and_verify_integration() -> Result<()> {
let doc = Document::builder()
.title("Signed Document")
.creator("Signer")
.add_paragraph("Content to be signed.")
.build()?;
let doc_id = doc.compute_id()?;
let signer_info = SignerInfo::new("Test Signer").with_email("test@example.com");
// `generate` returns the signer plus the matching public key as PEM.
let (signer, public_key_pem) = EcdsaSigner::generate(signer_info)?;
let signature = signer.sign(&doc_id)?;
let verifier = EcdsaVerifier::from_pem(&public_key_pem)?;
let result = verifier.verify(&doc_id, &signature)?;
assert!(result.is_valid(), "Signature should verify");
Ok(())
}
// A signature produced over one document's ID must not verify against a
// different document's ID.
#[test]
fn test_signature_fails_for_different_document() -> Result<()> {
let doc1 = Document::builder()
.title("Document 1")
.creator("Test")
.add_paragraph("Original content.")
.build()?;
let doc2 = Document::builder()
.title("Document 2")
.creator("Test")
.add_paragraph("Different content.")
.build()?;
let id1 = doc1.compute_id()?;
let id2 = doc2.compute_id()?;
let signer_info = SignerInfo::new("Signer");
let (signer, public_key_pem) = EcdsaSigner::generate(signer_info)?;
// Sign id1 but attempt verification against id2.
let signature = signer.sign(&id1)?;
let verifier = EcdsaVerifier::from_pem(&public_key_pem)?;
let result = verifier.verify(&id2, &signature)?;
assert!(
!result.is_valid(),
"Signature should not verify for different document"
);
Ok(())
}
// A signature attached with `add_signature` must survive save/open with its
// ID intact and still verify against the reopened document's computed ID.
#[test]
fn test_signature_persistence() -> Result<()> {
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("signed.cdx");
let mut doc = Document::builder()
.title("Document with Signature")
.creator("Test")
.add_paragraph("Content to be signed.")
.build()?;
assert!(
!doc.has_signatures(),
"New document should have no signatures"
);
let doc_id = doc.compute_id()?;
let signer_info = SignerInfo::new("Test Signer").with_email("test@example.com");
let (signer, public_key_pem) = EcdsaSigner::generate(signer_info)?;
let signature = signer.sign(&doc_id)?;
// Remember the ID so it can be matched after the round trip.
let signature_id = signature.id.clone();
doc.add_signature(signature)?;
assert!(
doc.has_signatures(),
"Document should have signatures after adding"
);
assert_eq!(
doc.signatures().len(),
1,
"Should have exactly one signature"
);
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
assert!(
reopened.has_signatures(),
"Reopened document should have signatures"
);
assert_eq!(
reopened.signatures().len(),
1,
"Reopened document should have exactly one signature"
);
let persisted_sig = &reopened.signatures()[0];
assert_eq!(persisted_sig.id, signature_id, "Signature ID should match");
// Recompute the ID from the reopened document: it must still match what
// was signed, so the persisted signature verifies.
let reopened_doc_id = reopened.compute_id()?;
let verifier = EcdsaVerifier::from_pem(&public_key_pem)?;
let result = verifier.verify(&reopened_doc_id, persisted_sig)?;
assert!(result.is_valid(), "Persisted signature should verify");
Ok(())
}
// Two independent signers can sign the same document; both signatures and
// both signer names must survive a save/open cycle.
#[test]
fn test_multiple_signatures() -> Result<()> {
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("multi_signed.cdx");
let mut doc = Document::builder()
.title("Multi-Signed Document")
.creator("Test")
.add_paragraph("Content to be multiply signed.")
.build()?;
let doc_id = doc.compute_id()?;
let signer_info1 = SignerInfo::new("Signer One");
let (signer1, _public_key1) = EcdsaSigner::generate(signer_info1)?;
let signature1 = signer1.sign(&doc_id)?;
let signer_info2 = SignerInfo::new("Signer Two");
let (signer2, _public_key2) = EcdsaSigner::generate(signer_info2)?;
let signature2 = signer2.sign(&doc_id)?;
doc.add_signature(signature1)?;
doc.add_signature(signature2)?;
assert_eq!(doc.signatures().len(), 2, "Should have two signatures");
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
assert_eq!(
reopened.signatures().len(),
2,
"Reopened document should have two signatures"
);
// Order is not asserted; only that both signer names are present.
let signer_names: Vec<_> = reopened
.signatures()
.iter()
.map(|s| s.signer.name.as_str())
.collect();
assert!(signer_names.contains(&"Signer One"));
assert!(signer_names.contains(&"Signer Two"));
Ok(())
}
}
// Document lifecycle state machine: Draft <-> Review transitions, freeze
// preconditions, and fork-based lineage/versioning.
mod state_transition_tests {
use super::*;
// Draft -> Review via `submit_for_review`.
#[test]
fn test_submit_for_review() -> Result<()> {
let mut doc = Document::builder()
.title("State Test")
.creator("Test")
.add_paragraph("Content.")
.build()?;
assert_eq!(doc.state(), DocumentState::Draft);
doc.submit_for_review()?;
assert_eq!(doc.state(), DocumentState::Review);
Ok(())
}
// Review -> Draft via `revert_to_draft` (allowed while unsigned).
#[test]
fn test_revert_to_draft() -> Result<()> {
let mut doc = Document::builder()
.title("Revert Test")
.creator("Test")
.add_paragraph("Content.")
.build()?;
doc.submit_for_review()?;
assert_eq!(doc.state(), DocumentState::Review);
doc.revert_to_draft()?;
assert_eq!(doc.state(), DocumentState::Draft);
Ok(())
}
// Once a signature is attached, reverting to draft must be rejected.
#[cfg(feature = "signatures")]
#[test]
fn test_revert_fails_with_signatures() -> Result<()> {
use cdx_core::security::{EcdsaSigner, Signer, SignerInfo};
let mut doc = Document::builder()
.title("Signed Revert Test")
.creator("Test")
.add_paragraph("Content.")
.build()?;
let doc_id = doc.compute_id()?;
let signer_info = SignerInfo::new("Test Signer");
let (signer, _) = EcdsaSigner::generate(signer_info)?;
let signature = signer.sign(&doc_id)?;
doc.add_signature(signature)?;
doc.submit_for_review()?;
let result = doc.revert_to_draft();
assert!(result.is_err(), "Revert should fail with signatures");
Ok(())
}
// Full draft -> review -> sign -> save cycle: the reopened document keeps
// both its Review state and its signatures.
#[cfg(feature = "signatures")]
#[test]
fn test_lifecycle_draft_review() -> Result<()> {
use cdx_core::security::{EcdsaSigner, Signer, SignerInfo};
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("lifecycle.cdx");
let mut doc = Document::builder()
.title("Lifecycle Test")
.creator("Test")
.add_paragraph("Content for lifecycle test.")
.build()?;
assert_eq!(doc.state(), DocumentState::Draft);
doc.submit_for_review()?;
assert_eq!(doc.state(), DocumentState::Review);
let doc_id = doc.compute_id()?;
let signer_info = SignerInfo::new("Reviewer");
let (signer, _) = EcdsaSigner::generate(signer_info)?;
let signature = signer.sign(&doc_id)?;
doc.add_signature(signature)?;
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
assert_eq!(reopened.state(), DocumentState::Review);
assert!(reopened.has_signatures());
Ok(())
}
// Freezing requires lineage; a signed, reviewed document without lineage
// must still refuse to freeze.
#[cfg(feature = "signatures")]
#[test]
fn test_freeze_requires_lineage() -> Result<()> {
use cdx_core::security::{EcdsaSigner, Signer, SignerInfo};
let mut doc = Document::builder()
.title("Freeze Test")
.creator("Test")
.add_paragraph("Content.")
.build()?;
let doc_id = doc.compute_id()?;
let signer_info = SignerInfo::new("Signer");
let (signer, _) = EcdsaSigner::generate(signer_info)?;
let signature = signer.sign(&doc_id)?;
doc.add_signature(signature)?;
doc.submit_for_review()?;
let result = doc.freeze();
assert!(result.is_err(), "Freeze should fail without lineage");
Ok(())
}
// `fork` produces a fresh Draft whose lineage points back at the parent's
// computed ID with an incremented version number.
#[test]
fn test_fork_creates_lineage() -> Result<()> {
let original = Document::builder()
.title("Original Document")
.creator("Author")
.add_paragraph("Original content.")
.build()?;
let original_id = original.compute_id()?;
let forked = original.fork()?;
assert_eq!(forked.state(), DocumentState::Draft);
let lineage = forked.manifest().lineage.as_ref();
assert!(lineage.is_some());
let lineage = lineage.unwrap();
assert_eq!(lineage.parent, Some(original_id));
assert_eq!(lineage.version, Some(2));
Ok(())
}
// Forking twice builds an ancestor chain: v3's lineage names v2 as parent
// and records both v1 and v2 among its ancestors.
#[test]
fn test_fork_ancestor_chain() -> Result<()> {
let v1 = Document::builder()
.title("Version 1")
.creator("Author")
.add_paragraph("First version.")
.build()?;
let v1_id = v1.compute_id()?;
let v2 = v1.fork()?;
let v2_id = v2.compute_id()?;
let v3 = v2.fork()?;
let lineage = v3.manifest().lineage.as_ref().unwrap();
assert_eq!(lineage.parent, Some(v2_id.clone()));
assert_eq!(lineage.version, Some(3));
assert!(lineage.ancestors.contains(&v1_id));
assert!(lineage.ancestors.contains(&v2_id));
Ok(())
}
}
// Per-block Merkle proofs, the block hash index, and provenance records.
mod proof_tests {
use super::*;
// A proof for block 0 must verify against that block's hash taken from the
// same document's block index.
#[test]
fn test_block_proof_generation() -> Result<()> {
let doc = Document::builder()
.title("Proof Test")
.creator("Test")
.add_heading(1, "Chapter 1")
.add_paragraph("First paragraph.")
.add_heading(2, "Section 1.1")
.add_paragraph("Second paragraph.")
.build()?;
let proof = doc.prove_block(0)?;
let index = doc.block_index()?;
let block_hash = &index.get_block(0).unwrap().hash;
let verified = doc.verify_proof(&proof, block_hash);
assert!(verified, "Proof should verify against document");
Ok(())
}
// A proof generated from one document must not verify against another
// document, even with the correct block hash from the source document.
#[test]
fn test_proof_fails_for_different_document() -> Result<()> {
let doc1 = Document::builder()
.title("Document 1")
.creator("Test")
.add_paragraph("Content A")
.build()?;
let doc2 = Document::builder()
.title("Document 2")
.creator("Test")
.add_paragraph("Content B")
.build()?;
let proof = doc1.prove_block(0)?;
let index1 = doc1.block_index()?;
let block_hash = &index1.get_block(0).unwrap().hash;
// Verify doc1's proof against doc2: must fail.
let verified = doc2.verify_proof(&proof, block_hash);
assert!(
!verified,
"Proof should not verify against different document"
);
Ok(())
}
// The provenance record carries the document's computed ID and a resolved
// (non-pending) Merkle root over a nonzero number of blocks.
#[test]
fn test_provenance_record() -> Result<()> {
let doc = Document::builder()
.title("Provenance Test")
.creator("Author Name")
.add_paragraph("Content for provenance.")
.build()?;
let record = doc.provenance_record()?;
assert_eq!(record.document_id, doc.compute_id()?);
assert!(record.merkle.block_count > 0);
assert!(!record.merkle.root.is_pending());
Ok(())
}
// The block index covers every block and every entry has a resolved hash.
#[test]
fn test_block_index() -> Result<()> {
let doc = Document::builder()
.title("Block Index Test")
.creator("Test")
.add_heading(1, "Title")
.add_paragraph("First paragraph.")
.add_paragraph("Second paragraph.")
.build()?;
let index = doc.block_index()?;
assert_eq!(index.block_count(), 3);
for i in 0..3 {
let entry = index.get_block(i).unwrap();
assert!(!entry.hash.is_pending());
}
Ok(())
}
}
#[cfg(feature = "encryption")]
mod encryption_tests {
    use super::*;
    use cdx_core::security::Aes256GcmEncryptor;

    /// A round trip through AES-256-GCM must reproduce the plaintext, and the
    /// ciphertext must differ from the input.
    #[test]
    fn test_encrypt_decrypt_content() -> Result<()> {
        let message = b"This is secret document content.";
        let encryptor = Aes256GcmEncryptor::new(&Aes256GcmEncryptor::generate_key())?;
        let sealed = encryptor.encrypt(message)?;
        assert_ne!(sealed.ciphertext.as_slice(), message);
        let recovered = encryptor.decrypt(&sealed.ciphertext, &sealed.nonce)?;
        assert_eq!(recovered, message);
        Ok(())
    }

    /// Decrypting with a key other than the one used to encrypt must error.
    #[test]
    fn test_decrypt_wrong_key_fails() -> Result<()> {
        let message = b"Secret content";
        let sender = Aes256GcmEncryptor::new(&Aes256GcmEncryptor::generate_key())?;
        let stranger = Aes256GcmEncryptor::new(&Aes256GcmEncryptor::generate_key())?;
        let sealed = sender.encrypt(message)?;
        let outcome = stranger.decrypt(&sealed.ciphertext, &sealed.nonce);
        assert!(outcome.is_err(), "Decryption with wrong key should fail");
        Ok(())
    }

    /// 100 kB of patterned bytes must survive an encrypt/decrypt round trip.
    #[test]
    fn test_encrypt_large_content() -> Result<()> {
        let message: Vec<u8> = (0..100_000).map(|i| (i % 256) as u8).collect();
        let encryptor = Aes256GcmEncryptor::new(&Aes256GcmEncryptor::generate_key())?;
        let sealed = encryptor.encrypt(&message)?;
        let recovered = encryptor.decrypt(&sealed.ciphertext, &sealed.nonce)?;
        assert_eq!(recovered, message);
        Ok(())
    }
}
// Schema validation of hand-written JSON fixtures for each archive part.
// Each validator returns a list of errors; an empty list means valid.
mod validation_tests {
use cdx_core::validation::{
validate_block_index, validate_content, validate_dublin_core, validate_manifest,
};
// A minimal manifest with version, id, state, timestamps, and content path
// must validate cleanly.
#[test]
fn test_validate_real_manifest() {
let manifest = r#"{
"version": "0.1",
"id": "sha256:abcd1234",
"state": "draft",
"created": "2024-01-01T00:00:00Z",
"modified": "2024-01-01T00:00:00Z",
"content": {
"path": "content/content.json"
}
}"#;
let errors = validate_manifest(manifest).unwrap();
assert!(
errors.is_empty(),
"Valid manifest should have no errors: {:?}",
errors
);
}
// A single paragraph block with one text child must validate cleanly.
#[test]
fn test_validate_real_content() {
let content = r#"{
"version": "0.1",
"blocks": [
{
"type": "paragraph",
"children": [{"type": "text", "value": "Hello"}]
}
]
}"#;
let errors = validate_content(content).unwrap();
assert!(errors.is_empty());
}
// Dublin Core metadata with array-valued creator/subject must validate.
#[test]
fn test_validate_real_dublin_core() {
let dc = r#"{
"version": "0.1",
"title": "Test Document",
"creator": ["Author One", "Author Two"],
"subject": ["Testing", "Validation"],
"language": "en"
}"#;
let errors = validate_dublin_core(dc).unwrap();
assert!(errors.is_empty());
}
// A block index with algorithm, root, and per-block hash entries must
// validate cleanly.
#[test]
fn test_validate_real_block_index() {
let index = r#"{
"version": "0.1",
"algorithm": "sha256",
"root": "abc123def456",
"blocks": [
{"id": "block-1", "hash": "hash1", "index": 0},
{"id": "block-2", "hash": "hash2", "index": 1}
]
}"#;
let errors = validate_block_index(index).unwrap();
assert!(errors.is_empty());
}
}
// Save/open round trips for progressively richer content: Dublin Core
// fields, nested blocks (lists, code), extension blocks, and citation marks.
mod round_trip_tests {
use super::*;
// All builder-set Dublin Core fields must be readable after reopening.
#[test]
fn test_dublin_core_round_trip() -> Result<()> {
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("dublin.cdx");
let doc = Document::builder()
.title("Dublin Core Test")
.creator("Author Name")
.description("A test document with Dublin Core metadata.")
.language("en-US")
.add_paragraph("Content.")
.build()?;
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
let dc = reopened.dublin_core();
assert_eq!(dc.terms.title, "Dublin Core Test");
// A single `creator` call yields a one-element list.
assert_eq!(dc.terms.creator.as_slice().len(), 1);
assert_eq!(dc.terms.creator.as_slice()[0], "Author Name");
assert_eq!(
dc.terms.description,
Some("A test document with Dublin Core metadata.".to_string())
);
assert_eq!(dc.terms.language, Some("en-US".to_string()));
Ok(())
}
// Nested structures (unordered list with list items, code block with a
// language tag) must survive the round trip and still verify.
#[test]
fn test_complex_content_round_trip() -> Result<()> {
use cdx_core::content::{Block, Text};
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("complex.cdx");
let doc = Document::builder()
.title("Complex Content")
.creator("Test")
.add_heading(1, "Introduction")
.add_paragraph("This is the introduction.")
.add_heading(2, "List Section")
.add_block(Block::unordered_list(vec![
Block::list_item(vec![Block::paragraph(vec![Text::plain("Item 1")])]),
Block::list_item(vec![Block::paragraph(vec![Text::plain("Item 2")])]),
Block::list_item(vec![Block::paragraph(vec![Text::plain("Item 3")])]),
]))
.add_heading(2, "Code Section")
.add_block(Block::code_block(
"fn main() {\n println!(\"Hello\");\n}",
Some("rust".to_string()),
))
.build()?;
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
assert_eq!(reopened.content().blocks.len(), doc.content().blocks.len());
let report = reopened.verify()?;
assert!(report.is_valid());
Ok(())
}
// A namespaced extension block with JSON attributes must round-trip with
// namespace, block type, and attribute values intact.
#[test]
fn test_extension_block_round_trip() -> Result<()> {
use cdx_core::content::Block;
use cdx_core::extensions::ExtensionBlock;
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("extension.cdx");
let ext_block =
ExtensionBlock::new("custom", "widget").with_attributes(serde_json::json!({
"color": "blue",
"size": 42
}));
let doc = Document::builder()
.title("Extension Test")
.creator("Test")
.add_paragraph("Before extension.")
.add_block(Block::Extension(ext_block))
.add_paragraph("After extension.")
.build()?;
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
// Locate the extension block among the reopened content blocks.
let ext = reopened
.content()
.blocks
.iter()
.find_map(|b| b.as_extension());
assert!(ext.is_some());
let ext = ext.unwrap();
assert_eq!(ext.namespace, "custom");
assert_eq!(ext.block_type, "widget");
assert_eq!(
ext.attributes.get("color"),
Some(&serde_json::json!("blue"))
);
Ok(())
}
// Single- and multi-reference citation marks on text runs must round-trip
// through the archive with their "refs" attributes intact.
#[test]
fn test_citation_marks_archive_roundtrip() -> Result<()> {
use cdx_core::content::{Block, ExtensionMark, Mark, Text};
let temp_dir = tempfile::tempdir().unwrap();
let file_path = temp_dir.path().join("citation.cdx");
let single_cite = Text::with_marks(
"Smith (2023)",
vec![Mark::Extension(ExtensionMark::citation("smith2023"))],
);
let multi_cite = Text::with_marks(
"Smith & Jones",
vec![Mark::Extension(ExtensionMark::multi_citation(&[
"smith2023".to_string(),
"jones2024".to_string(),
]))],
);
let plain = Text::plain(" discuss this topic.");
let doc = Document::builder()
.title("Citation Roundtrip Test")
.creator("Test")
.add_block(Block::paragraph(vec![single_cite, multi_cite, plain]))
.build()?;
doc.save(&file_path)?;
let reopened = Document::open(&file_path)?;
let para = &reopened.content().blocks[0];
if let Block::Paragraph { children, .. } = para {
// Three text runs: single cite, multi cite, plain trailer.
assert_eq!(children.len(), 3);
if let Mark::Extension(ext) = &children[0].marks[0] {
let refs = ext
.get_string_array_attribute("refs")
.expect("single cite should have refs");
assert_eq!(refs, vec!["smith2023"]);
} else {
panic!("Expected extension mark on single citation");
}
if let Mark::Extension(ext) = &children[1].marks[0] {
let refs = ext
.get_string_array_attribute("refs")
.expect("multi cite should have refs");
assert_eq!(refs, vec!["smith2023", "jones2024"]);
} else {
panic!("Expected extension mark on multi citation");
}
} else {
panic!("Expected paragraph block");
}
Ok(())
}
}
#[cfg(feature = "ocsp")]
// Certificate revocation data types: status variants, checker configuration,
// result construction, and RFC 5280-style reason codes.
mod revocation_tests {
use cdx_core::security::{
RevocationConfig, RevocationMethod, RevocationReason, RevocationResult, RevocationStatus,
};
use std::time::Duration;
// Good / Revoked / Unknown each answer `is_good` and `is_revoked`
// consistently (Unknown is neither).
#[test]
fn test_revocation_status_types() {
let good = RevocationStatus::Good;
assert!(good.is_good());
assert!(!good.is_revoked());
let revoked = RevocationStatus::Revoked {
reason: Some(RevocationReason::KeyCompromise),
revocation_time: Some("2024-01-01T00:00:00Z".to_string()),
};
assert!(!revoked.is_good());
assert!(revoked.is_revoked());
let unknown = RevocationStatus::Unknown;
assert!(!unknown.is_good());
assert!(!unknown.is_revoked());
}
// Builder-style configuration setters must be reflected in the fields.
#[test]
fn test_revocation_config() {
let config = RevocationConfig::new()
.with_timeout(Duration::from_secs(30))
.with_prefer_ocsp(true)
.with_strict_mode(true)
.with_ocsp_responder("https://ocsp.example.com");
assert_eq!(config.timeout, Duration::from_secs(30));
assert!(config.prefer_ocsp);
assert!(config.strict_mode);
assert_eq!(
config.ocsp_responder,
Some("https://ocsp.example.com".to_string())
);
}
// A Good OCSP result carries method, serial, and optional responder data.
#[test]
fn test_revocation_result() {
let result = RevocationResult::new(
RevocationStatus::Good,
RevocationMethod::Ocsp,
"ABC123".to_string(),
)
.with_responder("https://ocsp.example.com")
.with_produced_at("2024-01-01T12:00:00Z")
.with_next_update("2024-01-02T12:00:00Z");
assert!(result.is_valid());
assert_eq!(result.method, RevocationMethod::Ocsp);
assert_eq!(result.serial_number, "ABC123");
assert!(result.responder_url.is_some());
}
// Reason <-> numeric code mapping, including rejection of unknown codes.
#[test]
fn test_revocation_reasons() {
assert_eq!(RevocationReason::Unspecified.code(), 0);
assert_eq!(RevocationReason::KeyCompromise.code(), 1);
assert_eq!(RevocationReason::CaCompromise.code(), 2);
assert_eq!(RevocationReason::CessationOfOperation.code(), 5);
assert_eq!(
RevocationReason::from_code(1),
Some(RevocationReason::KeyCompromise)
);
assert_eq!(RevocationReason::from_code(99), None);
}
}
// Ethereum timestamp provenance: timestamp construction, per-network
// metadata, verification outcome constructors, and offline verification.
mod ethereum_tests {
    use cdx_core::provenance::ethereum::{
        verify_offline, EthereumConfig, EthereumNetwork, EthereumTimestamp,
        EthereumTimestampMethod, EthereumVerification,
    };
    use cdx_core::{HashAlgorithm, Hasher};

    /// Fluent construction must keep the tx hash valid and expose the block
    /// number, confirmation count, and block timestamp exactly as set.
    #[test]
    fn test_ethereum_timestamp_creation() {
        let doc_hash = Hasher::hash(HashAlgorithm::Sha256, b"test document");
        let tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef";
        let timestamp = EthereumTimestamp::new(
            tx_hash.to_string(),
            doc_hash.clone(),
            EthereumNetwork::Mainnet,
        )
        .with_block_number(12345678)
        .with_confirmations(100)
        .with_block_timestamp(1700000000);
        assert!(timestamp.is_valid_tx_hash());
        // 100 confirmations meets a threshold of 50 but not 200.
        assert!(timestamp.is_confirmed(50));
        assert!(!timestamp.is_confirmed(200));
        assert_eq!(timestamp.unix_timestamp(), Some(1700000000));
    }

    /// Chain IDs, production flags, and explorer URL generation per network.
    #[test]
    fn test_ethereum_networks() {
        assert_eq!(EthereumNetwork::Mainnet.chain_id(), 1);
        assert_eq!(EthereumNetwork::Polygon.chain_id(), 137);
        assert!(EthereumNetwork::Mainnet.is_production());
        assert!(!EthereumNetwork::Sepolia.is_production());
        let url = EthereumNetwork::Mainnet.explorer_url("0x123");
        assert_eq!(url, Some("https://etherscan.io/tx/0x123".to_string()));
    }

    /// The three verification-outcome constructors: success carries block
    /// data and a hash match; failure carries an error; pending is unverified.
    #[test]
    fn test_ethereum_verification() {
        let success = EthereumVerification::success(12345678, 100, 1700000000);
        assert!(success.verified);
        assert!(success.hash_matches);
        assert_eq!(success.confirmations, 100);
        let failure = EthereumVerification::failure("Test error");
        assert!(!failure.verified);
        assert!(failure.error.is_some());
        let pending = EthereumVerification::pending();
        assert!(!pending.verified);
    }

    /// Offline verification passes when the recorded confirmations (50) meet
    /// the configured minimum (12).
    ///
    /// FIX: the call previously read `verify_offline(×tamp, &config)` —
    /// `&timestamp` had been corrupted into the `×` glyph (an HTML
    /// `&times;` entity rendered into the source), which does not compile.
    #[test]
    fn test_offline_verification_valid() {
        let doc_hash = Hasher::hash(HashAlgorithm::Sha256, b"test");
        let tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef";
        let timestamp =
            EthereumTimestamp::new(tx_hash.to_string(), doc_hash, EthereumNetwork::Mainnet)
                .with_block_number(12345678)
                .with_confirmations(50)
                .with_block_timestamp(1700000000);
        let config = EthereumConfig::new().with_min_confirmations(12);
        let result = verify_offline(&timestamp, &config);
        assert!(result.verified);
    }

    /// Offline verification fails with an "Insufficient" error when the
    /// recorded confirmations (5) fall below the configured minimum (12).
    /// (Same `×tamp` -> `&timestamp` corruption fixed here.)
    #[test]
    fn test_offline_verification_insufficient_confirmations() {
        let doc_hash = Hasher::hash(HashAlgorithm::Sha256, b"test");
        let tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef";
        let timestamp =
            EthereumTimestamp::new(tx_hash.to_string(), doc_hash, EthereumNetwork::Mainnet)
                .with_confirmations(5);
        let config = EthereumConfig::new().with_min_confirmations(12);
        let result = verify_offline(&timestamp, &config);
        assert!(!result.verified);
        assert!(result.error.unwrap().contains("Insufficient"));
    }

    /// Human-readable display names of the timestamping methods.
    #[test]
    fn test_timestamp_methods() {
        assert_eq!(
            EthereumTimestampMethod::TransactionData.to_string(),
            "Transaction Data"
        );
        assert_eq!(
            EthereumTimestampMethod::SmartContract.to_string(),
            "Smart Contract Event"
        );
    }
}
// Content-anchor URI parsing ("#block", "#block/offset", "#block/start-end"),
// conversion to ContentAnchor, and the anchor-kind predicates.
mod anchor_tests {
    use cdx_core::anchor::{ContentAnchor, ContentAnchorUri};

    /// A bare "#id" fragment parses as a block-level anchor with no offsets
    /// and prints back unchanged.
    #[test]
    fn test_anchor_uri_block_level() {
        let parsed: ContentAnchorUri = "#blockId".parse().unwrap();
        assert_eq!(parsed.block_id, "blockId");
        assert!(parsed.offset.is_none());
        assert!(parsed.start.is_none());
        assert!(parsed.end.is_none());
        assert_eq!(parsed.to_string(), "#blockId");
    }

    /// "#id/N" parses as a point anchor carrying only an offset.
    #[test]
    fn test_anchor_uri_point() {
        let parsed: ContentAnchorUri = "#blockId/15".parse().unwrap();
        assert_eq!(parsed.block_id, "blockId");
        assert_eq!(parsed.offset, Some(15));
        assert!(parsed.start.is_none());
        assert_eq!(parsed.to_string(), "#blockId/15");
    }

    /// "#id/A-B" parses as a range anchor with start/end but no offset.
    #[test]
    fn test_anchor_uri_range() {
        let parsed: ContentAnchorUri = "#blockId/10-25".parse().unwrap();
        assert_eq!(parsed.block_id, "blockId");
        assert!(parsed.offset.is_none());
        assert_eq!(parsed.start, Some(10));
        assert_eq!(parsed.end, Some(25));
        assert_eq!(parsed.to_string(), "#blockId/10-25");
    }

    /// URI -> ContentAnchor -> URI is lossless for a range anchor.
    #[test]
    fn test_anchor_uri_to_anchor_conversion() {
        let parsed: ContentAnchorUri = "#para-1/10-25".parse().unwrap();
        let anchor = ContentAnchor::from(parsed);
        assert_eq!(anchor.block_id, "para-1");
        assert!(anchor.is_range_anchor());
        assert_eq!(anchor.start, Some(10));
        assert_eq!(anchor.end, Some(25));
        let round_tripped = anchor.to_uri();
        assert_eq!(round_tripped.to_string(), "#para-1/10-25");
    }

    /// Exactly one kind predicate holds for each of the three anchor kinds.
    #[test]
    fn test_anchor_type_predicates() {
        let cases = [
            (ContentAnchor::block("para-1"), (true, false, false)),
            (ContentAnchor::point("para-1", 15), (false, true, false)),
            (ContentAnchor::range("para-1", 10, 25), (false, false, true)),
        ];
        for (anchor, (is_block, is_point, is_range)) in cases {
            assert_eq!(anchor.is_block_anchor(), is_block);
            assert_eq!(anchor.is_point_anchor(), is_point);
            assert_eq!(anchor.is_range_anchor(), is_range);
        }
    }
}
// Phantom (out-of-band annotation) clusters: JSON round trips, connection
// validation (warning in Draft, error in Frozen), scope and style serde.
mod phantom_tests {
use cdx_core::anchor::ContentAnchor;
use cdx_core::extensions::{
ConnectionStyle, Phantom, PhantomCluster, PhantomClusters, PhantomConnection,
PhantomContent, PhantomPosition, PhantomScope, PhantomSize,
};
use cdx_core::DocumentState;
// A cluster with one positioned, sized phantom must serialize to JSON with
// the expected top-level keys and deserialize back structurally intact.
#[test]
fn test_phantom_clusters_roundtrip() {
let anchor = ContentAnchor::range("para-1", 10, 25);
let content = PhantomContent::paragraph("Note text");
let phantom = Phantom::new("phantom-1", PhantomPosition::new(100.0, 50.0), content)
.with_size(PhantomSize::new(200.0, 100.0));
let cluster = PhantomCluster::new("cluster-1", anchor, "Research Notes")
.with_scope(PhantomScope::Shared)
.with_phantom(phantom);
let mut clusters = PhantomClusters::new();
clusters.add_cluster(cluster);
// Pretty-printed JSON, hence the spaces after colons in the probes.
let json = serde_json::to_string_pretty(&clusters).unwrap();
assert!(json.contains("\"version\": \"0.1\""));
assert!(json.contains("\"clusters\":"));
assert!(json.contains("\"phantoms\":"));
assert!(json.contains("Research Notes"));
let parsed: PhantomClusters = serde_json::from_str(&json).unwrap();
assert_eq!(parsed.clusters.len(), 1);
assert_eq!(parsed.clusters[0].id, "cluster-1");
assert_eq!(parsed.clusters[0].phantoms.len(), 1);
}
// A connection whose target phantom exists in the same cluster produces no
// validation diagnostics.
#[test]
fn test_phantom_connection_valid() {
let anchor = ContentAnchor::block("para-1");
let phantom1 = Phantom::new(
"p1",
PhantomPosition::new(0.0, 0.0),
PhantomContent::paragraph("First"),
)
.connect_to("p2");
let phantom2 = Phantom::new(
"p2",
PhantomPosition::new(100.0, 0.0),
PhantomContent::paragraph("Second"),
);
let cluster = PhantomCluster::new("cluster-1", anchor, "Test")
.with_phantom(phantom1)
.with_phantom(phantom2);
let errors = cluster.validate_connections(DocumentState::Draft);
assert!(
errors.is_empty(),
"Valid connections should pass: {:?}",
errors
);
}
// A dangling connection target is a WARNING while the document is a Draft
// but escalates to an ERROR once the document is Frozen.
#[test]
fn test_phantom_connection_missing_target() {
let anchor = ContentAnchor::block("para-1");
let phantom1 = Phantom::new(
"p1",
PhantomPosition::new(0.0, 0.0),
PhantomContent::paragraph("First"),
)
.connect_to("nonexistent");
let cluster = PhantomCluster::new("cluster-1", anchor, "Test").with_phantom(phantom1);
let warnings = cluster.validate_connections(DocumentState::Draft);
assert!(!warnings.is_empty());
assert!(warnings[0].starts_with("WARNING:"));
let errors = cluster.validate_connections(DocumentState::Frozen);
assert!(!errors.is_empty());
assert!(errors[0].starts_with("ERROR:"));
}
// Scope variants serialize as a tagged enum ("type" discriminator), with
// the role scope carrying an extra "role" field.
#[test]
fn test_phantom_scope_serialization() {
let shared = PhantomScope::Shared;
let json = serde_json::to_string(&shared).unwrap();
assert!(json.contains("\"type\":\"shared\""));
let private = PhantomScope::Private;
let json = serde_json::to_string(&private).unwrap();
assert!(json.contains("\"type\":\"private\""));
let role = PhantomScope::role("editor");
let json = serde_json::to_string(&role).unwrap();
assert!(json.contains("\"type\":\"role\""));
assert!(json.contains("\"role\":\"editor\""));
}
// Connection style and label serialize as plain lowercase strings.
#[test]
fn test_connection_styles() {
let conn = PhantomConnection::new("target")
.with_style(ConnectionStyle::Arrow)
.with_label("relates to");
let json = serde_json::to_string(&conn).unwrap();
assert!(json.contains("\"style\":\"arrow\""));
assert!(json.contains("\"label\":\"relates to\""));
let line = ConnectionStyle::Line;
let dashed = ConnectionStyle::Dashed;
assert_eq!(serde_json::to_string(&line).unwrap(), "\"line\"");
assert_eq!(serde_json::to_string(&dashed).unwrap(), "\"dashed\"");
}
}
#[cfg(feature = "signatures")]
// Signature scopes: binding a signature to a document ID plus specific
// layout hashes, with deterministic (JCS) canonicalization.
mod scoped_signature_tests {
use cdx_core::security::{Signature, SignatureAlgorithm, SignatureScope, SignerInfo};
use cdx_core::{HashAlgorithm, Hasher};
use std::collections::HashMap;
// A scope built with `with_layout` reports the document ID and exposes the
// layout hash under its path key.
#[test]
fn test_signature_scope_creation() {
let doc_id = Hasher::hash(HashAlgorithm::Sha256, b"document content");
let layout_hash = Hasher::hash(HashAlgorithm::Sha256, b"layout content");
let scope = SignatureScope::new(doc_id.clone())
.with_layout("presentation/print.json", layout_hash.clone());
assert_eq!(scope.document_id, doc_id);
assert!(scope.has_layouts());
let layouts = scope.layouts.as_ref().unwrap();
assert_eq!(layouts.get("presentation/print.json"), Some(&layout_hash));
}
// JCS canonicalization must be independent of HashMap insertion order:
// the same layouts inserted in opposite orders produce identical bytes.
#[test]
fn test_signature_scope_jcs_deterministic() {
let doc_id = Hasher::hash(HashAlgorithm::Sha256, b"test");
let layout1 = Hasher::hash(HashAlgorithm::Sha256, b"layout1");
let layout2 = Hasher::hash(HashAlgorithm::Sha256, b"layout2");
let mut layouts1 = HashMap::new();
layouts1.insert("a.json".to_string(), layout1.clone());
layouts1.insert("b.json".to_string(), layout2.clone());
let mut layouts2 = HashMap::new();
layouts2.insert("b.json".to_string(), layout2.clone());
layouts2.insert("a.json".to_string(), layout1.clone());
let scope1 = SignatureScope::new(doc_id.clone()).with_layouts(layouts1);
let scope2 = SignatureScope::new(doc_id).with_layouts(layouts2);
let jcs1 = scope1.to_jcs().unwrap();
let jcs2 = scope2.to_jcs().unwrap();
assert_eq!(jcs1, jcs2);
}
// A signature carrying a scope serializes with "scope"/"documentId" keys
// and deserializes back with the same scoped document ID.
#[test]
fn test_scoped_signature_serialization() {
let doc_id = Hasher::hash(HashAlgorithm::Sha256, b"test");
let scope = SignatureScope::new(doc_id);
let signer = SignerInfo::new("Test Signer");
let sig = Signature::new("sig-1", SignatureAlgorithm::ES256, signer, "base64value")
.with_scope(scope.clone());
assert!(sig.is_scoped());
let json = serde_json::to_string_pretty(&sig).unwrap();
assert!(json.contains("\"scope\":"));
assert!(json.contains("\"documentId\":"));
let parsed: Signature = serde_json::from_str(&json).unwrap();
assert!(parsed.is_scoped());
assert_eq!(parsed.scope.unwrap().document_id, scope.document_id);
}
}
mod validation_rule_tests {
    use cdx_core::extensions::ValidationRule;

    /// Every declarative validator constructor serializes to its camelCase tag.
    #[test]
    fn test_declarative_validators() {
        let cases: [(ValidationRule, &str); 5] = [
            (ValidationRule::contains_uppercase(), "containsUppercase"),
            (ValidationRule::contains_lowercase(), "containsLowercase"),
            (ValidationRule::contains_digit(), "containsDigit"),
            (ValidationRule::contains_special(), "containsSpecial"),
            (ValidationRule::matches_field("password"), "matchesField"),
        ];
        for (rule, tag) in &cases {
            assert!(serde_json::to_string(rule).unwrap().contains(tag));
        }
        // matchesField additionally embeds the referenced field name.
        let matches_json = serde_json::to_string(&cases[4].0).unwrap();
        assert!(matches_json.contains("password"));
    }

    /// A custom message survives a serialize/deserialize round trip.
    #[test]
    fn test_validation_rule_with_message() {
        let rule = ValidationRule::ContainsUppercase {
            message: Some("Must contain uppercase letter".to_string()),
        };
        let json = serde_json::to_string(&rule).unwrap();
        assert!(json.contains("Must contain uppercase letter"));
        match serde_json::from_str::<ValidationRule>(&json).unwrap() {
            ValidationRule::ContainsUppercase { message } => {
                assert_eq!(message.as_deref(), Some("Must contain uppercase letter"));
            }
            _ => panic!("Expected ContainsUppercase variant"),
        }
    }

    /// MatchesField keeps both the target field and the message across JSON.
    #[test]
    fn test_matches_field_validator() {
        let rule = ValidationRule::MatchesField {
            field: "confirm_password".to_string(),
            message: Some("Passwords must match".to_string()),
        };
        let json = serde_json::to_string(&rule).unwrap();
        assert!(json.contains("confirm_password"));
        assert!(json.contains("Passwords must match"));
        match serde_json::from_str::<ValidationRule>(&json).unwrap() {
            ValidationRule::MatchesField { field, message } => {
                assert_eq!(field, "confirm_password");
                assert_eq!(message.as_deref(), Some("Passwords must match"));
            }
            _ => panic!("Expected MatchesField variant"),
        }
    }
}
mod paginated_presentation_tests {
    use cdx_core::presentation::{FlowElement, PageElement, Position};

    /// PageElement serializes its block reference as `blockId` (never the
    /// legacy `blockRef`) and deserializes it back.
    #[test]
    fn test_page_element_block_id() {
        let element = PageElement {
            block_id: "block-1".to_string(),
            position: Position::new("72pt", "72pt", "468pt"),
            style: None,
            overflow: None,
            transform: None,
        };
        let serialized = serde_json::to_string(&element).unwrap();
        assert!(serialized.contains("\"blockId\":"));
        assert!(!serialized.contains("\"blockRef\":"));
        let raw = r#"{"blockId":"block-1","position":{"x":"72pt","y":"72pt","width":"468pt","height":"auto"}}"#;
        let parsed: PageElement = serde_json::from_str(raw).unwrap();
        assert_eq!(parsed.block_id, "block-1");
    }

    /// FlowElement uses `blockIds` (plural) and round-trips cleanly.
    #[test]
    fn test_flow_element_block_ids() {
        let raw = r#"{"type":"flow","blockIds":["block-1","block-2"],"columns":1,"startPage":1,"regions":[{"page":1,"position":{"x":"72pt","y":"72pt","width":"468pt","height":"auto"}}]}"#;
        let parsed: FlowElement = serde_json::from_str(raw).unwrap();
        assert_eq!(parsed.block_ids, vec!["block-1", "block-2"]);
        let reserialized = serde_json::to_string(&parsed).unwrap();
        assert!(reserialized.contains("\"blockIds\":"));
        assert!(!reserialized.contains("\"blockRefs\":"));
    }
}
mod anchor_mark_tests {
    use cdx_core::content::{Mark, Text};

    /// An anchor mark round-trips through JSON with its tag and id intact.
    #[test]
    fn test_mark_anchor() {
        let mark = Mark::Anchor {
            id: "anchor-1".to_string(),
        };
        let json = serde_json::to_string(&mark).unwrap();
        assert!(json.contains("\"type\":\"anchor\""));
        assert!(json.contains("\"id\":\"anchor-1\""));
        match serde_json::from_str::<Mark>(&json).unwrap() {
            Mark::Anchor { id } => assert_eq!(id, "anchor-1"),
            _ => panic!("Expected Anchor mark"),
        }
    }

    /// Text built with an anchor mark reports and serializes it.
    #[test]
    fn test_text_with_anchor_mark() {
        let anchor = Mark::Anchor {
            id: "ref-1".to_string(),
        };
        let text = Text::with_marks("anchor point", vec![anchor]);
        assert!(text.has_marks());
        let json = serde_json::to_string(&text).unwrap();
        assert!(json.contains("anchor point"));
        assert!(json.contains("\"type\":\"anchor\""));
    }
}
#[cfg(feature = "timestamps-ots")]
mod ots_tests {
    use base64::{engine::general_purpose::STANDARD as BASE64, Engine};
    use cdx_core::provenance::ots::{
        calendars, OtsClient, TimestampStatus, UpgradeResult, VerificationStatus,
    };
    use cdx_core::provenance::TimestampRecord;
    use cdx_core::{HashAlgorithm, Hasher};
    use chrono::Utc;

    /// Clients can be built with default calendars, custom calendars, and a
    /// custom timeout without panicking.
    #[test]
    fn test_ots_client_creation() {
        let _client = OtsClient::new();
        let custom_client =
            OtsClient::with_calendars(vec!["https://custom.example.com".to_string()]);
        let _ = custom_client.with_timeout(60);
    }

    /// Bundled public calendar URLs are non-empty and use HTTPS.
    #[test]
    fn test_calendar_urls() {
        assert!(!calendars::ALICE.is_empty());
        assert!(!calendars::BOB.is_empty());
        assert!(!calendars::FINNEY.is_empty());
        assert!(calendars::ALICE.starts_with("https://"));
    }

    /// An empty proof body verifies as Invalid rather than erroring out.
    #[test]
    fn test_verify_empty_proof() {
        let client = OtsClient::new();
        let doc_id = Hasher::hash(HashAlgorithm::Sha256, b"test document");
        let timestamp = TimestampRecord::open_timestamps(Utc::now(), "");
        // FIX: source had `×tamp` — `&timestamp` mangled by HTML-entity
        // decoding (`&times` -> `×`); restored so the call compiles.
        let result = client.verify_timestamp(&timestamp, &doc_id).unwrap();
        assert!(!result.valid);
        assert_eq!(result.status, VerificationStatus::Invalid);
    }

    /// A structurally-present but unanchored proof verifies as Pending.
    #[test]
    fn test_verify_valid_proof_structure() {
        let client = OtsClient::new();
        let doc_id = Hasher::hash(HashAlgorithm::Sha256, b"test document");
        let proof_data = BASE64.encode(b"some_proof_data_here");
        let timestamp = TimestampRecord::open_timestamps(Utc::now(), proof_data);
        // FIX: same `×tamp` -> `&timestamp` restoration as above.
        let result = client.verify_timestamp(&timestamp, &doc_id).unwrap();
        assert!(!result.valid);
        assert_eq!(result.status, VerificationStatus::Pending);
    }

    /// A Pending upgrade result is not complete and yields no record.
    #[test]
    fn test_upgrade_result_types() {
        let pending = UpgradeResult::Pending {
            message: "Not ready".to_string(),
        };
        assert!(!pending.is_complete());
        assert!(pending.into_record().is_none());
    }

    /// Pending/Complete status predicates and their Display output.
    #[test]
    fn test_timestamp_status_types() {
        let pending = TimestampStatus::Pending;
        assert!(pending.is_pending());
        assert!(!pending.is_complete());
        let complete = TimestampStatus::Complete {
            bitcoin_txid: Some("abc123".to_string()),
            block_height: Some(800000),
        };
        assert!(!complete.is_pending());
        assert!(complete.is_complete());
        assert_eq!(pending.to_string(), "Pending");
        assert!(complete.to_string().contains("abc123"));
    }
}
mod hash_boundary_tests {
    use super::*;

    /// Changing the title must change the document ID.
    #[test]
    fn test_hash_changes_with_title() -> Result<()> {
        let doc1 = Document::builder()
            .title("Title A")
            .creator("Author")
            .add_paragraph("Same content")
            .build()?;
        let doc2 = Document::builder()
            .title("Title B")
            .creator("Author")
            .add_paragraph("Same content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let id2 = doc2.compute_id()?;
        // FIX: the IDs were computed but never compared (`let _ = (id1, id2)`),
        // so the test asserted nothing. Title inclusion in the hash boundary is
        // already exercised by `proptest_hash_boundary_metadata_inclusion`.
        assert_ne!(id1, id2, "Title is part of the hash boundary");
        Ok(())
    }

    /// Changing the creator must change the document ID.
    #[test]
    fn test_hash_changes_with_creator() -> Result<()> {
        let doc1 = Document::builder()
            .title("Same Title")
            .creator("Author A")
            .add_paragraph("Same content")
            .build()?;
        let doc2 = Document::builder()
            .title("Same Title")
            .creator("Author B")
            .add_paragraph("Same content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let id2 = doc2.compute_id()?;
        // FIX: previously computed-but-unasserted; creator inclusion is also
        // covered by `proptest_hash_boundary_metadata_inclusion`.
        assert_ne!(id1, id2, "Creator is part of the hash boundary");
        Ok(())
    }

    /// Computes IDs for documents differing only in description.
    ///
    /// NOTE(review): this test asserts nothing, so it only proves
    /// `compute_id` succeeds. If `description` is inside the hash boundary,
    /// this should `assert_ne!(id1, id2)` — confirm against the hashing spec
    /// before tightening.
    #[test]
    fn test_hash_changes_with_description() -> Result<()> {
        let doc1 = Document::builder()
            .title("Title")
            .creator("Author")
            .description("Description A")
            .add_paragraph("Content")
            .build()?;
        let doc2 = Document::builder()
            .title("Title")
            .creator("Author")
            .description("Description B")
            .add_paragraph("Content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let id2 = doc2.compute_id()?;
        let _ = (id1, id2);
        Ok(())
    }

    /// Computes IDs for documents differing only in language.
    ///
    /// NOTE(review): same as the description test — no assertion yet; add
    /// `assert_ne!` once language is confirmed to be in the hash boundary.
    #[test]
    fn test_hash_changes_with_language() -> Result<()> {
        let doc1 = Document::builder()
            .title("Title")
            .creator("Author")
            .language("en")
            .add_paragraph("Content")
            .build()?;
        let doc2 = Document::builder()
            .title("Title")
            .creator("Author")
            .language("de")
            .add_paragraph("Content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let id2 = doc2.compute_id()?;
        let _ = (id1, id2);
        Ok(())
    }

    /// Phantom annotations are outside the hash boundary.
    #[test]
    fn test_hash_unchanged_by_phantoms() -> Result<()> {
        use cdx_core::anchor::ContentAnchor;
        use cdx_core::extensions::{
            Phantom, PhantomCluster, PhantomClusters, PhantomContent, PhantomPosition,
        };
        let mut doc1 = Document::builder()
            .title("Title")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let mut clusters = PhantomClusters::new();
        let position = PhantomPosition::new(100.0, 200.0);
        let content = PhantomContent::paragraph("Phantom note");
        let phantom = Phantom::new("p1", position, content);
        let cluster = PhantomCluster::new("c1", ContentAnchor::block("block-1"), "Notes")
            .with_phantom(phantom);
        clusters.add_cluster(cluster);
        doc1.set_phantom_clusters(clusters)?;
        let id2 = doc1.compute_id()?;
        assert_eq!(
            id1, id2,
            "Hash should NOT change when phantom data is added"
        );
        Ok(())
    }

    /// Form data is outside the hash boundary.
    #[test]
    fn test_hash_unchanged_by_forms() -> Result<()> {
        use cdx_core::extensions::FormData;
        let mut doc1 = Document::builder()
            .title("Title")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let mut form_data = FormData::new();
        form_data.set("field1", serde_json::json!("value1"));
        form_data.set("field2", serde_json::json!(42));
        doc1.set_form_data(form_data)?;
        let id2 = doc1.compute_id()?;
        assert_eq!(id1, id2, "Hash should NOT change when form data is added");
        Ok(())
    }

    /// Comment threads are outside the hash boundary.
    #[test]
    fn test_hash_unchanged_by_comments() -> Result<()> {
        use cdx_core::extensions::{Collaborator, Comment, CommentThread};
        let mut doc1 = Document::builder()
            .title("Title")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let mut thread = CommentThread::new();
        let author = Collaborator::new("Reviewer");
        let comment = Comment::new("c1", "block-1", author, "This needs revision");
        thread.add(comment);
        doc1.set_comments(thread)?;
        let id2 = doc1.compute_id()?;
        assert_eq!(id1, id2, "Hash should NOT change when comments are added");
        Ok(())
    }

    /// Signatures are outside the hash boundary.
    #[cfg(feature = "signatures")]
    #[test]
    fn test_hash_unchanged_by_signatures() -> Result<()> {
        use cdx_core::security::{EcdsaSigner, Signer, SignerInfo};
        let mut doc1 = Document::builder()
            .title("Title")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let id1 = doc1.compute_id()?;
        let signer_info = SignerInfo::new("Test Signer");
        let (signer, _) = EcdsaSigner::generate(signer_info)?;
        let signature = signer.sign(&id1)?;
        doc1.add_signature(signature)?;
        let id2 = doc1.compute_id()?;
        assert_eq!(id1, id2, "Hash should NOT change when signatures are added");
        Ok(())
    }
}
mod lineage_validation_tests {
    use super::*;

    /// A fork records a concrete (non-pending) parent hash in
    /// `algorithm:hexdigest` form.
    #[test]
    fn test_lineage_parent_hash_format() -> Result<()> {
        let source = Document::builder()
            .title("Original")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let fork = source.fork()?;
        let lineage = fork.manifest().lineage.as_ref().unwrap();
        let parent = lineage.parent.as_ref().unwrap();
        assert!(!parent.is_pending(), "Parent hash should not be pending");
        assert!(
            parent.to_string().contains(':'),
            "Parent hash should be in algorithm:hexdigest format"
        );
        Ok(())
    }

    /// Across two forks, ancestors (when recorded) include the root, and the
    /// parent pointer names the immediate predecessor.
    #[test]
    fn test_lineage_ancestors_ordered() -> Result<()> {
        let root = Document::builder()
            .title("Version 1")
            .creator("Author")
            .add_paragraph("V1 content")
            .build()?;
        let root_id = root.compute_id()?;
        let second = root.fork()?;
        let second_id = second.compute_id()?;
        let third = second.fork()?;
        let lineage = third.manifest().lineage.as_ref().unwrap();
        if !lineage.ancestors.is_empty() {
            assert!(
                lineage.ancestors.contains(&root_id),
                "Ancestors should contain v1"
            );
        }
        assert_eq!(lineage.parent, Some(second_id));
        Ok(())
    }

    /// The version recorded on a fork is at least 1.
    #[test]
    fn test_lineage_version_positive() -> Result<()> {
        let source = Document::builder()
            .title("Document")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let fork = source.fork()?;
        let lineage = fork.manifest().lineage.as_ref().unwrap();
        assert!(lineage.version.unwrap_or(0) >= 1, "Version should be >= 1");
        Ok(())
    }

    /// Depth equals ancestors.len() + 1 for a non-root document.
    #[test]
    fn test_lineage_depth_matches_ancestors() -> Result<()> {
        let root = Document::builder()
            .title("V1")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let fork = root.fork()?;
        let lineage = fork.manifest().lineage.as_ref().unwrap();
        let expected_depth = lineage.ancestors.len() as u32 + 1;
        assert_eq!(
            lineage.depth.unwrap_or(0),
            expected_depth,
            "Depth should equal ancestors.len() + 1 for non-root"
        );
        Ok(())
    }

    /// Forking yields lineage whose parent is the original's ID and whose
    /// version is bumped to 2.
    #[test]
    fn test_fork_creates_valid_lineage() -> Result<()> {
        let source = Document::builder()
            .title("Original")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let source_id = source.compute_id()?;
        let fork = source.fork()?;
        assert!(
            fork.manifest().lineage.is_some(),
            "Forked document must have lineage"
        );
        let lineage = fork.manifest().lineage.as_ref().unwrap();
        assert_eq!(
            lineage.parent,
            Some(source_id),
            "Parent should be the original document ID"
        );
        assert_eq!(lineage.version, Some(2), "Version should be 2");
        Ok(())
    }
}
mod state_requirement_tests {
    use super::*;
    use cdx_core::{ContentRef, Lineage, Metadata, PresentationRef, SecurityRef};

    /// Fixed, well-formed sha256 DocumentId for the manifest tests below.
    /// Was previously duplicated as a nested fn in two tests.
    #[cfg(feature = "signatures")]
    fn test_hash() -> cdx_core::DocumentId {
        "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
            .parse()
            .unwrap()
    }

    /// Builds a manifest in the given state with root lineage and one
    /// "precise" layout, but NO security section. Shared by the frozen and
    /// published requirement tests, whose bodies were previously identical
    /// except for the state.
    #[cfg(feature = "signatures")]
    fn manifest_in_state(state: DocumentState) -> cdx_core::Manifest {
        let content = ContentRef {
            path: "content/document.json".to_string(),
            hash: test_hash(),
            compression: None,
            merkle_root: None,
            block_count: None,
        };
        let metadata = Metadata {
            dublin_core: "metadata/dublin-core.json".to_string(),
            custom: None,
        };
        let mut manifest = cdx_core::Manifest::new(content, metadata);
        manifest.id = test_hash();
        manifest.state = state;
        manifest.lineage = Some(Lineage::root());
        manifest.presentation.push(PresentationRef {
            presentation_type: "precise".to_string(),
            path: "presentation/layouts/letter.json".to_string(),
            hash: test_hash(),
            default: false,
        });
        manifest
    }

    /// Submitting for review must force the ID out of the pending state.
    #[test]
    fn test_review_state_requires_computed_id() -> Result<()> {
        let mut doc = Document::builder()
            .title("Test")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        assert!(doc.id().is_pending());
        doc.submit_for_review()?;
        assert!(!doc.id().is_pending(), "Review state requires computed ID");
        Ok(())
    }

    /// A Frozen manifest without a security section must fail validation.
    #[cfg(feature = "signatures")]
    #[test]
    fn test_frozen_requires_signature() {
        let manifest = manifest_in_state(DocumentState::Frozen);
        let result = manifest.validate();
        assert!(
            result.is_err(),
            "Frozen state should require signature/security"
        );
    }

    /// A Published manifest must fail validation without a security section
    /// and pass once one is attached.
    #[cfg(feature = "signatures")]
    #[test]
    fn test_published_requires_signature() {
        let mut manifest = manifest_in_state(DocumentState::Published);
        let result = manifest.validate();
        assert!(
            result.is_err(),
            "Published state should require signature/security"
        );
        manifest.security = Some(SecurityRef {
            signatures: Some("security/signatures.json".to_string()),
            encryption: None,
        });
        let result = manifest.validate();
        assert!(
            result.is_ok(),
            "Published state with signature should validate: {:?}",
            result
        );
    }

    /// End-to-end Draft -> Review -> Frozen -> Published transition with a
    /// real signature attached.
    #[cfg(feature = "signatures")]
    #[test]
    fn test_frozen_to_published_transition() -> Result<()> {
        use cdx_core::security::{EcdsaSigner, Signer, SignerInfo};
        let mut doc = Document::builder()
            .title("Test")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        doc.submit_for_review()?;
        let doc_id = doc.compute_id()?;
        let signer_info = SignerInfo::new("Signer");
        let (signer, _) = EcdsaSigner::generate(signer_info)?;
        let signature = signer.sign(&doc_id)?;
        doc.add_signature(signature)?;
        doc.set_lineage(None, 1, None)?;
        doc.manifest_mut().presentation.push(PresentationRef {
            presentation_type: "precise".to_string(),
            path: "presentation/layouts/letter.json".to_string(),
            hash: doc_id.clone(),
            default: false,
        });
        doc.freeze()?;
        assert_eq!(doc.state(), DocumentState::Frozen);
        doc.publish()?;
        assert_eq!(doc.state(), DocumentState::Published);
        Ok(())
    }
}
mod merkle_proof_tests {
    use super::*;
    use cdx_core::{HashAlgorithm, Hasher};

    /// A built document's block index has a computed (non-pending) merkle
    /// root and one entry per block.
    #[test]
    fn test_merkle_root_matches_block_hashes() -> Result<()> {
        let doc = Document::builder()
            .title("Merkle Test")
            .creator("Author")
            .add_heading(1, "Chapter 1")
            .add_paragraph("First paragraph")
            .add_paragraph("Second paragraph")
            .build()?;
        let index = doc.block_index()?;
        let merkle_root = index.merkle_root();
        assert!(!merkle_root.is_pending(), "Merkle root should be computed");
        // One heading + two paragraphs.
        assert_eq!(index.block_count(), 3, "Should have 3 blocks");
        Ok(())
    }

    /// Every block's inclusion proof verifies against its own hash.
    #[test]
    fn test_merkle_proof_verifies_block() -> Result<()> {
        let doc = Document::builder()
            .title("Proof Test")
            .creator("Author")
            .add_heading(1, "Title")
            .add_paragraph("Content A")
            .add_paragraph("Content B")
            .add_paragraph("Content C")
            .build()?;
        // FIX: the index was rebuilt inside the loop on every iteration; it
        // is loop-invariant (determinism is asserted by
        // `test_block_index_hash_consistency` below), so build it once.
        let index = doc.block_index()?;
        for i in 0..4 {
            let proof = doc.prove_block(i)?;
            let block_hash = &index.get_block(i).unwrap().hash;
            assert!(
                doc.verify_proof(&proof, block_hash),
                "Proof should verify for block {i}"
            );
        }
        Ok(())
    }

    /// A proof must not verify against the hash of different content.
    #[test]
    fn test_merkle_proof_fails_tampered_block() -> Result<()> {
        let doc = Document::builder()
            .title("Tamper Test")
            .creator("Author")
            .add_paragraph("Original content")
            .build()?;
        let proof = doc.prove_block(0)?;
        let fake_hash = Hasher::hash(HashAlgorithm::Sha256, b"tampered content");
        assert!(
            !doc.verify_proof(&proof, &fake_hash),
            "Proof should fail for tampered block"
        );
        Ok(())
    }

    /// All block hashes are computed and rebuilding the index yields the
    /// same merkle root.
    #[test]
    fn test_block_index_hash_consistency() -> Result<()> {
        let doc = Document::builder()
            .title("Consistency Test")
            .creator("Author")
            .add_paragraph("Block 1")
            .add_paragraph("Block 2")
            .build()?;
        let index = doc.block_index()?;
        for i in 0..index.block_count() {
            let entry = index.get_block(i).unwrap();
            assert!(
                !entry.hash.is_pending(),
                "Block {i} hash should be computed"
            );
        }
        let index2 = doc.block_index()?;
        assert_eq!(
            index.merkle_root(),
            index2.merkle_root(),
            "Block index should be deterministic"
        );
        Ok(())
    }
}
mod manifest_validation_tests {
    use super::*;
    use cdx_core::{ContentRef, DocumentId, Metadata};

    /// Well-formed sha256 id used where a concrete hash is needed.
    fn test_hash() -> DocumentId {
        "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
            .parse()
            .unwrap()
    }

    /// Both a full hash string and the literal "pending" parse as DocumentId.
    #[test]
    fn test_manifest_id_valid_hash_pattern() {
        let valid = "sha256:3a7bd3e2360a3d29eea436fcfb7e44c735d117c42d1c1835420b6b9942dd4f1b"
            .parse::<DocumentId>();
        assert!(valid.is_ok(), "Valid hash should parse");
        let pending = "pending".parse::<DocumentId>();
        assert!(pending.is_ok(), "Pending should parse");
    }

    /// A fresh manifest defaults to Draft, where a pending id is legal.
    #[test]
    fn test_manifest_id_pending_allowed_for_draft() {
        let manifest = cdx_core::Manifest::new(
            ContentRef {
                path: "content/document.json".to_string(),
                hash: DocumentId::pending(),
                compression: None,
                merkle_root: None,
                block_count: None,
            },
            Metadata {
                dublin_core: "metadata/dublin-core.json".to_string(),
                custom: None,
            },
        );
        assert!(manifest.id.is_pending());
        assert_eq!(manifest.state, DocumentState::Draft);
        assert!(
            manifest.validate().is_ok(),
            "Draft with pending ID should validate"
        );
    }

    /// The serialized manifest carries every spec-required top-level field.
    #[test]
    fn test_manifest_all_required_fields() {
        let manifest = cdx_core::Manifest::new(
            ContentRef {
                path: "content/document.json".to_string(),
                hash: test_hash(),
                compression: None,
                merkle_root: None,
                block_count: None,
            },
            Metadata {
                dublin_core: "metadata/dublin-core.json".to_string(),
                custom: None,
            },
        );
        let json = serde_json::to_string(&manifest).unwrap();
        for (needle, why) in [
            ("\"codex\"", "codex field required"),
            ("\"id\"", "id field required"),
            ("\"state\"", "state field required"),
            ("\"created\"", "created field required"),
            ("\"modified\"", "modified field required"),
            ("\"content\"", "content field required"),
            ("\"metadata\"", "metadata field required"),
        ] {
            assert!(json.contains(needle), "{}", why);
        }
    }
}
mod extension_declaration_tests {
    use cdx_core::Extension;

    /// The namespace is the final dot-separated segment of the extension id.
    #[test]
    fn test_extension_id_format_valid() {
        for (id, version, expected_ns) in [
            ("codex.semantic", "0.1", "semantic"),
            ("codex.legal", "0.1", "legal"),
            ("org.example.custom", "1.0", "custom"),
        ] {
            let ext = Extension::required(id, version);
            assert_eq!(ext.namespace(), expected_ns);
        }
    }

    /// Both required and optional extensions carry a non-empty version.
    #[test]
    fn test_extension_version_present() {
        let required = Extension::required("codex.semantic", "0.1");
        assert!(!required.version.is_empty(), "Version must be present");
        let optional = Extension::optional("codex.forms", "1.2.3");
        assert!(!optional.version.is_empty(), "Version must be present");
    }

    /// The `required` flag distinguishes hard rejection from degradation.
    #[test]
    fn test_required_extension_flag() {
        assert!(
            Extension::required("codex.security", "0.1").required,
            "required=true should reject if unsupported"
        );
        assert!(
            !Extension::optional("codex.phantoms", "0.1").required,
            "required=false should allow graceful degradation"
        );
    }
}
#[cfg(feature = "signatures")]
mod signature_validation_tests {
    use super::*;
    use cdx_core::security::{EcdsaSigner, Signer, SignerInfo};

    /// A freshly produced signature always carries a non-empty signer name.
    #[test]
    fn test_signature_requires_signer_name() -> Result<()> {
        let doc = Document::builder()
            .title("Sig Test")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let doc_id = doc.compute_id()?;
        let (signer, _) = EcdsaSigner::generate(SignerInfo::new("Test Signer"))?;
        let signature = signer.sign(&doc_id)?;
        assert!(
            !signature.signer.name.is_empty(),
            "Signature must have signer.name"
        );
        Ok(())
    }

    /// The documentId recorded in the signature file equals the manifest id.
    #[test]
    fn test_signature_document_id_matches_manifest() -> Result<()> {
        let mut doc = Document::builder()
            .title("Match Test")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let doc_id = doc.compute_id()?;
        let (signer, _) = EcdsaSigner::generate(SignerInfo::new("Signer"))?;
        doc.add_signature(signer.sign(&doc_id)?)?;
        let sig_file = doc.signature_file().unwrap();
        assert_eq!(
            sig_file.document_id, doc_id,
            "Signature documentId should match manifest id"
        );
        Ok(())
    }
}
mod metadata_validation_tests {
    use super::*;

    /// Built documents always expose a non-empty Dublin Core title.
    #[test]
    fn test_dublin_core_title_required() {
        let doc = Document::builder()
            .title("Required Title")
            .creator("Author")
            .build()
            .unwrap();
        let title = doc.dublin_core().title();
        assert!(!title.is_empty(), "Title is required");
    }

    /// Built documents always expose at least one creator.
    #[test]
    fn test_dublin_core_creator_required() {
        let doc = Document::builder()
            .title("Title")
            .creator("Required Creator")
            .build()
            .unwrap();
        let creators = doc.dublin_core().creators();
        assert!(!creators.is_empty(), "Creator is required");
    }
}
mod archive_structure_tests {
    use super::*;
    use std::io::Cursor;

    /// The archive must contain the three spec-mandated entries.
    #[test]
    fn test_archive_contains_required_files() -> Result<()> {
        let doc = Document::builder()
            .title("Archive Test")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let mut archive = zip::ZipArchive::new(Cursor::new(doc.to_bytes()?)).unwrap();
        // Collect all entry names first, then assert membership.
        let mut entry_names = Vec::with_capacity(archive.len());
        for i in 0..archive.len() {
            entry_names.push(archive.by_index(i).unwrap().name().to_string());
        }
        assert!(
            entry_names.iter().any(|n| n == "manifest.json"),
            "manifest.json must exist"
        );
        assert!(
            entry_names.iter().any(|n| n == "content/document.json"),
            "content/document.json must exist"
        );
        assert!(
            entry_names.iter().any(|n| n == "metadata/dublin-core.json"),
            "metadata/dublin-core.json must exist"
        );
        Ok(())
    }

    /// manifest.json must be stored as the very first archive entry.
    #[test]
    fn test_manifest_must_be_first_file() -> Result<()> {
        let doc = Document::builder()
            .title("First File Test")
            .creator("Author")
            .add_paragraph("Content")
            .build()?;
        let mut archive = zip::ZipArchive::new(Cursor::new(doc.to_bytes()?)).unwrap();
        let first_entry = archive.by_index(0).unwrap();
        assert_eq!(
            first_entry.name(),
            "manifest.json",
            "manifest.json must be the first file in the archive"
        );
        Ok(())
    }
}
mod asset_embedding_tests {
    use cdx_core::archive::{CdxReader, CdxWriter, CompressionMethod};
    use cdx_core::asset::{verify_asset_hash, ImageAsset, ImageFormat, ImageIndex};
    use cdx_core::{ContentRef, DocumentId, HashAlgorithm, Hasher, Manifest, Metadata, Result};

    const CONTENT_PATH: &str = "content/document.json";
    const DUBLIN_CORE_PATH: &str = "metadata/dublin-core.json";
    const ASSET_PATH: &str = "assets/images/logo.png";
    const INDEX_PATH: &str = "assets/images/index.json";

    /// Minimal manifest with a pending content hash, enough for the writer.
    fn create_test_manifest() -> Manifest {
        Manifest::new(
            ContentRef {
                path: CONTENT_PATH.to_string(),
                hash: DocumentId::pending(),
                compression: None,
                merkle_root: None,
                block_count: None,
            },
            Metadata {
                dublin_core: DUBLIN_CORE_PATH.to_string(),
                custom: None,
            },
        )
    }

    /// A stored asset read back via `read_file_verified` matches its hash.
    #[test]
    fn test_asset_index_hash_matches_file() -> Result<()> {
        let payload = b"fake PNG image data for testing";
        let payload_hash = Hasher::hash(HashAlgorithm::Sha256, payload);
        assert!(
            verify_asset_hash(ASSET_PATH, payload, &payload_hash, HashAlgorithm::Sha256).is_ok()
        );
        let mut writer = CdxWriter::in_memory();
        writer.write_manifest(&create_test_manifest())?;
        writer.write_file(
            CONTENT_PATH,
            br#"{"version":"0.1","blocks":[]}"#,
            CompressionMethod::Deflate,
        )?;
        writer.write_file(
            DUBLIN_CORE_PATH,
            br#"{"title":"Test"}"#,
            CompressionMethod::Deflate,
        )?;
        writer.write_file(ASSET_PATH, payload, CompressionMethod::Stored)?;
        let archive_bytes = writer.finish()?.into_inner();
        let mut reader = CdxReader::from_bytes(archive_bytes)?;
        let round_tripped = reader.read_file_verified(ASSET_PATH, &payload_hash)?;
        assert_eq!(round_tripped, payload);
        Ok(())
    }

    /// Reading an asset path that was never written must error, even when an
    /// index entry claims it exists.
    #[test]
    fn test_asset_missing_file_error() -> Result<()> {
        let mut writer = CdxWriter::in_memory();
        writer.write_manifest(&create_test_manifest())?;
        writer.write_file(
            CONTENT_PATH,
            br#"{"version":"0.1","blocks":[]}"#,
            CompressionMethod::Deflate,
        )?;
        writer.write_file(
            DUBLIN_CORE_PATH,
            br#"{"title":"Test"}"#,
            CompressionMethod::Deflate,
        )?;
        // Index entry that references data never actually stored.
        let phantom_hash = Hasher::hash(HashAlgorithm::Sha256, b"nonexistent data");
        let image = ImageAsset::new("logo", ImageFormat::Png)
            .with_hash(phantom_hash)
            .with_size(100);
        let mut index = ImageIndex::default();
        index.add(image, 100);
        let index_json = serde_json::to_vec_pretty(&index)?;
        writer.write_file(INDEX_PATH, &index_json, CompressionMethod::Deflate)?;
        let mut reader = CdxReader::from_bytes(writer.finish()?.into_inner())?;
        assert!(
            reader.read_file(ASSET_PATH).is_err(),
            "Reading a missing asset file should error"
        );
        Ok(())
    }

    /// Hash verification fails both standalone and through the reader.
    #[test]
    fn test_asset_hash_mismatch_error() -> Result<()> {
        let payload = b"actual asset content";
        let wrong_hash = Hasher::hash(HashAlgorithm::Sha256, b"different content");
        assert!(
            verify_asset_hash(ASSET_PATH, payload, &wrong_hash, HashAlgorithm::Sha256).is_err(),
            "Hash mismatch should produce error"
        );
        let mut writer = CdxWriter::in_memory();
        writer.write_manifest(&create_test_manifest())?;
        writer.write_file(
            CONTENT_PATH,
            br#"{"version":"0.1","blocks":[]}"#,
            CompressionMethod::Deflate,
        )?;
        writer.write_file(
            DUBLIN_CORE_PATH,
            br#"{"title":"Test"}"#,
            CompressionMethod::Deflate,
        )?;
        writer.write_file(ASSET_PATH, payload, CompressionMethod::Stored)?;
        let mut reader = CdxReader::from_bytes(writer.finish()?.into_inner())?;
        assert!(
            reader.read_file_verified(ASSET_PATH, &wrong_hash).is_err(),
            "read_file_verified should fail on hash mismatch"
        );
        Ok(())
    }

    /// Asset references inside content are part of the document ID: change
    /// the referenced path and the ID changes; keep it and the ID is stable.
    #[test]
    fn test_asset_hashes_included_in_document_id() -> Result<()> {
        use cdx_core::content::Block;
        use cdx_core::Document;
        let build = |image_path: &str| {
            Document::builder()
                .title("Asset ID Test")
                .creator("Author")
                .add_paragraph("Text before image")
                .add_block(Block::image(image_path, "Photo"))
                .build()
        };
        let id_v1 = build("assets/images/photo_v1.png")?.compute_id()?;
        let id_v2 = build("assets/images/photo_v2.png")?.compute_id()?;
        assert_ne!(
            id_v1, id_v2,
            "Different asset references in content should produce different document IDs"
        );
        let id_v1_again = build("assets/images/photo_v1.png")?.compute_id()?;
        assert_eq!(
            id_v1, id_v1_again,
            "Same asset references should produce same document ID"
        );
        Ok(())
    }
}
#[cfg(test)]
mod proptest_tests {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        // Identical metadata + content must always hash to the same ID.
        #[test]
        fn proptest_hash_determinism_random_content(
            title in "[a-zA-Z ]{1,50}",
            creator in "[a-zA-Z ]{1,30}",
            content in "[a-zA-Z0-9 .,!?]{1,200}"
        ) {
            let doc1 = Document::builder()
                .title(&title)
                .creator(&creator)
                .add_paragraph(&content)
                .build()
                .unwrap();
            let doc2 = Document::builder()
                .title(&title)
                .creator(&creator)
                .add_paragraph(&content)
                .build()
                .unwrap();
            let id1 = doc1.compute_id().unwrap();
            let id2 = doc2.compute_id().unwrap();
            prop_assert_eq!(id1, id2, "Same content should produce same hash");
        }

        // Serialize-then-load preserves the title and the block count.
        #[test]
        fn proptest_content_serialization_roundtrip(
            title in "[a-zA-Z ]{1,50}",
            para1 in "[a-zA-Z0-9 .,]{1,100}",
            para2 in "[a-zA-Z0-9 .,]{1,100}"
        ) {
            let doc = Document::builder()
                .title(&title)
                .creator("Author")
                // FIX: source had `¶1` / `¶2` — `&para1` / `&para2`
                // mangled by HTML-entity decoding (`&para` -> `¶`).
                .add_paragraph(&para1)
                .add_paragraph(&para2)
                .build()
                .unwrap();
            let bytes = doc.to_bytes().unwrap();
            let loaded = Document::from_bytes(bytes).unwrap();
            prop_assert_eq!(doc.title(), loaded.title());
            prop_assert_eq!(doc.content().blocks.len(), loaded.content().blocks.len());
        }

        // Differing title or creator must change the document ID.
        #[test]
        fn proptest_hash_boundary_metadata_inclusion(
            title1 in "[a-zA-Z ]{1,50}",
            title2 in "[a-zA-Z ]{1,50}",
            creator1 in "[a-zA-Z ]{1,30}",
            creator2 in "[a-zA-Z ]{1,30}",
        ) {
            prop_assume!(title1 != title2 || creator1 != creator2);
            let doc1 = Document::builder()
                .title(&title1)
                .creator(&creator1)
                .add_paragraph("Fixed content")
                .build()
                .unwrap();
            let doc2 = Document::builder()
                .title(&title2)
                .creator(&creator2)
                .add_paragraph("Fixed content")
                .build()
                .unwrap();
            let id1 = doc1.compute_id().unwrap();
            let id2 = doc2.compute_id().unwrap();
            prop_assert_ne!(
                id1, id2,
                "Different identity metadata should produce different hashes"
            );
        }

        // Every block serializes with a `type` tag and round-trips stably.
        #[test]
        fn proptest_block_structure_constraints(
            text in "[a-zA-Z0-9 .,!?]{1,100}",
            level in 1u8..=6u8,
            lang in "(rust|python|javascript|go|java)"
        ) {
            use cdx_core::content::Block;
            let blocks = vec![
                Block::paragraph(vec![]),
                Block::heading(level, vec![]),
                Block::code_block(text, Some(lang)),
                Block::horizontal_rule(),
                Block::blockquote(vec![]),
            ];
            for block in &blocks {
                let json = serde_json::to_value(block).unwrap();
                prop_assert!(
                    json.get("type").is_some(),
                    "Block {:?} must serialize with a 'type' field",
                    block
                );
                let json_str = serde_json::to_string(block).unwrap();
                let deserialized: Block = serde_json::from_str(&json_str).unwrap();
                let re_serialized = serde_json::to_string(&deserialized).unwrap();
                prop_assert_eq!(
                    json_str, re_serialized,
                    "Block round-trip should be stable"
                );
            }
        }
    }
}