use std::collections::{BTreeMap, HashMap, HashSet};
use crate::model::{
CompletenessDeclaration, ComponentType, CreatorType, CryptoAssetType, CryptoMaterialState,
CryptoPrimitive, EolStatus, ExternalRefType, HashAlgorithm, NormalizedSbom, StalenessLevel,
};
use serde::{Deserialize, Serialize};
/// Field-coverage percentages for an SBOM's components plus a few
/// document-level presence flags. Built by [`CompletenessMetrics::from_sbom`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletenessMetrics {
// Each `components_with_*` value is a percentage in 0.0..=100.0 of
// `total_components`.
pub components_with_version: f32,
pub components_with_purl: f32,
pub components_with_cpe: f32,
pub components_with_supplier: f32,
pub components_with_hashes: f32,
pub components_with_licenses: f32,
pub components_with_description: f32,
/// True when the document lists at least one creator.
pub has_creator_info: bool,
/// NOTE(review): `from_sbom` currently always sets this to `true` — the
/// normalized model appears to always carry a creation timestamp; confirm.
pub has_timestamp: bool,
pub has_serial_number: bool,
/// Component count the percentages above are relative to.
pub total_components: usize,
}
impl CompletenessMetrics {
    /// Computes field-coverage percentages across all components of `sbom`.
    ///
    /// Returns [`Self::empty`] when the SBOM has no components so the
    /// percentage math never divides by zero.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let total = sbom.components.len();
        if total == 0 {
            return Self::empty();
        }
        // One counter per tracked field, all updated in a single pass.
        let mut counts = [0usize; 7];
        for comp in sbom.components.values() {
            let present = [
                comp.version.is_some(),
                comp.identifiers.purl.is_some(),
                !comp.identifiers.cpe.is_empty(),
                comp.supplier.is_some(),
                !comp.hashes.is_empty(),
                !comp.licenses.declared.is_empty() || comp.licenses.concluded.is_some(),
                comp.description.is_some(),
            ];
            for (counter, hit) in counts.iter_mut().zip(present) {
                *counter += usize::from(hit);
            }
        }
        let pct = |count: usize| (count as f32 / total as f32) * 100.0;
        Self {
            components_with_version: pct(counts[0]),
            components_with_purl: pct(counts[1]),
            components_with_cpe: pct(counts[2]),
            components_with_supplier: pct(counts[3]),
            components_with_hashes: pct(counts[4]),
            components_with_licenses: pct(counts[5]),
            components_with_description: pct(counts[6]),
            has_creator_info: !sbom.document.creators.is_empty(),
            // The normalized document always carries a creation timestamp,
            // so this flag is unconditionally true here.
            has_timestamp: true,
            has_serial_number: sbom.document.serial_number.is_some(),
            total_components: total,
        }
    }

    /// All-zero metrics, used for SBOMs that contain no components.
    #[must_use]
    pub const fn empty() -> Self {
        Self {
            components_with_version: 0.0,
            components_with_purl: 0.0,
            components_with_cpe: 0.0,
            components_with_supplier: 0.0,
            components_with_hashes: 0.0,
            components_with_licenses: 0.0,
            components_with_description: 0.0,
            has_creator_info: false,
            has_timestamp: false,
            has_serial_number: false,
            total_components: 0,
        }
    }

    /// Collapses the metrics into a single 0-100 score using `weights`.
    ///
    /// Percentage metrics contribute proportionally to their weight; the
    /// boolean document flags contribute all-or-nothing. Returns 0.0 when
    /// every weight is zero.
    #[must_use]
    pub fn overall_score(&self, weights: &CompletenessWeights) -> f32 {
        // (value-in-percent, weight) pairs, accumulated in a fixed order.
        let weighted_percentages = [
            (self.components_with_version, weights.version),
            (self.components_with_purl, weights.purl),
            (self.components_with_cpe, weights.cpe),
            (self.components_with_supplier, weights.supplier),
            (self.components_with_hashes, weights.hashes),
            (self.components_with_licenses, weights.licenses),
        ];
        let mut score = 0.0;
        let mut total_weight = 0.0;
        for (value, weight) in weighted_percentages {
            score += value * weight;
            total_weight += weight * 100.0;
        }
        // Boolean flags earn either the full 100 points or nothing.
        let flags = [
            (self.has_creator_info, weights.creator_info),
            (self.has_serial_number, weights.serial_number),
        ];
        for (present, weight) in flags {
            if present {
                score += 100.0 * weight;
            }
            total_weight += weight * 100.0;
        }
        if total_weight > 0.0 {
            (score / total_weight) * 100.0
        } else {
            0.0
        }
    }
}
/// Relative importance of each completeness signal in
/// [`CompletenessMetrics::overall_score`]. A larger weight means the signal
/// moves the overall score more.
#[derive(Debug, Clone)]
pub struct CompletenessWeights {
pub version: f32,
pub purl: f32,
pub cpe: f32,
pub supplier: f32,
pub hashes: f32,
pub licenses: f32,
pub creator_info: f32,
pub serial_number: f32,
}
impl Default for CompletenessWeights {
    /// Default weighting: purl identification and licensing matter most,
    /// document-level metadata (creator, serial number) the least.
    fn default() -> Self {
        Self {
            version: 1.0,
            purl: 1.5,
            cpe: 0.5,
            supplier: 1.0,
            hashes: 1.0,
            licenses: 1.2,
            creator_info: 0.3,
            serial_number: 0.2,
        }
    }
}
/// Hash coverage and algorithm-strength statistics across components.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HashQualityMetrics {
/// Components carrying at least one hash of any algorithm.
pub components_with_any_hash: usize,
/// Components with at least one strong hash (see `is_strong_hash`).
pub components_with_strong_hash: usize,
/// Components that have hashes, but only weak ones (e.g. MD5/SHA-1).
pub components_with_weak_only: usize,
/// Hash count per algorithm label; `BTreeMap` keeps serialized output deterministic.
pub algorithm_distribution: BTreeMap<String, usize>,
pub total_hashes: usize,
}
impl HashQualityMetrics {
    /// Tallies hash coverage and per-algorithm usage across all components.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let mut metrics = Self {
            components_with_any_hash: 0,
            components_with_strong_hash: 0,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 0,
        };
        for comp in sbom.components.values() {
            if comp.hashes.is_empty() {
                continue;
            }
            metrics.components_with_any_hash += 1;
            metrics.total_hashes += comp.hashes.len();
            let mut saw_strong = false;
            let mut saw_weak = false;
            for hash in &comp.hashes {
                *metrics
                    .algorithm_distribution
                    .entry(hash_algorithm_label(&hash.algorithm))
                    .or_insert(0) += 1;
                if is_strong_hash(&hash.algorithm) {
                    saw_strong = true;
                } else {
                    saw_weak = true;
                }
            }
            // One strong hash is enough to count as "strong"; "weak only"
            // means hashes exist but none of them is strong.
            if saw_strong {
                metrics.components_with_strong_hash += 1;
            } else if saw_weak {
                metrics.components_with_weak_only += 1;
            }
        }
        metrics
    }

    /// Scores hash quality on 0-100: up to 60 points for any-hash coverage,
    /// up to 40 bonus for strong-hash coverage, minus a small penalty for
    /// components that only carry weak hashes.
    #[must_use]
    pub fn quality_score(&self, total_components: usize) -> f32 {
        if total_components == 0 {
            return 0.0;
        }
        let denom = total_components as f32;
        let base = (self.components_with_any_hash as f32 / denom) * 60.0;
        let strong_bonus = (self.components_with_strong_hash as f32 / denom) * 40.0;
        let weak_penalty = (self.components_with_weak_only as f32 / denom) * 10.0;
        (base + strong_bonus - weak_penalty).clamp(0.0, 100.0)
    }
}
fn is_strong_hash(algo: &HashAlgorithm) -> bool {
matches!(
algo,
HashAlgorithm::Sha256
| HashAlgorithm::Sha384
| HashAlgorithm::Sha512
| HashAlgorithm::Sha3_256
| HashAlgorithm::Sha3_384
| HashAlgorithm::Sha3_512
| HashAlgorithm::Blake2b256
| HashAlgorithm::Blake2b384
| HashAlgorithm::Blake2b512
| HashAlgorithm::Blake3
| HashAlgorithm::Streebog256
| HashAlgorithm::Streebog512
)
}
fn hash_algorithm_label(algo: &HashAlgorithm) -> String {
match algo {
HashAlgorithm::Md5 => "MD5".to_string(),
HashAlgorithm::Sha1 => "SHA-1".to_string(),
HashAlgorithm::Sha256 => "SHA-256".to_string(),
HashAlgorithm::Sha384 => "SHA-384".to_string(),
HashAlgorithm::Sha512 => "SHA-512".to_string(),
HashAlgorithm::Sha3_256 => "SHA3-256".to_string(),
HashAlgorithm::Sha3_384 => "SHA3-384".to_string(),
HashAlgorithm::Sha3_512 => "SHA3-512".to_string(),
HashAlgorithm::Blake2b256 => "BLAKE2b-256".to_string(),
HashAlgorithm::Blake2b384 => "BLAKE2b-384".to_string(),
HashAlgorithm::Blake2b512 => "BLAKE2b-512".to_string(),
HashAlgorithm::Blake3 => "BLAKE3".to_string(),
HashAlgorithm::Streebog256 => "Streebog-256".to_string(),
HashAlgorithm::Streebog512 => "Streebog-512".to_string(),
HashAlgorithm::Other(s) => s.clone(),
}
}
/// Identifier (purl / CPE / SWID) validity and ecosystem statistics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IdentifierMetrics {
pub valid_purls: usize,
pub invalid_purls: usize,
pub valid_cpes: usize,
pub invalid_cpes: usize,
/// Components that carry a SWID tag.
pub with_swid: usize,
/// Sorted, de-duplicated ecosystems extracted from valid purls.
pub ecosystems: Vec<String>,
/// Components with no purl, no CPE, and no SWID tag at all.
pub missing_all_identifiers: usize,
}
impl IdentifierMetrics {
#[must_use]
pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
let mut valid_purls = 0;
let mut invalid_purls = 0;
let mut valid_cpes = 0;
let mut invalid_cpes = 0;
let mut with_swid = 0;
let mut missing_all = 0;
let mut ecosystems = std::collections::HashSet::new();
for comp in sbom.components.values() {
let has_purl = comp.identifiers.purl.is_some();
let has_cpe = !comp.identifiers.cpe.is_empty();
let has_swid = comp.identifiers.swid.is_some();
if let Some(ref purl) = comp.identifiers.purl {
if is_valid_purl(purl) {
valid_purls += 1;
if let Some(eco) = extract_ecosystem_from_purl(purl) {
ecosystems.insert(eco);
}
} else {
invalid_purls += 1;
}
}
for cpe in &comp.identifiers.cpe {
if is_valid_cpe(cpe) {
valid_cpes += 1;
} else {
invalid_cpes += 1;
}
}
if has_swid {
with_swid += 1;
}
if !has_purl && !has_cpe && !has_swid {
missing_all += 1;
}
}
let mut ecosystem_list: Vec<String> = ecosystems.into_iter().collect();
ecosystem_list.sort();
Self {
valid_purls,
invalid_purls,
valid_cpes,
invalid_cpes,
with_swid,
ecosystems: ecosystem_list,
missing_all_identifiers: missing_all,
}
}
#[must_use]
pub fn quality_score(&self, total_components: usize) -> f32 {
if total_components == 0 {
return 0.0;
}
let with_valid_id = self.valid_purls + self.valid_cpes + self.with_swid;
let coverage =
(with_valid_id.min(total_components) as f32 / total_components as f32) * 100.0;
let invalid_count = self.invalid_purls + self.invalid_cpes;
let penalty = (invalid_count as f32 / total_components as f32) * 20.0;
(coverage - penalty).clamp(0.0, 100.0)
}
}
/// License-declaration statistics and SPDX validity tallies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LicenseMetrics {
/// Components with at least one declared license expression.
pub with_declared: usize,
/// Components with a concluded license.
pub with_concluded: usize,
// The next three counters classify individual declared expressions
// (a component may contribute several).
pub valid_spdx_expressions: usize,
pub non_standard_licenses: usize,
/// Declared expressions equal to the literal "NOASSERTION".
pub noassertion_count: usize,
pub deprecated_licenses: usize,
pub restrictive_licenses: usize,
/// Sorted unique expressions flagged restrictive/copyleft.
pub copyleft_license_ids: Vec<String>,
/// Sorted unique declared expressions seen anywhere in the SBOM.
pub unique_licenses: Vec<String>,
}
impl LicenseMetrics {
    /// Tallies license declarations across components and classifies every
    /// declared expression (NOASSERTION / valid SPDX / non-standard, plus
    /// independent deprecated and restrictive flags).
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let mut metrics = Self {
            with_declared: 0,
            with_concluded: 0,
            valid_spdx_expressions: 0,
            non_standard_licenses: 0,
            noassertion_count: 0,
            deprecated_licenses: 0,
            restrictive_licenses: 0,
            copyleft_license_ids: Vec::new(),
            unique_licenses: Vec::new(),
        };
        let mut seen = HashSet::new();
        let mut copyleft = HashSet::new();
        for comp in sbom.components.values() {
            if comp.licenses.concluded.is_some() {
                metrics.with_concluded += 1;
            }
            if comp.licenses.declared.is_empty() {
                continue;
            }
            metrics.with_declared += 1;
            for lic in &comp.licenses.declared {
                let expr = &lic.expression;
                seen.insert(expr.clone());
                // Exactly one of these three buckets applies per expression.
                if expr == "NOASSERTION" {
                    metrics.noassertion_count += 1;
                } else if is_valid_spdx_license(expr) {
                    metrics.valid_spdx_expressions += 1;
                } else {
                    metrics.non_standard_licenses += 1;
                }
                if is_deprecated_spdx_license(expr) {
                    metrics.deprecated_licenses += 1;
                }
                if is_restrictive_license(expr) {
                    metrics.restrictive_licenses += 1;
                    copyleft.insert(expr.clone());
                }
            }
        }
        metrics.unique_licenses = seen.into_iter().collect();
        metrics.unique_licenses.sort();
        metrics.copyleft_license_ids = copyleft.into_iter().collect();
        metrics.copyleft_license_ids.sort();
        metrics
    }

    /// Scores license quality 0-100: up to 60 for declared coverage, up to
    /// 30 for the valid-SPDX share of declarations, minus penalties for
    /// NOASSERTION entries and deprecated license ids.
    #[must_use]
    pub fn quality_score(&self, total_components: usize) -> f32 {
        if total_components == 0 {
            return 0.0;
        }
        let coverage = (self.with_declared as f32 / total_components as f32) * 60.0;
        let spdx_ratio = if self.with_declared > 0 {
            self.valid_spdx_expressions as f32 / self.with_declared as f32
        } else {
            0.0
        };
        let spdx_bonus = spdx_ratio * 30.0;
        let noassertion_penalty =
            (self.noassertion_count as f32 / total_components.max(1) as f32) * 10.0;
        let deprecated_penalty = (self.deprecated_licenses as f32 * 2.0).min(10.0);
        (coverage + spdx_bonus - noassertion_penalty - deprecated_penalty).clamp(0.0, 100.0)
    }
}
/// Vulnerability documentation coverage across the SBOM.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VulnerabilityMetrics {
pub components_with_vulns: usize,
pub total_vulnerabilities: usize,
/// Vulnerability records carrying at least one CVSS entry.
pub with_cvss: usize,
/// Vulnerability records listing at least one CWE.
pub with_cwe: usize,
pub with_remediation: usize,
/// Components with a VEX status, either on the component itself or on any
/// of its vulnerability records.
pub with_vex_status: usize,
}
impl VulnerabilityMetrics {
    /// Counts vulnerability records across components and how well each one
    /// is documented (CVSS, CWE, remediation, VEX status).
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let mut metrics = Self {
            components_with_vulns: 0,
            total_vulnerabilities: 0,
            with_cvss: 0,
            with_cwe: 0,
            with_remediation: 0,
            with_vex_status: 0,
        };
        for comp in sbom.components.values() {
            if !comp.vulnerabilities.is_empty() {
                metrics.components_with_vulns += 1;
            }
            for vuln in &comp.vulnerabilities {
                metrics.total_vulnerabilities += 1;
                if !vuln.cvss.is_empty() {
                    metrics.with_cvss += 1;
                }
                if !vuln.cwes.is_empty() {
                    metrics.with_cwe += 1;
                }
                if vuln.remediation.is_some() {
                    metrics.with_remediation += 1;
                }
            }
            // VEX status may live on the component or on individual records.
            let has_vex = comp.vex_status.is_some()
                || comp.vulnerabilities.iter().any(|v| v.vex_status.is_some());
            if has_vex {
                metrics.with_vex_status += 1;
            }
        }
        metrics
    }

    /// Scores how well recorded vulnerabilities are documented (0-100),
    /// weighting CVSS 40%, CWE 30%, remediation 30%. Returns `None` when no
    /// vulnerabilities are recorded, so "no vulns" is not conflated with
    /// "badly documented vulns".
    #[must_use]
    pub fn documentation_score(&self) -> Option<f32> {
        if self.total_vulnerabilities == 0 {
            return None;
        }
        let total = self.total_vulnerabilities as f32;
        let cvss_ratio = self.with_cvss as f32 / total;
        let cwe_ratio = self.with_cwe as f32 / total;
        let remediation_ratio = self.with_remediation as f32 / total;
        Some(
            remediation_ratio
                .mul_add(30.0, cvss_ratio.mul_add(40.0, cwe_ratio * 30.0))
                .min(100.0),
        )
    }
}
/// Edge-count ceiling above which the expensive graph analyses (depth,
/// cycles, islands, complexity) are skipped; `graph_analysis_skipped` is set
/// on the metrics instead.
const MAX_EDGES_FOR_GRAPH_ANALYSIS: usize = 50_000;
/// Bucketed dependency-graph complexity derived from the simplicity index
/// (see [`ComplexityLevel::from_score`]): higher simplicity maps to `Low`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[non_exhaustive]
pub enum ComplexityLevel {
Low,
Moderate,
High,
VeryHigh,
}
impl ComplexityLevel {
    /// Maps a simplicity score (nominally 0-100, higher = simpler) to a
    /// complexity bucket.
    ///
    /// The top range is open-ended so values above 100 still classify as
    /// `Low`; previously `75..=100` let anything above 100 fall through to
    /// the `_` arm and report `VeryHigh`. Negative and NaN inputs saturate
    /// to 0 on the float-to-int cast and classify as `VeryHigh`.
    #[must_use]
    pub const fn from_score(simplicity: f32) -> Self {
        match simplicity as u32 {
            75.. => Self::Low,
            50..=74 => Self::Moderate,
            25..=49 => Self::High,
            _ => Self::VeryHigh,
        }
    }
    /// Human-readable bucket name for reports.
    #[must_use]
    pub const fn label(&self) -> &'static str {
        match self {
            Self::Low => "Low",
            Self::Moderate => "Moderate",
            Self::High => "High",
            Self::VeryHigh => "Very High",
        }
    }
}
impl std::fmt::Display for ComplexityLevel {
    /// Delegates to [`ComplexityLevel::label`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.label())
    }
}
/// Normalized (0.0-1.0) sub-scores feeding the software complexity index;
/// produced by `compute_complexity`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComplexityFactors {
/// Log-scaled edges-per-component ratio.
pub dependency_volume: f32,
/// Maximum dependency depth, saturating at 15 levels.
pub normalized_depth: f32,
/// Largest out-degree relative to graph size.
pub fanout_concentration: f32,
/// Detected cycles relative to a size-based threshold.
pub cycle_ratio: f32,
/// Disconnected islands beyond the first, relative to graph size.
pub fragmentation: f32,
}
/// Dependency-graph statistics and derived complexity for an SBOM.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DependencyMetrics {
pub total_dependencies: usize,
/// Components with at least one outgoing dependency edge.
pub components_with_deps: usize,
/// `None` when graph analysis was skipped or the graph has no roots.
pub max_depth: Option<usize>,
pub avg_depth: Option<f32>,
/// Components that appear in no edge at all.
pub orphan_components: usize,
/// Components with outgoing edges that nothing depends on.
pub root_components: usize,
pub cycle_count: usize,
/// Weakly-connected subgraphs (union-find over undirected edges).
pub island_count: usize,
/// True when the edge count exceeded `MAX_EDGES_FOR_GRAPH_ANALYSIS`.
pub graph_analysis_skipped: bool,
pub max_out_degree: usize,
/// 0-100 simplicity index (higher = simpler); `None` when analysis skipped.
pub software_complexity_index: Option<f32>,
pub complexity_level: Option<ComplexityLevel>,
pub complexity_factors: Option<ComplexityFactors>,
}
impl DependencyMetrics {
/// Builds dependency-graph metrics from the SBOM's edge list.
///
/// Graphs larger than `MAX_EDGES_FOR_GRAPH_ANALYSIS` edges skip the
/// expensive analyses (depth, cycles, islands, complexity) and return with
/// `graph_analysis_skipped` set.
#[must_use]
pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
use crate::model::CanonicalId;
let total_deps = sbom.edges.len();
// Adjacency list plus incoming/outgoing membership sets, built in one pass.
let mut children: HashMap<&str, Vec<&str>> = HashMap::new();
let mut has_outgoing: HashSet<&str> = HashSet::new();
let mut has_incoming: HashSet<&str> = HashSet::new();
for edge in &sbom.edges {
children
.entry(edge.from.value())
.or_default()
.push(edge.to.value());
has_outgoing.insert(edge.from.value());
has_incoming.insert(edge.to.value());
}
let all_ids: Vec<&str> = sbom.components.keys().map(CanonicalId::value).collect();
// Orphans participate in no edge at all.
let orphans = all_ids
.iter()
.filter(|c| !has_outgoing.contains(*c) && !has_incoming.contains(*c))
.count();
// Roots have outgoing edges but nothing depends on them.
let roots: Vec<&str> = has_outgoing
.iter()
.filter(|c| !has_incoming.contains(*c))
.copied()
.collect();
let root_count = roots.len();
let max_out_degree = children.values().map(Vec::len).max().unwrap_or(0);
// Bail out before the graph walks on very large graphs to bound cost.
if total_deps > MAX_EDGES_FOR_GRAPH_ANALYSIS {
return Self {
total_dependencies: total_deps,
components_with_deps: has_outgoing.len(),
max_depth: None,
avg_depth: None,
orphan_components: orphans,
root_components: root_count,
cycle_count: 0,
island_count: 0,
graph_analysis_skipped: true,
max_out_degree,
software_complexity_index: None,
complexity_level: None,
complexity_factors: None,
};
}
let (max_depth, avg_depth) = compute_depth(&roots, &children);
let cycle_count = detect_cycles(&all_ids, &children);
let island_count = count_islands(&all_ids, &sbom.edges);
let component_count = all_ids.len();
let (complexity_index, complexity_lvl, factors) = compute_complexity(
total_deps,
component_count,
max_depth.unwrap_or(0),
max_out_degree,
cycle_count,
orphans,
island_count,
);
Self {
total_dependencies: total_deps,
components_with_deps: has_outgoing.len(),
max_depth,
avg_depth,
orphan_components: orphans,
root_components: root_count,
cycle_count,
island_count,
graph_analysis_skipped: false,
max_out_degree,
software_complexity_index: Some(complexity_index),
complexity_level: Some(complexity_lvl),
complexity_factors: Some(factors),
}
}
/// Scores dependency-relationship quality 0-100: coverage of components
/// that declare dependencies, minus penalties for orphans, cycles, and
/// excessive fragmentation.
#[must_use]
pub fn quality_score(&self, total_components: usize) -> f32 {
if total_components == 0 {
return 0.0;
}
// NOTE(review): coverage divides by `total - 1`, presumably allowing one
// terminal component without outgoing edges — confirm intent.
let coverage = if total_components > 1 {
(self.components_with_deps as f32 / (total_components - 1) as f32) * 100.0
} else {
100.0 };
let orphan_ratio = self.orphan_components as f32 / total_components as f32;
let orphan_penalty = orphan_ratio * 10.0;
let cycle_penalty = (self.cycle_count as f32 * 5.0).min(20.0);
// Up to 3 islands are tolerated before fragmentation is penalized.
let island_penalty = if total_components > 5 && self.island_count > 3 {
((self.island_count - 3) as f32 * 3.0).min(15.0)
} else {
0.0
};
(coverage - orphan_penalty - cycle_penalty - island_penalty).clamp(0.0, 100.0)
}
}
/// Breadth-first traversal from `roots` over the `children` adjacency list.
///
/// Returns `(max_depth, average_depth)` over every node reachable from a
/// root; roots sit at depth 0 and each node is counted once, at the depth at
/// which BFS first reaches it. Yields `(None, None)` when `roots` is empty
/// (e.g. a fully cyclic graph).
fn compute_depth(
    roots: &[&str],
    children: &HashMap<&str, Vec<&str>>,
) -> (Option<usize>, Option<f32>) {
    use std::collections::VecDeque;
    if roots.is_empty() {
        return (None, None);
    }
    let mut seen: HashSet<&str> = HashSet::new();
    let mut frontier: VecDeque<(&str, usize)> = VecDeque::new();
    // Seed the queue with each distinct root at depth 0.
    for &root in roots {
        if seen.insert(root) {
            frontier.push_back((root, 0));
        }
    }
    let (mut deepest, mut depth_sum, mut visited) = (0usize, 0usize, 0usize);
    while let Some((node, depth)) = frontier.pop_front() {
        deepest = deepest.max(depth);
        depth_sum += depth;
        visited += 1;
        for &kid in children.get(node).into_iter().flatten() {
            if seen.insert(kid) {
                frontier.push_back((kid, depth + 1));
            }
        }
    }
    // `visited` is at least 1 here because `roots` was non-empty.
    let avg = if visited > 0 {
        Some(depth_sum as f32 / visited as f32)
    } else {
        None
    };
    (Some(deepest), avg)
}
/// Counts back edges (edges into a node currently on the DFS path) across
/// the whole graph — a proxy for the number of dependency cycles; a
/// self-loop counts as one cycle.
///
/// Uses an explicit stack: the previous recursive DFS could overflow the
/// call stack on long dependency chains (graphs here may hold up to
/// `MAX_EDGES_FOR_GRAPH_ANALYSIS` edges, i.e. chains tens of thousands of
/// nodes deep). Counting is identical to the recursive version.
fn detect_cycles(all_nodes: &[&str], children: &HashMap<&str, Vec<&str>>) -> usize {
    // DFS colors: WHITE = unvisited, GRAY = on the current DFS path,
    // BLACK = fully explored.
    const WHITE: u8 = 0;
    const GRAY: u8 = 1;
    const BLACK: u8 = 2;
    let mut color: HashMap<&str, u8> = HashMap::with_capacity(all_nodes.len());
    for &node in all_nodes {
        color.insert(node, WHITE);
    }
    let mut cycles = 0;
    for &start in all_nodes {
        if color.get(start).copied().unwrap_or(WHITE) != WHITE {
            continue;
        }
        // Each stack entry is (node, index of the next child to examine).
        color.insert(start, GRAY);
        let mut stack: Vec<(&str, usize)> = vec![(start, 0)];
        while let Some(&mut (node, ref mut next_child)) = stack.last_mut() {
            let i = *next_child;
            *next_child += 1;
            let kid = children.get(node).and_then(|kids| kids.get(i).copied());
            match kid {
                // All children examined: retire the node.
                None => {
                    color.insert(node, BLACK);
                    stack.pop();
                }
                Some(kid) => match color.get(kid).copied().unwrap_or(WHITE) {
                    // Edge into the active path: a back edge, i.e. a cycle.
                    GRAY => cycles += 1,
                    WHITE => {
                        color.insert(kid, GRAY);
                        stack.push((kid, 0));
                    }
                    _ => {}
                },
            }
        }
    }
    cycles
}
/// Counts weakly-connected components ("islands") with union-find.
///
/// Edge direction is ignored: each edge merges the sets of its endpoints.
/// Isolated nodes count as their own island; edges referencing unknown node
/// ids are skipped. Returns 0 for an empty node list.
fn count_islands(all_nodes: &[&str], edges: &[crate::model::DependencyEdge]) -> usize {
    if all_nodes.is_empty() {
        return 0;
    }
    let node_idx: HashMap<&str, usize> =
        all_nodes.iter().enumerate().map(|(i, &n)| (n, i)).collect();
    let mut parent: Vec<usize> = (0..all_nodes.len()).collect();
    let mut rank: Vec<u8> = vec![0; all_nodes.len()];
    // Iterative find with path halving: re-linking to the grandparent keeps
    // trees shallow without recursion.
    fn find(parent: &mut [usize], mut x: usize) -> usize {
        while parent[x] != x {
            parent[x] = parent[parent[x]];
            x = parent[x];
        }
        x
    }
    // Union by rank; the chosen representative depends only on the union
    // sequence, not on how `find` compresses paths.
    fn union(parent: &mut [usize], rank: &mut [u8], a: usize, b: usize) {
        let (ra, rb) = (find(parent, a), find(parent, b));
        if ra == rb {
            return;
        }
        if rank[ra] < rank[rb] {
            parent[ra] = rb;
        } else {
            parent[rb] = ra;
            if rank[ra] == rank[rb] {
                rank[ra] += 1;
            }
        }
    }
    for edge in edges {
        let from = node_idx.get(edge.from.value());
        let to = node_idx.get(edge.to.value());
        if let (Some(&a), Some(&b)) = (from, to) {
            union(&mut parent, &mut rank, a, b);
        }
    }
    // Every distinct set representative is one island.
    (0..all_nodes.len())
        .map(|i| find(&mut parent, i))
        .collect::<HashSet<_>>()
        .len()
}
/// Blends graph statistics into a 0-100 "simplicity index" (higher =
/// simpler), a bucketed [`ComplexityLevel`], and the normalized factor
/// breakdown. `_orphans` is accepted but currently unused in the formula.
fn compute_complexity(
edges: usize,
components: usize,
max_depth: usize,
max_out_degree: usize,
cycle_count: usize,
_orphans: usize,
islands: usize,
) -> (f32, ComplexityLevel, ComplexityFactors) {
// An empty SBOM is trivially "simple".
if components == 0 {
let factors = ComplexityFactors {
dependency_volume: 0.0,
normalized_depth: 0.0,
fanout_concentration: 0.0,
cycle_ratio: 0.0,
fragmentation: 0.0,
};
return (100.0, ComplexityLevel::Low, factors);
}
// Log-scaled edges-per-component; saturates at 1.0 (when 1 + ratio >= 20).
let edge_ratio = edges as f64 / components as f64;
let dependency_volume = ((1.0 + edge_ratio).ln() / 20.0_f64.ln()).min(1.0) as f32;
// Depth factor saturates at 15 levels.
let normalized_depth = (max_depth as f32 / 15.0).min(1.0);
// Fan-out relative to a quarter of the graph (floor of 4 nodes).
let fanout_denom = (components as f32 * 0.25).max(4.0);
let fanout_concentration = (max_out_degree as f32 / fanout_denom).min(1.0);
// Cycle count relative to 5% of the component count (floor of 1).
let cycle_threshold = (components as f32 * 0.05).max(1.0);
let cycle_ratio = (cycle_count as f32 / cycle_threshold).min(1.0);
// Fragmentation considers only islands beyond the first.
let extra_islands = islands.saturating_sub(1);
let fragmentation = if components > 1 {
(extra_islands as f32 / (components - 1) as f32).min(1.0)
} else {
0.0
};
let factors = ComplexityFactors {
dependency_volume,
normalized_depth,
fanout_concentration,
cycle_ratio,
fragmentation,
};
// Weighted blend (weights sum to 1.0), then inverted so the published
// index measures simplicity rather than complexity.
let raw_complexity = 0.30 * dependency_volume
+ 0.20 * normalized_depth
+ 0.20 * fanout_concentration
+ 0.20 * cycle_ratio
+ 0.10 * fragmentation;
let simplicity_index = (100.0 - raw_complexity * 100.0).clamp(0.0, 100.0);
let level = ComplexityLevel::from_score(simplicity_index);
(simplicity_index, level, factors)
}
/// Document-level provenance signals: who produced the SBOM, when, and how
/// completely the production context is declared.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProvenanceMetrics {
pub has_tool_creator: bool,
/// Heuristic: a tool creator whose name contains ' ', '/' or '@' is
/// assumed to embed a version string.
pub has_tool_version: bool,
pub has_org_creator: bool,
pub has_contact_email: bool,
pub has_serial_number: bool,
pub has_document_name: bool,
/// Age of the document's creation timestamp, clamped to >= 0.
pub timestamp_age_days: u32,
/// True when younger than `FRESHNESS_THRESHOLD_DAYS`.
pub is_fresh: bool,
pub has_primary_component: bool,
pub lifecycle_phase: Option<String>,
pub completeness_declaration: CompletenessDeclaration,
pub has_signature: bool,
pub has_citations: bool,
pub citations_count: usize,
}
/// Documents older than this many days are no longer considered "fresh".
const FRESHNESS_THRESHOLD_DAYS: u32 = 90;
impl ProvenanceMetrics {
    /// Extracts provenance signals from the SBOM document metadata.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let doc = &sbom.document;
        let mut has_tool_creator = false;
        let mut has_tool_version = false;
        let mut has_org_creator = false;
        let mut has_contact_email = false;
        // Single pass over the creators list, classifying as we go.
        for creator in &doc.creators {
            match creator.creator_type {
                CreatorType::Tool => {
                    has_tool_creator = true;
                    // Heuristic: tool entries that embed a version usually
                    // look like "name 1.2", "name/1.2" or "name@1.2".
                    if creator.name.contains(' ')
                        || creator.name.contains('/')
                        || creator.name.contains('@')
                    {
                        has_tool_version = true;
                    }
                }
                CreatorType::Organization => has_org_creator = true,
                _ => {}
            }
            if creator.email.is_some() {
                has_contact_email = true;
            }
        }
        // Clamp negative ages (future-dated documents) to zero before casting.
        let age_days = (chrono::Utc::now() - doc.created).num_days().max(0) as u32;
        Self {
            has_tool_creator,
            has_tool_version,
            has_org_creator,
            has_contact_email,
            has_serial_number: doc.serial_number.is_some(),
            has_document_name: doc.name.is_some(),
            timestamp_age_days: age_days,
            is_fresh: age_days < FRESHNESS_THRESHOLD_DAYS,
            has_primary_component: sbom.primary_component_id.is_some(),
            lifecycle_phase: doc.lifecycle_phase.clone(),
            completeness_declaration: doc.completeness_declaration.clone(),
            has_signature: doc.signature.is_some(),
            has_citations: doc.citations_count > 0,
            citations_count: doc.citations_count,
        }
    }

    /// Weighted checklist score, 0-100. CycloneDX documents are additionally
    /// judged on lifecycle phase and citations, which SPDX cannot express.
    #[must_use]
    pub fn quality_score(&self, is_cyclonedx: bool) -> f32 {
        let mut checks: Vec<(bool, f32)> = vec![
            (self.has_tool_creator, 15.0),
            (self.has_tool_version, 5.0),
            (self.has_org_creator, 12.0),
            (self.has_contact_email, 8.0),
            (self.has_serial_number, 8.0),
            (self.has_document_name, 5.0),
            (self.is_fresh, 12.0),
            (self.has_primary_component, 12.0),
            (
                self.completeness_declaration != CompletenessDeclaration::Unknown,
                8.0,
            ),
            (self.has_signature, 5.0),
        ];
        if is_cyclonedx {
            checks.push((self.lifecycle_phase.is_some(), 10.0));
            checks.push((self.has_citations, 5.0));
        }
        let total_weight: f32 = checks.iter().map(|&(_, w)| w).sum();
        let score: f32 = checks
            .iter()
            .filter(|&&(present, _)| present)
            .map(|&(_, w)| w)
            .sum();
        if total_weight > 0.0 {
            (score / total_weight) * 100.0
        } else {
            0.0
        }
    }
}
/// How auditable the SBOM's components are: external references plus
/// document-level security contact information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditabilityMetrics {
/// Components with a VCS (source repository) reference.
pub components_with_vcs: usize,
pub components_with_website: usize,
/// Components linking to a security-advisories feed.
pub components_with_advisories: usize,
pub components_with_any_external_ref: usize,
pub has_security_contact: bool,
pub has_vuln_disclosure_url: bool,
}
impl AuditabilityMetrics {
    /// Tallies external-reference coverage per component plus document-level
    /// security contact data.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let mut with_vcs = 0;
        let mut with_website = 0;
        let mut with_advisories = 0;
        let mut with_any = 0;
        for comp in sbom.components.values() {
            if comp.external_refs.is_empty() {
                continue;
            }
            with_any += 1;
            // Single pass over the refs instead of one scan per ref type.
            let (mut vcs, mut website, mut advisories) = (false, false, false);
            for r in &comp.external_refs {
                match r.ref_type {
                    ExternalRefType::Vcs => vcs = true,
                    ExternalRefType::Website => website = true,
                    ExternalRefType::Advisories => advisories = true,
                    _ => {}
                }
            }
            if vcs {
                with_vcs += 1;
            }
            if website {
                with_website += 1;
            }
            if advisories {
                with_advisories += 1;
            }
        }
        Self {
            components_with_vcs: with_vcs,
            components_with_website: with_website,
            components_with_advisories: with_advisories,
            components_with_any_external_ref: with_any,
            has_security_contact: sbom.document.security_contact.is_some(),
            has_vuln_disclosure_url: sbom.document.vulnerability_disclosure_url.is_some(),
        }
    }

    /// Scores auditability 0-100: up to 40 points for any-external-ref
    /// coverage, up to 20 for VCS links, 20 for a security contact, and 20
    /// for a vulnerability-disclosure URL.
    #[must_use]
    pub fn quality_score(&self, total_components: usize) -> f32 {
        if total_components == 0 {
            return 0.0;
        }
        let denom = total_components as f32;
        let mut score = (self.components_with_any_external_ref as f32 / denom) * 40.0;
        score += (self.components_with_vcs as f32 / denom) * 20.0;
        if self.has_security_contact {
            score += 20.0;
        }
        if self.has_vuln_disclosure_url {
            score += 20.0;
        }
        score.min(100.0)
    }
}
/// End-of-life / staleness enrichment tallies across components.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LifecycleMetrics {
pub eol_components: usize,
pub stale_components: usize,
pub deprecated_components: usize,
pub archived_components: usize,
/// Components for which a newer version is known.
pub outdated_components: usize,
/// Components that carried any lifecycle enrichment (EOL or staleness).
pub enriched_components: usize,
/// Percentage (0-100) of components with enrichment data.
pub enrichment_coverage: f32,
}
impl LifecycleMetrics {
/// Tallies end-of-life and staleness enrichment data across components.
#[must_use]
pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
let total = sbom.components.len();
let mut eol = 0;
let mut stale = 0;
let mut deprecated = 0;
let mut archived = 0;
let mut outdated = 0;
let mut enriched = 0;
for comp in sbom.components.values() {
// "Enriched" means any lifecycle source supplied data for the component.
let has_lifecycle_data = comp.eol.is_some() || comp.staleness.is_some();
if has_lifecycle_data {
enriched += 1;
}
if let Some(ref eol_info) = comp.eol
&& eol_info.status == EolStatus::EndOfLife
{
eol += 1;
}
if let Some(ref stale_info) = comp.staleness {
match stale_info.level {
StalenessLevel::Stale | StalenessLevel::Abandoned => stale += 1,
StalenessLevel::Deprecated => deprecated += 1,
StalenessLevel::Archived => archived += 1,
_ => {}
}
// NOTE(review): a component whose `level` is Deprecated/Archived AND
// whose matching boolean flag is also set is counted twice in the
// same bucket — confirm whether that double count is intended.
if stale_info.is_deprecated {
deprecated += 1;
}
if stale_info.is_archived {
archived += 1;
}
// A known newer version marks the component as outdated.
if stale_info.latest_version.is_some() {
outdated += 1;
}
}
}
let coverage = if total > 0 {
(enriched as f32 / total as f32) * 100.0
} else {
0.0
};
Self {
eol_components: eol,
stale_components: stale,
deprecated_components: deprecated,
archived_components: archived,
outdated_components: outdated,
enriched_components: enriched,
enrichment_coverage: coverage,
}
}
/// True when at least one component carried lifecycle enrichment data.
#[must_use]
pub fn has_data(&self) -> bool {
self.enriched_components > 0
}
/// Deduction-based lifecycle score, or `None` without enrichment data
/// (so "no data" is not reported as a perfect score).
#[must_use]
pub fn quality_score(&self) -> Option<f32> {
if !self.has_data() {
return None;
}
let mut score = 100.0_f32;
// Per-finding penalties, each capped so one category cannot zero the
// score on its own.
score -= (self.eol_components as f32 * 15.0).min(60.0);
score -= (self.stale_components as f32 * 5.0).min(30.0);
score -= ((self.deprecated_components + self.archived_components) as f32 * 3.0).min(20.0);
score -= (self.outdated_components as f32 * 1.0).min(10.0);
Some(score.clamp(0.0, 100.0))
}
}
/// CBOM (cryptographic bill of materials) statistics gathered from
/// components flagged as cryptographic.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CryptographyMetrics {
pub total_crypto_components: usize,
// Counts per crypto asset type.
pub algorithms_count: usize,
pub certificates_count: usize,
pub keys_count: usize,
pub protocols_count: usize,
// Quantum-safety and strength findings.
pub quantum_safe_count: usize,
pub quantum_vulnerable_count: usize,
pub weak_algorithm_count: usize,
pub hybrid_pqc_count: usize,
// Certificate / key hygiene findings.
pub expired_certificates: usize,
pub expiring_soon_certificates: usize,
pub compromised_keys: usize,
pub inadequate_key_sizes: usize,
/// Names of components whose algorithm was flagged weak by name.
pub weak_algorithm_names: Vec<String>,
// Documentation-completeness counters for algorithm entries.
pub algorithms_with_oid: usize,
pub algorithms_with_family: usize,
pub algorithms_with_primitive: usize,
pub algorithms_with_security_level: usize,
// Cross-reference resolution counters.
pub certs_with_signature_algo_ref: usize,
pub keys_with_algorithm_ref: usize,
pub protocols_with_cipher_suites: usize,
// Key / certificate lifecycle documentation counters.
pub keys_with_state: usize,
pub keys_with_protection: usize,
pub keys_with_lifecycle_dates: usize,
pub certs_with_validity_dates: usize,
}
impl CryptographyMetrics {
/// Walks every component flagged [`ComponentType::Cryptographic`] and
/// tallies CBOM statistics by asset type: algorithms, certificates,
/// related crypto material (keys), and protocols.
#[must_use]
pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
let mut m = Self::default();
for comp in sbom.components.values() {
if comp.component_type != ComponentType::Cryptographic {
continue;
}
m.total_crypto_components += 1;
// A component may be marked cryptographic without detailed properties.
let Some(cp) = &comp.crypto_properties else {
continue;
};
match cp.asset_type {
CryptoAssetType::Algorithm => {
m.algorithms_count += 1;
if cp.oid.is_some() {
m.algorithms_with_oid += 1;
}
if let Some(algo) = &cp.algorithm_properties {
if algo.algorithm_family.is_some() {
m.algorithms_with_family += 1;
}
// `Other` means the primitive was not recognized, so it does not
// count as a documented primitive.
if !matches!(algo.primitive, CryptoPrimitive::Other(_)) {
m.algorithms_with_primitive += 1;
}
if algo.classical_security_level.is_some()
|| algo.nist_quantum_security_level.is_some()
{
m.algorithms_with_security_level += 1;
}
// NIST quantum security level 0 explicitly marks an algorithm as
// quantum-vulnerable; an absent level counts in neither bucket.
if algo.is_quantum_safe() {
m.quantum_safe_count += 1;
} else if algo.nist_quantum_security_level == Some(0) {
m.quantum_vulnerable_count += 1;
}
if algo.is_weak_by_name(&comp.name) {
m.weak_algorithm_count += 1;
m.weak_algorithm_names.push(comp.name.clone());
}
if algo.is_hybrid_pqc() {
m.hybrid_pqc_count += 1;
}
}
}
CryptoAssetType::Certificate => {
m.certificates_count += 1;
if let Some(cert) = &cp.certificate_properties {
if cert.not_valid_before.is_some() && cert.not_valid_after.is_some() {
m.certs_with_validity_dates += 1;
}
if cert.signature_algorithm_ref.is_some() {
m.certs_with_signature_algo_ref += 1;
}
// Expired and expiring-soon (within 90 days) are mutually
// exclusive buckets.
if cert.is_expired() {
m.expired_certificates += 1;
} else if cert.is_expiring_soon(90) {
m.expiring_soon_certificates += 1;
}
}
}
CryptoAssetType::RelatedCryptoMaterial => {
m.keys_count += 1;
if let Some(mat) = &cp.related_crypto_material_properties {
if mat.state.is_some() {
m.keys_with_state += 1;
}
if mat.secured_by.is_some() {
m.keys_with_protection += 1;
}
if mat.creation_date.is_some() || mat.activation_date.is_some() {
m.keys_with_lifecycle_dates += 1;
}
if mat.algorithm_ref.is_some() {
m.keys_with_algorithm_ref += 1;
}
if mat.state == Some(CryptoMaterialState::Compromised) {
m.compromised_keys += 1;
}
// Key-size floors: 128 bits for symmetric material, 2048 for
// everything else. NOTE(review): the 2048 floor assumes RSA-like
// keys and would misclassify sound elliptic-curve keys (e.g.
// 256-bit EC) — confirm how EC material is represented.
if let Some(size) = mat.size {
let is_symmetric = matches!(
mat.material_type,
crate::model::CryptoMaterialType::SymmetricKey
| crate::model::CryptoMaterialType::SecretKey
);
if (is_symmetric && size < 128) || (!is_symmetric && size < 2048) {
m.inadequate_key_sizes += 1;
}
}
}
}
CryptoAssetType::Protocol => {
m.protocols_count += 1;
if let Some(proto) = &cp.protocol_properties
&& !proto.cipher_suites.is_empty()
{
m.protocols_with_cipher_suites += 1;
}
}
_ => {}
}
}
m
}
/// True when the SBOM contained at least one cryptographic component.
#[must_use]
pub fn has_data(&self) -> bool {
    self.total_crypto_components != 0
}
/// Share of algorithms flagged quantum-safe, as 0-100. An SBOM with no
/// algorithm entries scores a vacuous 100.
#[must_use]
pub fn quantum_readiness_score(&self) -> f32 {
    match self.algorithms_count {
        0 => 100.0,
        n => (self.quantum_safe_count as f32 / n as f32) * 100.0,
    }
}
/// Deduction-based crypto hygiene score, or `None` when the SBOM contains
/// no cryptographic components at all.
#[must_use]
pub fn quality_score(&self) -> Option<f32> {
    if !self.has_data() {
        return None;
    }
    // (finding count, points per finding, category cap), applied in order.
    let penalties = [
        (self.weak_algorithm_count, 15.0, 50.0),
        (self.quantum_vulnerable_count, 8.0, 40.0),
        (self.expired_certificates, 10.0, 30.0),
        (self.compromised_keys, 20.0, 40.0),
        (self.inadequate_key_sizes, 5.0, 20.0),
        (self.expiring_soon_certificates, 3.0, 15.0),
    ];
    let mut score = 100.0_f32;
    for (count, per_finding, cap) in penalties {
        score -= (count as f32 * per_finding).min(cap);
    }
    // Hybrid classical+PQC deployments earn a small capped bonus.
    score += (self.hybrid_pqc_count as f32 * 2.0).min(10.0);
    Some(score.clamp(0.0, 100.0))
}
/// How completely algorithm entries are described: family 40%, primitive
/// 30%, security level 30%. Vacuously 100 when no algorithms are present.
#[must_use]
pub fn crypto_completeness_score(&self) -> f32 {
    if self.algorithms_count == 0 {
        return 100.0;
    }
    let n = self.algorithms_count as f32;
    let family_pct = self.algorithms_with_family as f32 / n;
    let primitive_pct = self.algorithms_with_primitive as f32 / n;
    let level_pct = self.algorithms_with_security_level as f32 / n;
    (family_pct * 40.0 + primitive_pct * 30.0 + level_pct * 30.0).clamp(0.0, 100.0)
}
/// Share of algorithm entries carrying an OID, as 0-100. Vacuously 100
/// when no algorithms are present.
#[must_use]
pub fn crypto_identifier_score(&self) -> f32 {
    match self.algorithms_count {
        0 => 100.0,
        n => {
            let oid_pct = self.algorithms_with_oid as f32 / n as f32;
            (oid_pct * 100.0).clamp(0.0, 100.0)
        }
    }
}
#[must_use]
pub fn algorithm_strength_score(&self) -> f32 {
if self.algorithms_count == 0 {
return 100.0;
}
let mut score = 100.0_f32;
score -= (self.weak_algorithm_count as f32 * 15.0).min(60.0);
score -= (self.inadequate_key_sizes as f32 * 8.0).min(30.0);
if self.algorithms_count > 0 {
let vuln_pct = self.quantum_vulnerable_count as f32 / self.algorithms_count as f32;
score -= vuln_pct * 30.0;
}
score.clamp(0.0, 100.0)
}
#[must_use]
pub fn crypto_dependency_score(&self) -> f32 {
let linkable = self.certificates_count + self.keys_count + self.protocols_count;
if linkable == 0 {
return 100.0;
}
let resolved = self.certs_with_signature_algo_ref
+ self.keys_with_algorithm_ref
+ self.protocols_with_cipher_suites;
let pct = resolved as f32 / linkable as f32;
(pct * 100.0).clamp(0.0, 100.0)
}
    /// Scores key and certificate lifecycle hygiene in `[0, 100]`.
    ///
    /// Starts from 100. When keys are present, half the score is blended with
    /// a key-metadata completeness figure (state 40%, protection 30%,
    /// lifecycle dates 30%), then capped penalties apply for compromised and
    /// undersized keys. When certificates are present, further penalties
    /// cover missing validity dates, expired, and soon-to-expire certs.
    #[must_use]
    pub fn crypto_lifecycle_score(&self) -> f32 {
        let mut score = 100.0_f32;
        if self.keys_count > 0 {
            let state_pct = self.keys_with_state as f32 / self.keys_count as f32;
            let protection_pct = self.keys_with_protection as f32 / self.keys_count as f32;
            let lifecycle_pct = self.keys_with_lifecycle_dates as f32 / self.keys_count as f32;
            let key_completeness =
                (state_pct * 0.4 + protection_pct * 0.3 + lifecycle_pct * 0.3) * 100.0;
            // Blend: half of the running score, half key-metadata completeness.
            score = score * 0.5 + key_completeness * 0.5;
            score -= (self.compromised_keys as f32 * 20.0).min(40.0);
            score -= (self.inadequate_key_sizes as f32 * 5.0).min(20.0);
        }
        if self.certificates_count > 0 {
            let validity_pct =
                self.certs_with_validity_dates as f32 / self.certificates_count as f32;
            // Penalize the *missing* validity-date fraction, up to 15 points.
            score -= (1.0 - validity_pct) * 15.0;
            score -= (self.expired_certificates as f32 * 15.0).min(45.0);
            score -= (self.expiring_soon_certificates as f32 * 5.0).min(20.0);
        }
        score.clamp(0.0, 100.0)
    }
#[must_use]
pub fn pqc_readiness_score(&self) -> f32 {
if self.algorithms_count == 0 {
return 100.0;
}
let mut score = 0.0_f32;
let qs_pct = self.quantum_safe_count as f32 / self.algorithms_count as f32;
score += qs_pct * 60.0;
if self.hybrid_pqc_count > 0 {
score += 15.0;
}
if self.weak_algorithm_count == 0 {
score += 25.0;
} else {
score += (25.0 - self.weak_algorithm_count as f32 * 5.0).max(0.0);
}
score.clamp(0.0, 100.0)
}
#[must_use]
pub fn quantum_readiness_pct(&self) -> f32 {
if self.algorithms_count == 0 {
return 0.0;
}
(self.quantum_safe_count as f32 / self.algorithms_count as f32) * 100.0
}
#[must_use]
pub const fn cbom_category_labels() -> [&'static str; 8] {
["Crpt", "OIDs", "Algo", "Refs", "Life", "PQC", "Prov", "Lic"]
}
}
/// Lightweight package-url sanity check: requires the `pkg:` scheme followed
/// by a non-empty type segment terminated by `/` (e.g. `pkg:npm/left-pad`).
/// This is a heuristic, not a full purl-spec parser.
fn is_valid_purl(purl: &str) -> bool {
    match purl.strip_prefix("pkg:") {
        // The slash must come after at least one type character; the old
        // `contains('/')` check wrongly accepted `pkg:/name` (empty type).
        Some(rest) => rest.find('/').is_some_and(|idx| idx > 0),
        None => false,
    }
}
/// Pulls the ecosystem/type segment out of a purl — e.g. `"npm"` from
/// `pkg:npm/left-pad`. Returns `None` when the `pkg:` scheme or the `/`
/// separator is absent.
fn extract_ecosystem_from_purl(purl: &str) -> Option<String> {
    let rest = purl.strip_prefix("pkg:")?;
    let (ecosystem, _) = rest.split_once('/')?;
    Some(ecosystem.to_string())
}
/// Accepts both CPE 2.3 formatted strings (`cpe:2.3:…`) and legacy CPE 2.2
/// URI form (`cpe:/…`). Prefix check only — no field-level validation.
fn is_valid_cpe(cpe: &str) -> bool {
    ["cpe:2.3:", "cpe:/"]
        .iter()
        .any(|prefix| cpe.starts_with(prefix))
}
/// Heuristic SPDX-expression check: accepts identifiers from a short list of
/// frequently seen licenses, or any expression containing an SPDX compound
/// operator (`AND`/`OR`/`WITH`). Not a full SPDX expression parser.
fn is_valid_spdx_license(expr: &str) -> bool {
    // Frequently encountered SPDX identifiers; intentionally non-exhaustive.
    const COMMON_SPDX: &[&str] = &[
        "MIT",
        "Apache-2.0",
        "GPL-2.0",
        "GPL-3.0",
        "BSD-2-Clause",
        "BSD-3-Clause",
        "ISC",
        "MPL-2.0",
        "LGPL-2.1",
        "LGPL-3.0",
        "AGPL-3.0",
        "Unlicense",
        "CC0-1.0",
        "0BSD",
        "EPL-2.0",
        "CDDL-1.0",
        "Artistic-2.0",
        "GPL-2.0-only",
        "GPL-2.0-or-later",
        "GPL-3.0-only",
        "GPL-3.0-or-later",
        "LGPL-2.1-only",
        "LGPL-2.1-or-later",
        "LGPL-3.0-only",
        "LGPL-3.0-or-later",
    ];
    let trimmed = expr.trim();
    if COMMON_SPDX.contains(&trimmed) {
        return true;
    }
    // Compound expressions are assumed valid when they use SPDX operators.
    [" AND ", " OR ", " WITH "]
        .iter()
        .any(|op| trimmed.contains(op))
}
/// True when the (trimmed) expression exactly matches an SPDX identifier that
/// the SPDX license list has deprecated — mostly GPL-family ids superseded by
/// the `-only` / `-or-later` variants.
fn is_deprecated_spdx_license(expr: &str) -> bool {
    const DEPRECATED: &[&str] = &[
        "GPL-2.0",
        "GPL-2.0+",
        "GPL-3.0",
        "GPL-3.0+",
        "LGPL-2.0",
        "LGPL-2.0+",
        "LGPL-2.1",
        "LGPL-2.1+",
        "LGPL-3.0",
        "LGPL-3.0+",
        "AGPL-1.0",
        "AGPL-3.0",
        "GFDL-1.1",
        "GFDL-1.2",
        "GFDL-1.3",
        "BSD-2-Clause-FreeBSD",
        "BSD-2-Clause-NetBSD",
        "eCos-2.0",
        "Nunit",
        "StandardML-NJ",
        "wxWindows",
    ];
    let candidate = expr.trim();
    DEPRECATED.iter().any(|id| *id == candidate)
}
/// Case-insensitive prefix test for license families commonly treated as
/// restrictive (copyleft / share-alike / non-commercial).
fn is_restrictive_license(expr: &str) -> bool {
    const RESTRICTIVE_PREFIXES: &[&str] = &[
        "GPL", "LGPL", "AGPL", "EUPL", "SSPL", "OSL", "CPAL", "CC-BY-SA", "CC-BY-NC",
    ];
    let normalized = expr.trim().to_uppercase();
    RESTRICTIVE_PREFIXES
        .iter()
        .any(|prefix| normalized.starts_with(prefix))
}
// Unit tests for the validation helpers and metric scoring types defined in
// this module. Struct-based tests build metric values by hand (via struct
// literals or `..Default::default()`) rather than parsing a real SBOM.
#[cfg(test)]
mod tests {
    use super::*;
    // purl heuristic: accepts `pkg:` scheme with a type segment.
    #[test]
    fn test_purl_validation() {
        assert!(is_valid_purl("pkg:npm/@scope/name@1.0.0"));
        assert!(is_valid_purl("pkg:maven/group/artifact@1.0"));
        assert!(!is_valid_purl("npm:something"));
        assert!(!is_valid_purl("invalid"));
    }
    // CPE heuristic: accepts both 2.3 formatted strings and 2.2 URIs.
    #[test]
    fn test_cpe_validation() {
        assert!(is_valid_cpe("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*"));
        assert!(is_valid_cpe("cpe:/a:vendor:product:1.0"));
        assert!(!is_valid_cpe("something:else"));
    }
    // Known ids and compound operator expressions are accepted.
    #[test]
    fn test_spdx_license_validation() {
        assert!(is_valid_spdx_license("MIT"));
        assert!(is_valid_spdx_license("Apache-2.0"));
        assert!(is_valid_spdx_license("MIT AND Apache-2.0"));
        assert!(is_valid_spdx_license("GPL-2.0 OR MIT"));
    }
    // SHA-2/SHA-3/BLAKE3 count as strong; MD5/SHA-1/unknown do not.
    #[test]
    fn test_strong_hash_classification() {
        assert!(is_strong_hash(&HashAlgorithm::Sha256));
        assert!(is_strong_hash(&HashAlgorithm::Sha3_256));
        assert!(is_strong_hash(&HashAlgorithm::Blake3));
        assert!(!is_strong_hash(&HashAlgorithm::Md5));
        assert!(!is_strong_hash(&HashAlgorithm::Sha1));
        assert!(!is_strong_hash(&HashAlgorithm::Other("custom".to_string())));
    }
    // Exact-match deprecation: `-only`/`-or-later` variants are NOT deprecated.
    #[test]
    fn test_deprecated_license_detection() {
        assert!(is_deprecated_spdx_license("GPL-2.0"));
        assert!(is_deprecated_spdx_license("LGPL-2.1"));
        assert!(is_deprecated_spdx_license("AGPL-3.0"));
        assert!(!is_deprecated_spdx_license("GPL-2.0-only"));
        assert!(!is_deprecated_spdx_license("MIT"));
        assert!(!is_deprecated_spdx_license("Apache-2.0"));
    }
    // Copyleft/share-alike family prefixes flag as restrictive; permissive ids don't.
    #[test]
    fn test_restrictive_license_detection() {
        assert!(is_restrictive_license("GPL-3.0-only"));
        assert!(is_restrictive_license("LGPL-2.1-or-later"));
        assert!(is_restrictive_license("AGPL-3.0-only"));
        assert!(is_restrictive_license("EUPL-1.2"));
        assert!(is_restrictive_license("CC-BY-SA-4.0"));
        assert!(!is_restrictive_license("MIT"));
        assert!(!is_restrictive_license("Apache-2.0"));
        assert!(!is_restrictive_license("BSD-3-Clause"));
    }
    // Zero components yields a 0 hash-quality score, not a divide-by-zero.
    #[test]
    fn test_hash_quality_score_no_components() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 0,
            components_with_strong_hash: 0,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 0,
        };
        assert_eq!(metrics.quality_score(0), 0.0);
    }
    // Full strong-hash coverage scores a perfect 100.
    #[test]
    fn test_hash_quality_score_all_strong() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 10,
            components_with_strong_hash: 10,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 10,
        };
        assert_eq!(metrics.quality_score(10), 100.0);
    }
    // All-weak hashes halve the score relative to full strong coverage.
    #[test]
    fn test_hash_quality_score_weak_only_penalty() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 10,
            components_with_strong_hash: 0,
            components_with_weak_only: 10,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 10,
        };
        assert_eq!(metrics.quality_score(10), 50.0);
    }
    // Without any enrichment data there is no lifecycle score to report.
    #[test]
    fn test_lifecycle_no_enrichment_returns_none() {
        let metrics = LifecycleMetrics {
            eol_components: 0,
            stale_components: 0,
            deprecated_components: 0,
            archived_components: 0,
            outdated_components: 0,
            enriched_components: 0,
            enrichment_coverage: 0.0,
        };
        assert!(!metrics.has_data());
        assert!(metrics.quality_score().is_none());
    }
    // Two EOL components cost 30 points (100 -> 70).
    #[test]
    fn test_lifecycle_with_eol_penalty() {
        let metrics = LifecycleMetrics {
            eol_components: 2,
            stale_components: 0,
            deprecated_components: 0,
            archived_components: 0,
            outdated_components: 0,
            enriched_components: 10,
            enrichment_coverage: 100.0,
        };
        assert_eq!(metrics.quality_score(), Some(70.0));
    }
    // A simple chain a -> b -> c contains no cycles.
    #[test]
    fn test_cycle_detection_no_cycles() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("a", vec!["b"]), ("b", vec!["c"])]);
        let all_nodes = vec!["a", "b", "c"];
        assert_eq!(detect_cycles(&all_nodes, &children), 0);
    }
    // Closing the chain back to `a` is detected as exactly one cycle.
    #[test]
    fn test_cycle_detection_with_cycle() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("a", vec!["b"]), ("b", vec!["c"]), ("c", vec!["a"])]);
        let all_nodes = vec!["a", "b", "c"];
        assert_eq!(detect_cycles(&all_nodes, &children), 1);
    }
    // root -> a -> c is the longest path, so max depth is 2.
    #[test]
    fn test_depth_computation() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("root", vec!["a", "b"]), ("a", vec!["c"])]);
        let roots = vec!["root"];
        let (max_d, avg_d) = compute_depth(&roots, &children);
        assert_eq!(max_d, Some(2)); assert!(avg_d.is_some());
    }
    // No roots means depth statistics are undefined (None), not zero.
    #[test]
    fn test_depth_empty_roots() {
        let children: HashMap<&str, Vec<&str>> = HashMap::new();
        let roots: Vec<&str> = vec![];
        let (max_d, avg_d) = compute_depth(&roots, &children);
        assert_eq!(max_d, None);
        assert_eq!(avg_d, None);
    }
    // Every provenance attribute present, with citations -> perfect score
    // (scored in CycloneDX mode, i.e. `quality_score(true)`).
    #[test]
    fn test_provenance_quality_score() {
        let metrics = ProvenanceMetrics {
            has_tool_creator: true,
            has_tool_version: true,
            has_org_creator: true,
            has_contact_email: true,
            has_serial_number: true,
            has_document_name: true,
            timestamp_age_days: 10,
            is_fresh: true,
            has_primary_component: true,
            lifecycle_phase: Some("build".to_string()),
            completeness_declaration: CompletenessDeclaration::Complete,
            has_signature: true,
            has_citations: true,
            citations_count: 3,
        };
        assert_eq!(metrics.quality_score(true), 100.0);
    }
    // Outside CycloneDX mode (`quality_score(false)`) the missing
    // lifecycle-phase/citations fields do not prevent a perfect score.
    #[test]
    fn test_provenance_score_without_cyclonedx() {
        let metrics = ProvenanceMetrics {
            has_tool_creator: true,
            has_tool_version: true,
            has_org_creator: true,
            has_contact_email: true,
            has_serial_number: true,
            has_document_name: true,
            timestamp_age_days: 10,
            is_fresh: true,
            has_primary_component: true,
            lifecycle_phase: None,
            completeness_declaration: CompletenessDeclaration::Complete,
            has_signature: true,
            has_citations: false,
            citations_count: 0,
        };
        assert_eq!(metrics.quality_score(false), 100.0);
    }
    // An empty graph is maximally simple with zeroed complexity factors.
    #[test]
    fn test_complexity_empty_graph() {
        let (simplicity, level, factors) = compute_complexity(0, 0, 0, 0, 0, 0, 0);
        assert_eq!(simplicity, 100.0);
        assert_eq!(level, ComplexityLevel::Low);
        assert_eq!(factors.dependency_volume, 0.0);
    }
    // A single isolated node stays firmly in the Low band.
    #[test]
    fn test_complexity_single_node() {
        let (simplicity, level, _) = compute_complexity(0, 1, 0, 0, 0, 1, 1);
        assert!(
            simplicity >= 80.0,
            "Single node simplicity {simplicity} should be >= 80"
        );
        assert_eq!(level, ComplexityLevel::Low);
    }
    // Monotonicity: adding edges never makes the graph look simpler.
    #[test]
    fn test_complexity_monotonic_edges() {
        let (s1, _, _) = compute_complexity(5, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(20, 10, 2, 3, 0, 1, 1);
        assert!(
            s2 <= s1,
            "More edges should not increase simplicity: {s2} vs {s1}"
        );
    }
    // Monotonicity: adding cycles never makes the graph look simpler.
    #[test]
    fn test_complexity_monotonic_cycles() {
        let (s1, _, _) = compute_complexity(10, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(10, 10, 2, 3, 3, 1, 1);
        assert!(
            s2 <= s1,
            "More cycles should not increase simplicity: {s2} vs {s1}"
        );
    }
    // Monotonicity: deeper trees never make the graph look simpler.
    #[test]
    fn test_complexity_monotonic_depth() {
        let (s1, _, _) = compute_complexity(10, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(10, 10, 10, 3, 0, 1, 1);
        assert!(
            s2 <= s1,
            "More depth should not increase simplicity: {s2} vs {s1}"
        );
    }
    // Sanity bound: a busy graph still scores within [0, 100].
    #[test]
    fn test_complexity_graph_skipped() {
        let (simplicity, _, _) = compute_complexity(100, 50, 5, 10, 2, 5, 3);
        assert!(simplicity >= 0.0 && simplicity <= 100.0);
    }
    // Band boundaries: Low >= 75, Moderate >= 50, High >= 25, else VeryHigh.
    #[test]
    fn test_complexity_level_bands() {
        assert_eq!(ComplexityLevel::from_score(100.0), ComplexityLevel::Low);
        assert_eq!(ComplexityLevel::from_score(75.0), ComplexityLevel::Low);
        assert_eq!(ComplexityLevel::from_score(74.0), ComplexityLevel::Moderate);
        assert_eq!(ComplexityLevel::from_score(50.0), ComplexityLevel::Moderate);
        assert_eq!(ComplexityLevel::from_score(49.0), ComplexityLevel::High);
        assert_eq!(ComplexityLevel::from_score(25.0), ComplexityLevel::High);
        assert_eq!(ComplexityLevel::from_score(24.0), ComplexityLevel::VeryHigh);
        assert_eq!(ComplexityLevel::from_score(0.0), ComplexityLevel::VeryHigh);
    }
    // Display strings for the completeness-declaration enum variants.
    #[test]
    fn test_completeness_declaration_display() {
        assert_eq!(CompletenessDeclaration::Complete.to_string(), "complete");
        assert_eq!(
            CompletenessDeclaration::IncompleteFirstPartyOnly.to_string(),
            "incomplete (first-party only)"
        );
        assert_eq!(CompletenessDeclaration::Unknown.to_string(), "unknown");
    }
    // Family/primitive/security-level fully documented -> completeness 100.
    #[test]
    fn crypto_completeness_all_documented() {
        let m = CryptographyMetrics {
            algorithms_count: 4,
            algorithms_with_family: 4,
            algorithms_with_primitive: 4,
            algorithms_with_security_level: 4,
            ..Default::default()
        };
        let score = m.crypto_completeness_score();
        assert!(
            (score - 100.0).abs() < 0.1,
            "fully documented → 100, got {score}"
        );
    }
    // Half family (40% weight), full primitive (30%), no level (30%) -> 50.
    #[test]
    fn crypto_completeness_partial() {
        let m = CryptographyMetrics {
            algorithms_count: 4,
            algorithms_with_family: 2, algorithms_with_primitive: 4, algorithms_with_security_level: 0, ..Default::default()
        };
        let score = m.crypto_completeness_score();
        assert!((score - 50.0).abs() < 0.1, "partial → 50, got {score}");
    }
    // Every algorithm has an OID -> identifier score 100.
    #[test]
    fn crypto_identifier_full_oid_coverage() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            algorithms_with_oid: 5,
            ..Default::default()
        };
        assert!((m.crypto_identifier_score() - 100.0).abs() < 0.1);
    }
    // No OIDs at all -> identifier score 0.
    #[test]
    fn crypto_identifier_no_oids() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            algorithms_with_oid: 0,
            ..Default::default()
        };
        assert!((m.crypto_identifier_score() - 0.0).abs() < 0.1);
    }
    // Two weak algorithms cost 2 * 15 = 30 points (100 -> 70).
    #[test]
    fn algorithm_strength_weak_penalty() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            weak_algorithm_count: 2,
            ..Default::default()
        };
        let score = m.algorithm_strength_score();
        assert!((score - 70.0).abs() < 0.1, "2 weak → 70, got {score}");
    }
    // 100% quantum-vulnerable costs the full 30-point vulnerability share.
    #[test]
    fn algorithm_strength_quantum_vulnerable() {
        let m = CryptographyMetrics {
            algorithms_count: 10,
            quantum_vulnerable_count: 10,
            ..Default::default()
        };
        let score = m.algorithm_strength_score();
        assert!(
            (score - 70.0).abs() < 0.1,
            "all quantum vuln → 70, got {score}"
        );
    }
    // Perfect key metadata but one compromised key lands between 50 and 85.
    #[test]
    fn crypto_lifecycle_compromised_keys() {
        let m = CryptographyMetrics {
            keys_count: 3,
            keys_with_state: 3,
            keys_with_protection: 3,
            keys_with_lifecycle_dates: 3,
            compromised_keys: 1,
            ..Default::default()
        };
        let score = m.crypto_lifecycle_score();
        assert!(score < 85.0);
        assert!(score > 50.0);
    }
    // Expired plus expiring-soon certificates push the score below 70.
    #[test]
    fn crypto_lifecycle_expired_certs() {
        let m = CryptographyMetrics {
            certificates_count: 4,
            certs_with_validity_dates: 4,
            expired_certificates: 2,
            expiring_soon_certificates: 1,
            ..Default::default()
        };
        let score = m.crypto_lifecycle_score();
        assert!(score < 70.0);
    }
    // All quantum-safe + hybrid bonus + no weak algorithms -> full 100.
    #[test]
    fn pqc_readiness_all_quantum_safe() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            quantum_safe_count: 5,
            hybrid_pqc_count: 2,
            weak_algorithm_count: 0,
            ..Default::default()
        };
        let score = m.pqc_readiness_score();
        assert!(
            (score - 100.0).abs() < 0.1,
            "all safe + hybrid → 100, got {score}"
        );
    }
    // Nothing quantum-safe and no hybrid leaves only the 25-point
    // no-weak-algorithms credit.
    #[test]
    fn pqc_readiness_no_quantum_safe() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            quantum_safe_count: 0,
            hybrid_pqc_count: 0,
            weak_algorithm_count: 0,
            ..Default::default()
        };
        let score = m.pqc_readiness_score();
        assert!(
            (score - 25.0).abs() < 0.1,
            "no safe, no weak → 25, got {score}"
        );
    }
    // Every linkable asset resolved -> dependency score 100.
    #[test]
    fn crypto_dependency_all_resolved() {
        let m = CryptographyMetrics {
            certificates_count: 2,
            keys_count: 3,
            protocols_count: 1,
            certs_with_signature_algo_ref: 2,
            keys_with_algorithm_ref: 3,
            protocols_with_cipher_suites: 1,
            ..Default::default()
        };
        assert!((m.crypto_dependency_score() - 100.0).abs() < 0.1);
    }
    // Linkable assets present but none resolved -> dependency score 0.
    #[test]
    fn crypto_dependency_none_resolved() {
        let m = CryptographyMetrics {
            certificates_count: 2,
            keys_count: 3,
            protocols_count: 1,
            ..Default::default()
        };
        assert!((m.crypto_dependency_score() - 0.0).abs() < 0.1);
    }
    // With zero crypto components, there is no quality score at all.
    #[test]
    fn quality_score_none_when_no_crypto() {
        let m = CryptographyMetrics::default();
        assert!(m.quality_score().is_none());
    }
    // The percentage variant reports 0 (not 100) for zero algorithms.
    #[test]
    fn quantum_readiness_pct_zero_algorithms() {
        let m = CryptographyMetrics::default();
        assert!((m.quantum_readiness_pct() - 0.0).abs() < 0.01);
    }
}