1use std::collections::{BTreeMap, HashMap, HashSet};
6
7use crate::model::{
8 CompletenessDeclaration, ComponentType, CreatorType, CryptoAssetType, CryptoMaterialState,
9 CryptoPrimitive, EolStatus, ExternalRefType, HashAlgorithm, NormalizedSbom, StalenessLevel,
10};
11use serde::{Deserialize, Serialize};
12
/// Percentage-based coverage of key metadata fields across an SBOM's
/// components, plus document-level presence flags.
///
/// All `components_with_*` fields are percentages in `0.0..=100.0`
/// (computed in `from_sbom`), not raw counts.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletenessMetrics {
    /// % of components with a version string.
    pub components_with_version: f32,
    /// % of components with a package URL (purl).
    pub components_with_purl: f32,
    /// % of components with at least one CPE identifier.
    pub components_with_cpe: f32,
    /// % of components naming a supplier.
    pub components_with_supplier: f32,
    /// % of components carrying at least one hash.
    pub components_with_hashes: f32,
    /// % of components with a declared or concluded license.
    pub components_with_licenses: f32,
    /// % of components with a description.
    pub components_with_description: f32,
    /// Whether the document lists any creators.
    pub has_creator_info: bool,
    /// Always `true` from `from_sbom` — the normalized document model
    /// carries a creation timestamp unconditionally. TODO(review):
    /// confirm this flag is still meaningful.
    pub has_timestamp: bool,
    /// Whether the document has a serial number.
    pub has_serial_number: bool,
    /// Total number of components examined.
    pub total_components: usize,
}
39
40impl CompletenessMetrics {
41 #[must_use]
43 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
44 let total = sbom.components.len();
45 if total == 0 {
46 return Self::empty();
47 }
48
49 let mut with_version = 0;
50 let mut with_purl = 0;
51 let mut with_cpe = 0;
52 let mut with_supplier = 0;
53 let mut with_hashes = 0;
54 let mut with_licenses = 0;
55 let mut with_description = 0;
56
57 for comp in sbom.components.values() {
58 if comp.version.is_some() {
59 with_version += 1;
60 }
61 if comp.identifiers.purl.is_some() {
62 with_purl += 1;
63 }
64 if !comp.identifiers.cpe.is_empty() {
65 with_cpe += 1;
66 }
67 if comp.supplier.is_some() {
68 with_supplier += 1;
69 }
70 if !comp.hashes.is_empty() {
71 with_hashes += 1;
72 }
73 if !comp.licenses.declared.is_empty() || comp.licenses.concluded.is_some() {
74 with_licenses += 1;
75 }
76 if comp.description.is_some() {
77 with_description += 1;
78 }
79 }
80
81 let pct = |count: usize| (count as f32 / total as f32) * 100.0;
82
83 Self {
84 components_with_version: pct(with_version),
85 components_with_purl: pct(with_purl),
86 components_with_cpe: pct(with_cpe),
87 components_with_supplier: pct(with_supplier),
88 components_with_hashes: pct(with_hashes),
89 components_with_licenses: pct(with_licenses),
90 components_with_description: pct(with_description),
91 has_creator_info: !sbom.document.creators.is_empty(),
92 has_timestamp: true, has_serial_number: sbom.document.serial_number.is_some(),
94 total_components: total,
95 }
96 }
97
98 #[must_use]
100 pub const fn empty() -> Self {
101 Self {
102 components_with_version: 0.0,
103 components_with_purl: 0.0,
104 components_with_cpe: 0.0,
105 components_with_supplier: 0.0,
106 components_with_hashes: 0.0,
107 components_with_licenses: 0.0,
108 components_with_description: 0.0,
109 has_creator_info: false,
110 has_timestamp: false,
111 has_serial_number: false,
112 total_components: 0,
113 }
114 }
115
116 #[must_use]
118 pub fn overall_score(&self, weights: &CompletenessWeights) -> f32 {
119 let mut score = 0.0;
120 let mut total_weight = 0.0;
121
122 score += self.components_with_version * weights.version;
124 total_weight += weights.version * 100.0;
125
126 score += self.components_with_purl * weights.purl;
127 total_weight += weights.purl * 100.0;
128
129 score += self.components_with_cpe * weights.cpe;
130 total_weight += weights.cpe * 100.0;
131
132 score += self.components_with_supplier * weights.supplier;
133 total_weight += weights.supplier * 100.0;
134
135 score += self.components_with_hashes * weights.hashes;
136 total_weight += weights.hashes * 100.0;
137
138 score += self.components_with_licenses * weights.licenses;
139 total_weight += weights.licenses * 100.0;
140
141 if self.has_creator_info {
143 score += 100.0 * weights.creator_info;
144 }
145 total_weight += weights.creator_info * 100.0;
146
147 if self.has_serial_number {
148 score += 100.0 * weights.serial_number;
149 }
150 total_weight += weights.serial_number * 100.0;
151
152 if total_weight > 0.0 {
153 (score / total_weight) * 100.0
154 } else {
155 0.0
156 }
157 }
158}
159
/// Relative weights used by `CompletenessMetrics::overall_score`.
///
/// Weights are relative (not percentages); the `Default` impl favors
/// purl and license coverage over document-level provenance fields.
#[derive(Debug, Clone)]
pub struct CompletenessWeights {
    pub version: f32,
    pub purl: f32,
    pub cpe: f32,
    pub supplier: f32,
    pub hashes: f32,
    pub licenses: f32,
    pub creator_info: f32,
    pub serial_number: f32,
}
172
173impl Default for CompletenessWeights {
174 fn default() -> Self {
175 Self {
176 version: 1.0,
177 purl: 1.5, cpe: 0.5, supplier: 1.0,
180 hashes: 1.0,
181 licenses: 1.2, creator_info: 0.3,
183 serial_number: 0.2,
184 }
185 }
186}
187
/// Counts describing hash coverage and strength across components.
///
/// "Vendor" counts cover components that name a supplier or author and
/// have an authoritative canonical id (see `from_sbom`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HashQualityMetrics {
    /// Components carrying at least one hash.
    pub components_with_any_hash: usize,
    /// Components with at least one strong hash (see `is_strong_hash`).
    pub components_with_strong_hash: usize,
    /// Components whose hashes are all weak (e.g. MD5/SHA-1).
    pub components_with_weak_only: usize,
    /// Hash count per algorithm label; `BTreeMap` keeps output ordered.
    pub algorithm_distribution: BTreeMap<String, usize>,
    /// Total number of hash entries across all components.
    pub total_hashes: usize,
    /// Vendor components seen (denominator for the coverage ratios).
    pub vendor_components_total: usize,
    /// Vendor components with at least one hash.
    pub vendor_components_with_hash: usize,
    /// Vendor components with at least one strong hash.
    pub vendor_components_with_strong_hash: usize,
}
215
216impl HashQualityMetrics {
217 #[must_use]
219 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
220 let mut with_any = 0;
221 let mut with_strong = 0;
222 let mut with_weak_only = 0;
223 let mut distribution: BTreeMap<String, usize> = BTreeMap::new();
224 let mut total_hashes = 0;
225 let mut vendor_total = 0;
226 let mut vendor_with_hash = 0;
227 let mut vendor_with_strong = 0;
228
229 for comp in sbom.components.values() {
230 let is_vendor = (comp.supplier.is_some() || comp.author.is_some())
232 && !matches!(
233 comp.canonical_id.source(),
234 crate::model::IdSource::Synthetic
235 | crate::model::IdSource::FormatSpecific
236 | crate::model::IdSource::NameVersion
237 );
238 if is_vendor {
239 vendor_total += 1;
240 }
241
242 if comp.hashes.is_empty() {
243 continue;
244 }
245 with_any += 1;
246 total_hashes += comp.hashes.len();
247
248 let mut has_strong = false;
249 let mut has_weak = false;
250
251 for hash in &comp.hashes {
252 let label = hash_algorithm_label(&hash.algorithm);
253 *distribution.entry(label).or_insert(0) += 1;
254
255 if is_strong_hash(&hash.algorithm) {
256 has_strong = true;
257 } else {
258 has_weak = true;
259 }
260 }
261
262 if has_strong {
263 with_strong += 1;
264 } else if has_weak {
265 with_weak_only += 1;
266 }
267
268 if is_vendor {
269 vendor_with_hash += 1;
270 if has_strong {
271 vendor_with_strong += 1;
272 }
273 }
274 }
275
276 Self {
277 components_with_any_hash: with_any,
278 components_with_strong_hash: with_strong,
279 components_with_weak_only: with_weak_only,
280 algorithm_distribution: distribution,
281 total_hashes,
282 vendor_components_total: vendor_total,
283 vendor_components_with_hash: vendor_with_hash,
284 vendor_components_with_strong_hash: vendor_with_strong,
285 }
286 }
287
288 #[must_use]
292 pub fn vendor_hash_coverage(&self) -> Option<f64> {
293 if self.vendor_components_total == 0 {
294 None
295 } else {
296 #[allow(clippy::cast_precision_loss)]
297 Some(self.vendor_components_with_hash as f64 / self.vendor_components_total as f64)
298 }
299 }
300
301 #[must_use]
303 pub fn vendor_strong_hash_coverage(&self) -> Option<f64> {
304 if self.vendor_components_total == 0 {
305 None
306 } else {
307 #[allow(clippy::cast_precision_loss)]
308 Some(
309 self.vendor_components_with_strong_hash as f64
310 / self.vendor_components_total as f64,
311 )
312 }
313 }
314
315 #[must_use]
320 pub fn quality_score(&self, total_components: usize) -> f32 {
321 if total_components == 0 {
322 return 0.0;
323 }
324
325 let any_coverage = self.components_with_any_hash as f32 / total_components as f32;
326 let strong_coverage = self.components_with_strong_hash as f32 / total_components as f32;
327 let weak_only_ratio = self.components_with_weak_only as f32 / total_components as f32;
328
329 let base = any_coverage * 60.0;
330 let strong_bonus = strong_coverage * 40.0;
331 let weak_penalty = weak_only_ratio * 10.0;
332
333 (base + strong_bonus - weak_penalty).clamp(0.0, 100.0)
334 }
335}
336
337fn is_strong_hash(algo: &HashAlgorithm) -> bool {
339 matches!(
340 algo,
341 HashAlgorithm::Sha256
342 | HashAlgorithm::Sha384
343 | HashAlgorithm::Sha512
344 | HashAlgorithm::Sha3_256
345 | HashAlgorithm::Sha3_384
346 | HashAlgorithm::Sha3_512
347 | HashAlgorithm::Blake2b256
348 | HashAlgorithm::Blake2b384
349 | HashAlgorithm::Blake2b512
350 | HashAlgorithm::Blake3
351 | HashAlgorithm::Streebog256
352 | HashAlgorithm::Streebog512
353 )
354}
355
356fn hash_algorithm_label(algo: &HashAlgorithm) -> String {
358 match algo {
359 HashAlgorithm::Md5 => "MD5".to_string(),
360 HashAlgorithm::Sha1 => "SHA-1".to_string(),
361 HashAlgorithm::Sha256 => "SHA-256".to_string(),
362 HashAlgorithm::Sha384 => "SHA-384".to_string(),
363 HashAlgorithm::Sha512 => "SHA-512".to_string(),
364 HashAlgorithm::Sha3_256 => "SHA3-256".to_string(),
365 HashAlgorithm::Sha3_384 => "SHA3-384".to_string(),
366 HashAlgorithm::Sha3_512 => "SHA3-512".to_string(),
367 HashAlgorithm::Blake2b256 => "BLAKE2b-256".to_string(),
368 HashAlgorithm::Blake2b384 => "BLAKE2b-384".to_string(),
369 HashAlgorithm::Blake2b512 => "BLAKE2b-512".to_string(),
370 HashAlgorithm::Blake3 => "BLAKE3".to_string(),
371 HashAlgorithm::Streebog256 => "Streebog-256".to_string(),
372 HashAlgorithm::Streebog512 => "Streebog-512".to_string(),
373 HashAlgorithm::Other(s) => s.clone(),
374 }
375}
376
377#[derive(Debug, Clone, Serialize, Deserialize)]
383pub struct IdentifierMetrics {
384 pub valid_purls: usize,
386 pub invalid_purls: usize,
388 pub valid_cpes: usize,
390 pub invalid_cpes: usize,
392 pub with_swid: usize,
394 pub ecosystems: Vec<String>,
396 pub missing_all_identifiers: usize,
398}
399
400impl IdentifierMetrics {
401 #[must_use]
403 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
404 let mut valid_purls = 0;
405 let mut invalid_purls = 0;
406 let mut valid_cpes = 0;
407 let mut invalid_cpes = 0;
408 let mut with_swid = 0;
409 let mut missing_all = 0;
410 let mut ecosystems = std::collections::HashSet::new();
411
412 for comp in sbom.components.values() {
413 let has_purl = comp.identifiers.purl.is_some();
414 let has_cpe = !comp.identifiers.cpe.is_empty();
415 let has_swid = comp.identifiers.swid.is_some();
416
417 if let Some(ref purl) = comp.identifiers.purl {
418 if is_valid_purl(purl) {
419 valid_purls += 1;
420 if let Some(eco) = extract_ecosystem_from_purl(purl) {
422 ecosystems.insert(eco);
423 }
424 } else {
425 invalid_purls += 1;
426 }
427 }
428
429 for cpe in &comp.identifiers.cpe {
430 if is_valid_cpe(cpe) {
431 valid_cpes += 1;
432 } else {
433 invalid_cpes += 1;
434 }
435 }
436
437 if has_swid {
438 with_swid += 1;
439 }
440
441 if !has_purl && !has_cpe && !has_swid {
442 missing_all += 1;
443 }
444 }
445
446 let mut ecosystem_list: Vec<String> = ecosystems.into_iter().collect();
447 ecosystem_list.sort();
448
449 Self {
450 valid_purls,
451 invalid_purls,
452 valid_cpes,
453 invalid_cpes,
454 with_swid,
455 ecosystems: ecosystem_list,
456 missing_all_identifiers: missing_all,
457 }
458 }
459
460 #[must_use]
462 pub fn quality_score(&self, total_components: usize) -> f32 {
463 if total_components == 0 {
464 return 0.0;
465 }
466
467 let with_valid_id = self.valid_purls + self.valid_cpes + self.with_swid;
468 let coverage =
469 (with_valid_id.min(total_components) as f32 / total_components as f32) * 100.0;
470
471 let invalid_count = self.invalid_purls + self.invalid_cpes;
473 let penalty = (invalid_count as f32 / total_components as f32) * 20.0;
474
475 (coverage - penalty).clamp(0.0, 100.0)
476 }
477}
478
479#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LicenseMetrics {
    /// Components with at least one declared license.
    pub with_declared: usize,
    /// Components with a concluded license.
    pub with_concluded: usize,
    /// Declared expressions that are valid SPDX (counted per expression).
    pub valid_spdx_expressions: usize,
    /// Declared expressions that are neither valid SPDX nor NOASSERTION.
    pub non_standard_licenses: usize,
    /// Declared expressions equal to "NOASSERTION".
    pub noassertion_count: usize,
    /// Declared expressions using deprecated SPDX identifiers.
    pub deprecated_licenses: usize,
    /// Declared expressions classified as restrictive.
    pub restrictive_licenses: usize,
    /// Sorted unique restrictive/copyleft license expressions.
    pub copyleft_license_ids: Vec<String>,
    /// Sorted unique declared license expressions.
    pub unique_licenses: Vec<String>,
}
501
502impl LicenseMetrics {
503 #[must_use]
505 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
506 let mut with_declared = 0;
507 let mut with_concluded = 0;
508 let mut valid_spdx = 0;
509 let mut non_standard = 0;
510 let mut noassertion = 0;
511 let mut deprecated = 0;
512 let mut restrictive = 0;
513 let mut licenses = HashSet::new();
514 let mut copyleft_ids = HashSet::new();
515
516 for comp in sbom.components.values() {
517 if !comp.licenses.declared.is_empty() {
518 with_declared += 1;
519 for lic in &comp.licenses.declared {
520 let expr = &lic.expression;
521 licenses.insert(expr.clone());
522
523 if expr == "NOASSERTION" {
524 noassertion += 1;
525 } else if is_valid_spdx_license(expr) {
526 valid_spdx += 1;
527 } else {
528 non_standard += 1;
529 }
530
531 if is_deprecated_spdx_license(expr) {
532 deprecated += 1;
533 }
534 if is_restrictive_license(expr) {
535 restrictive += 1;
536 copyleft_ids.insert(expr.clone());
537 }
538 }
539 }
540
541 if comp.licenses.concluded.is_some() {
542 with_concluded += 1;
543 }
544 }
545
546 let mut license_list: Vec<String> = licenses.into_iter().collect();
547 license_list.sort();
548
549 let mut copyleft_list: Vec<String> = copyleft_ids.into_iter().collect();
550 copyleft_list.sort();
551
552 Self {
553 with_declared,
554 with_concluded,
555 valid_spdx_expressions: valid_spdx,
556 non_standard_licenses: non_standard,
557 noassertion_count: noassertion,
558 deprecated_licenses: deprecated,
559 restrictive_licenses: restrictive,
560 copyleft_license_ids: copyleft_list,
561 unique_licenses: license_list,
562 }
563 }
564
565 #[must_use]
567 pub fn quality_score(&self, total_components: usize) -> f32 {
568 if total_components == 0 {
569 return 0.0;
570 }
571
572 let coverage = (self.with_declared as f32 / total_components as f32) * 60.0;
573
574 let spdx_ratio = if self.with_declared > 0 {
576 self.valid_spdx_expressions as f32 / self.with_declared as f32
577 } else {
578 0.0
579 };
580 let spdx_bonus = spdx_ratio * 30.0;
581
582 let noassertion_penalty =
584 (self.noassertion_count as f32 / total_components.max(1) as f32) * 10.0;
585
586 let deprecated_penalty = (self.deprecated_licenses as f32 * 2.0).min(10.0);
588
589 (coverage + spdx_bonus - noassertion_penalty - deprecated_penalty).clamp(0.0, 100.0)
590 }
591}
592
593#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VulnerabilityMetrics {
    /// Components with at least one vulnerability record.
    pub components_with_vulns: usize,
    /// Total vulnerability records across all components.
    pub total_vulnerabilities: usize,
    /// Records carrying at least one CVSS vector.
    pub with_cvss: usize,
    /// Records carrying at least one CWE id.
    pub with_cwe: usize,
    /// Records carrying remediation advice.
    pub with_remediation: usize,
    /// Components with a VEX status (on the component or any record).
    pub with_vex_status: usize,
}
609
610impl VulnerabilityMetrics {
611 #[must_use]
613 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
614 let mut components_with_vulns = 0;
615 let mut total_vulns = 0;
616 let mut with_cvss = 0;
617 let mut with_cwe = 0;
618 let mut with_remediation = 0;
619 let mut with_vex = 0;
620
621 for comp in sbom.components.values() {
622 if !comp.vulnerabilities.is_empty() {
623 components_with_vulns += 1;
624 }
625
626 for vuln in &comp.vulnerabilities {
627 total_vulns += 1;
628
629 if !vuln.cvss.is_empty() {
630 with_cvss += 1;
631 }
632 if !vuln.cwes.is_empty() {
633 with_cwe += 1;
634 }
635 if vuln.remediation.is_some() {
636 with_remediation += 1;
637 }
638 }
639
640 if comp.vex_status.is_some()
641 || comp.vulnerabilities.iter().any(|v| v.vex_status.is_some())
642 {
643 with_vex += 1;
644 }
645 }
646
647 Self {
648 components_with_vulns,
649 total_vulnerabilities: total_vulns,
650 with_cvss,
651 with_cwe,
652 with_remediation,
653 with_vex_status: with_vex,
654 }
655 }
656
657 #[must_use]
664 pub fn documentation_score(&self) -> Option<f32> {
665 if self.total_vulnerabilities == 0 {
666 return None; }
668
669 let cvss_ratio = self.with_cvss as f32 / self.total_vulnerabilities as f32;
670 let cwe_ratio = self.with_cwe as f32 / self.total_vulnerabilities as f32;
671 let remediation_ratio = self.with_remediation as f32 / self.total_vulnerabilities as f32;
672
673 Some(
674 remediation_ratio
675 .mul_add(30.0, cvss_ratio.mul_add(40.0, cwe_ratio * 30.0))
676 .min(100.0),
677 )
678 }
679}
680
/// Edge-count ceiling above which the expensive graph analyses (depth,
/// cycles, islands, complexity) are skipped; see
/// `DependencyMetrics::from_sbom`.
const MAX_EDGES_FOR_GRAPH_ANALYSIS: usize = 50_000;
687
/// Bucketed dependency-graph complexity, derived from the simplicity
/// score in `from_score` (higher score = lower complexity).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[non_exhaustive]
pub enum ComplexityLevel {
    /// Simplicity score 75 and above.
    Low,
    /// Simplicity score 50-74.
    Moderate,
    /// Simplicity score 25-49.
    High,
    /// Simplicity score below 25.
    VeryHigh,
}
705
706impl ComplexityLevel {
707 #[must_use]
709 pub const fn from_score(simplicity: f32) -> Self {
710 match simplicity as u32 {
711 75..=100 => Self::Low,
712 50..=74 => Self::Moderate,
713 25..=49 => Self::High,
714 _ => Self::VeryHigh,
715 }
716 }
717
718 #[must_use]
720 pub const fn label(&self) -> &'static str {
721 match self {
722 Self::Low => "Low",
723 Self::Moderate => "Moderate",
724 Self::High => "High",
725 Self::VeryHigh => "Very High",
726 }
727 }
728}
729
730impl std::fmt::Display for ComplexityLevel {
731 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
732 f.write_str(self.label())
733 }
734}
735
/// Normalized (`0.0..=1.0`) inputs to the software complexity index;
/// see `compute_complexity` for the exact formulas and weights.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComplexityFactors {
    /// Log-scaled edges-per-component ratio.
    pub dependency_volume: f32,
    /// Max dependency depth scaled against a depth of 15.
    pub normalized_depth: f32,
    /// Max fan-out relative to a quarter of the component count.
    pub fanout_concentration: f32,
    /// Cycle count relative to 5% of the component count.
    pub cycle_ratio: f32,
    /// Extra disconnected islands relative to the component count.
    pub fragmentation: f32,
}
751
/// Dependency-graph shape statistics for an SBOM.
///
/// The graph-analysis fields (`max_depth`, `avg_depth`, cycle/island
/// counts, complexity) are `None`/zero with `graph_analysis_skipped`
/// set when the edge count exceeds `MAX_EDGES_FOR_GRAPH_ANALYSIS`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DependencyMetrics {
    /// Total dependency edges.
    pub total_dependencies: usize,
    /// Components with at least one outgoing edge.
    pub components_with_deps: usize,
    /// Longest root-to-node BFS distance, if analyzed.
    pub max_depth: Option<usize>,
    /// Mean BFS depth across reachable nodes, if analyzed.
    pub avg_depth: Option<f32>,
    /// Components participating in no edge at all.
    pub orphan_components: usize,
    /// Components with outgoing but no incoming edges.
    pub root_components: usize,
    /// Back edges found during DFS (cycle indicator).
    pub cycle_count: usize,
    /// Weakly-connected components of the graph.
    pub island_count: usize,
    /// `true` when the edge count exceeded the analysis ceiling.
    pub graph_analysis_skipped: bool,
    /// Largest number of direct dependencies on a single component.
    pub max_out_degree: usize,
    /// Simplicity index in 0-100 (higher = simpler), if analyzed.
    pub software_complexity_index: Option<f32>,
    /// Bucketed complexity level, if analyzed.
    pub complexity_level: Option<ComplexityLevel>,
    /// Normalized factor breakdown, if analyzed.
    pub complexity_factors: Option<ComplexityFactors>,
}
782
impl DependencyMetrics {
    /// Builds the dependency graph from `sbom.edges` and computes shape
    /// statistics.
    ///
    /// When the edge count exceeds `MAX_EDGES_FOR_GRAPH_ANALYSIS`, the
    /// expensive analyses (depth, cycles, islands, complexity) are
    /// skipped and `graph_analysis_skipped` is set; the cheap counters
    /// (orphans, roots, fan-out) are still populated.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        use crate::model::CanonicalId;

        let total_deps = sbom.edges.len();

        // Adjacency list plus in/out-degree presence sets, all borrowing
        // the id strings owned by the SBOM.
        let mut children: HashMap<&str, Vec<&str>> = HashMap::new();
        let mut has_outgoing: HashSet<&str> = HashSet::new();
        let mut has_incoming: HashSet<&str> = HashSet::new();

        for edge in &sbom.edges {
            children
                .entry(edge.from.value())
                .or_default()
                .push(edge.to.value());
            has_outgoing.insert(edge.from.value());
            has_incoming.insert(edge.to.value());
        }

        let all_ids: Vec<&str> = sbom.components.keys().map(CanonicalId::value).collect();

        // Orphans participate in no edge at all.
        let orphans = all_ids
            .iter()
            .filter(|c| !has_outgoing.contains(*c) && !has_incoming.contains(*c))
            .count();

        // Roots have dependencies but nothing depends on them.
        let roots: Vec<&str> = has_outgoing
            .iter()
            .filter(|c| !has_incoming.contains(*c))
            .copied()
            .collect();
        let root_count = roots.len();

        let max_out_degree = children.values().map(Vec::len).max().unwrap_or(0);

        // Bail out before the O(V+E) traversals on oversized graphs.
        if total_deps > MAX_EDGES_FOR_GRAPH_ANALYSIS {
            return Self {
                total_dependencies: total_deps,
                components_with_deps: has_outgoing.len(),
                max_depth: None,
                avg_depth: None,
                orphan_components: orphans,
                root_components: root_count,
                cycle_count: 0,
                island_count: 0,
                graph_analysis_skipped: true,
                max_out_degree,
                software_complexity_index: None,
                complexity_level: None,
                complexity_factors: None,
            };
        }

        let (max_depth, avg_depth) = compute_depth(&roots, &children);

        let cycle_count = detect_cycles(&all_ids, &children);

        let island_count = count_islands(&all_ids, &sbom.edges);

        let component_count = all_ids.len();
        let (complexity_index, complexity_lvl, factors) = compute_complexity(
            total_deps,
            component_count,
            max_depth.unwrap_or(0),
            max_out_degree,
            cycle_count,
            orphans,
            island_count,
        );

        Self {
            total_dependencies: total_deps,
            components_with_deps: has_outgoing.len(),
            max_depth,
            avg_depth,
            orphan_components: orphans,
            root_components: root_count,
            cycle_count,
            island_count,
            graph_analysis_skipped: false,
            max_out_degree,
            software_complexity_index: Some(complexity_index),
            complexity_level: Some(complexity_lvl),
            complexity_factors: Some(factors),
        }
    }

    /// Graph-quality score in `0.0..=100.0`.
    ///
    /// Coverage is components-with-dependencies over `total - 1`
    /// (presumably excluding one terminal node from the expectation —
    /// TODO(review): confirm the rationale for the denominator);
    /// orphans, cycles, and excess islands subtract capped penalties.
    #[must_use]
    pub fn quality_score(&self, total_components: usize) -> f32 {
        if total_components == 0 {
            return 0.0;
        }

        // A single-component SBOM trivially gets full coverage.
        let coverage = if total_components > 1 {
            (self.components_with_deps as f32 / (total_components - 1) as f32) * 100.0
        } else {
            100.0 };

        let orphan_ratio = self.orphan_components as f32 / total_components as f32;
        let orphan_penalty = orphan_ratio * 10.0;

        // 5 points per cycle, capped at 20.
        let cycle_penalty = (self.cycle_count as f32 * 5.0).min(20.0);

        // Up to 3 islands are tolerated on graphs of more than 5 nodes.
        let island_penalty = if total_components > 5 && self.island_count > 3 {
            ((self.island_count - 3) as f32 * 3.0).min(15.0)
        } else {
            0.0
        };

        (coverage - orphan_penalty - cycle_penalty - island_penalty).clamp(0.0, 100.0)
    }
}
910
/// Multi-source BFS over the dependency graph.
///
/// Returns `(max depth, mean depth)` across every node reachable from
/// `roots`, with all roots at depth 0. Nodes reachable from several
/// roots are visited once, at their shortest distance. Returns
/// `(None, None)` when there are no roots (e.g. a fully cyclic graph).
fn compute_depth(
    roots: &[&str],
    children: &HashMap<&str, Vec<&str>>,
) -> (Option<usize>, Option<f32>) {
    use std::collections::VecDeque;

    if roots.is_empty() {
        return (None, None);
    }

    // Seed the frontier with every distinct root at depth 0.
    let mut seen: HashSet<&str> = HashSet::new();
    let mut frontier: VecDeque<(&str, usize)> = roots
        .iter()
        .filter(|&&root| seen.insert(root))
        .map(|&root| (root, 0))
        .collect();

    let mut deepest = 0usize;
    let mut depth_sum = 0usize;
    let mut visited = 0usize;

    while let Some((node, depth)) = frontier.pop_front() {
        deepest = deepest.max(depth);
        depth_sum += depth;
        visited += 1;

        for &child in children.get(node).into_iter().flatten() {
            if seen.insert(child) {
                frontier.push_back((child, depth + 1));
            }
        }
    }

    // `roots` is non-empty, so at least one node was visited.
    let avg = if visited > 0 {
        Some(depth_sum as f32 / visited as f32)
    } else {
        None
    };

    (Some(deepest), avg)
}
956
/// Counts back edges found during a depth-first traversal — each edge
/// into a node currently on the DFS path indicates a cycle.
///
/// Uses an explicit stack instead of recursion so that deep dependency
/// chains (graphs up to `MAX_EDGES_FOR_GRAPH_ANALYSIS` edges are
/// analyzed) cannot overflow the call stack. Visit order and the
/// resulting count match the previous recursive implementation.
fn detect_cycles<'a>(all_nodes: &[&'a str], children: &HashMap<&'a str, Vec<&'a str>>) -> usize {
    // Classic three-color DFS states.
    const WHITE: u8 = 0; // unvisited
    const GRAY: u8 = 1; // on the current DFS path
    const BLACK: u8 = 2; // fully explored

    let mut color: HashMap<&str, u8> = HashMap::with_capacity(all_nodes.len());
    for &node in all_nodes {
        color.insert(node, WHITE);
    }

    let mut cycles = 0;
    // Each frame is (node, index of the next child edge to examine).
    let mut stack: Vec<(&str, usize)> = Vec::new();

    for &start in all_nodes {
        if color.get(start).copied().unwrap_or(WHITE) != WHITE {
            continue;
        }
        color.insert(start, GRAY);
        stack.push((start, 0));

        while let Some(frame) = stack.last_mut() {
            let (node, next) = *frame;
            // Advance the cursor now; the copy `next` drives this step.
            frame.1 += 1;

            let kids = children.get(node).map(Vec::as_slice).unwrap_or_default();
            match kids.get(next) {
                None => {
                    // All children examined: retire the node.
                    color.insert(node, BLACK);
                    stack.pop();
                }
                Some(&kid) => match color.get(kid).copied().unwrap_or(WHITE) {
                    GRAY => cycles += 1, // back edge: `kid` is on the path
                    WHITE => {
                        color.insert(kid, GRAY);
                        stack.push((kid, 0));
                    }
                    _ => {} // BLACK: already fully explored
                },
            }
        }
    }

    cycles
}
999
1000fn count_islands(all_nodes: &[&str], edges: &[crate::model::DependencyEdge]) -> usize {
1002 if all_nodes.is_empty() {
1003 return 0;
1004 }
1005
1006 let node_idx: HashMap<&str, usize> =
1008 all_nodes.iter().enumerate().map(|(i, &n)| (n, i)).collect();
1009
1010 let mut parent: Vec<usize> = (0..all_nodes.len()).collect();
1011 let mut rank: Vec<u8> = vec![0; all_nodes.len()];
1012
1013 fn find(parent: &mut Vec<usize>, x: usize) -> usize {
1014 if parent[x] != x {
1015 parent[x] = find(parent, parent[x]); }
1017 parent[x]
1018 }
1019
1020 fn union(parent: &mut Vec<usize>, rank: &mut [u8], a: usize, b: usize) {
1021 let ra = find(parent, a);
1022 let rb = find(parent, b);
1023 if ra != rb {
1024 if rank[ra] < rank[rb] {
1025 parent[ra] = rb;
1026 } else if rank[ra] > rank[rb] {
1027 parent[rb] = ra;
1028 } else {
1029 parent[rb] = ra;
1030 rank[ra] += 1;
1031 }
1032 }
1033 }
1034
1035 for edge in edges {
1036 if let (Some(&a), Some(&b)) = (
1037 node_idx.get(edge.from.value()),
1038 node_idx.get(edge.to.value()),
1039 ) {
1040 union(&mut parent, &mut rank, a, b);
1041 }
1042 }
1043
1044 let mut roots = HashSet::new();
1046 for i in 0..all_nodes.len() {
1047 roots.insert(find(&mut parent, i));
1048 }
1049
1050 roots.len()
1051}
1052
/// Folds graph statistics into a simplicity index (0-100, higher =
/// simpler), its `ComplexityLevel` bucket, and the per-factor
/// breakdown.
///
/// Each factor is normalized into `0.0..=1.0`; the weighted sum
/// (volume 0.30, depth 0.20, fan-out 0.20, cycles 0.20, fragmentation
/// 0.10) is then inverted: `100 - raw * 100`. `_orphans` is accepted
/// for signature stability but does not currently affect the result.
fn compute_complexity(
    edges: usize,
    components: usize,
    max_depth: usize,
    max_out_degree: usize,
    cycle_count: usize,
    _orphans: usize,
    islands: usize,
) -> (f32, ComplexityLevel, ComplexityFactors) {
    // An empty graph is reported as maximally simple.
    if components == 0 {
        let factors = ComplexityFactors {
            dependency_volume: 0.0,
            normalized_depth: 0.0,
            fanout_concentration: 0.0,
            cycle_ratio: 0.0,
            fragmentation: 0.0,
        };
        return (100.0, ComplexityLevel::Low, factors);
    }

    // Log-scaled edges-per-component; saturates at 1.0 once the ratio
    // reaches 19 (ln(1 + 19) = ln 20).
    let edge_ratio = edges as f64 / components as f64;
    let dependency_volume = ((1.0 + edge_ratio).ln() / 20.0_f64.ln()).min(1.0) as f32;

    // Depth saturates at 15 levels.
    let normalized_depth = (max_depth as f32 / 15.0).min(1.0);

    // Fan-out is judged against a quarter of the graph (at least 4).
    let fanout_denom = (components as f32 * 0.25).max(4.0);
    let fanout_concentration = (max_out_degree as f32 / fanout_denom).min(1.0);

    // Cycles are judged against 5% of the graph (at least 1).
    let cycle_threshold = (components as f32 * 0.05).max(1.0);
    let cycle_ratio = (cycle_count as f32 / cycle_threshold).min(1.0);

    // One island is expected; only extras count as fragmentation.
    let extra_islands = islands.saturating_sub(1);
    let fragmentation = if components > 1 {
        (extra_islands as f32 / (components - 1) as f32).min(1.0)
    } else {
        0.0
    };

    let factors = ComplexityFactors {
        dependency_volume,
        normalized_depth,
        fanout_concentration,
        cycle_ratio,
        fragmentation,
    };

    let raw_complexity = 0.30 * dependency_volume
        + 0.20 * normalized_depth
        + 0.20 * fanout_concentration
        + 0.20 * cycle_ratio
        + 0.10 * fragmentation;

    // Invert: a fully complex graph (raw = 1.0) scores 0 simplicity.
    let simplicity_index = (100.0 - raw_complexity * 100.0).clamp(0.0, 100.0);
    let level = ComplexityLevel::from_score(simplicity_index);

    (simplicity_index, level, factors)
}
1121
/// Document-level provenance facts extracted from SBOM metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProvenanceMetrics {
    /// A creator of type `Tool` is listed.
    pub has_tool_creator: bool,
    /// A tool creator's name looks like it embeds a version — it
    /// contains ' ', '/', or '@' (heuristic, see `from_sbom`).
    pub has_tool_version: bool,
    /// A creator of type `Organization` is listed.
    pub has_org_creator: bool,
    /// Any creator has an email address.
    pub has_contact_email: bool,
    /// The document has a serial number.
    pub has_serial_number: bool,
    /// The document has a name.
    pub has_document_name: bool,
    /// Days since the document's creation timestamp (clamped at 0).
    pub timestamp_age_days: u32,
    /// Age is under `FRESHNESS_THRESHOLD_DAYS`.
    pub is_fresh: bool,
    /// The SBOM identifies a primary component.
    pub has_primary_component: bool,
    /// Lifecycle phase, when present (scored only for CycloneDX in
    /// `quality_score`).
    pub lifecycle_phase: Option<String>,
    /// Declared completeness of the document.
    pub completeness_declaration: CompletenessDeclaration,
    /// The document carries a signature.
    pub has_signature: bool,
    /// `citations_count > 0`.
    pub has_citations: bool,
    /// Number of citations recorded on the document.
    pub citations_count: usize,
}
1158
/// Documents older than this many days are no longer considered fresh.
const FRESHNESS_THRESHOLD_DAYS: u32 = 90;
1161
1162impl ProvenanceMetrics {
1163 #[must_use]
1165 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
1166 let doc = &sbom.document;
1167
1168 let has_tool_creator = doc
1169 .creators
1170 .iter()
1171 .any(|c| c.creator_type == CreatorType::Tool);
1172 let has_tool_version = doc.creators.iter().any(|c| {
1173 c.creator_type == CreatorType::Tool
1174 && (c.name.contains(' ') || c.name.contains('/') || c.name.contains('@'))
1175 });
1176 let has_org_creator = doc
1177 .creators
1178 .iter()
1179 .any(|c| c.creator_type == CreatorType::Organization);
1180 let has_contact_email = doc.creators.iter().any(|c| c.email.is_some());
1181
1182 let age_days = (chrono::Utc::now() - doc.created).num_days().max(0) as u32;
1183
1184 Self {
1185 has_tool_creator,
1186 has_tool_version,
1187 has_org_creator,
1188 has_contact_email,
1189 has_serial_number: doc.serial_number.is_some(),
1190 has_document_name: doc.name.is_some(),
1191 timestamp_age_days: age_days,
1192 is_fresh: age_days < FRESHNESS_THRESHOLD_DAYS,
1193 has_primary_component: sbom.primary_component_id.is_some(),
1194 lifecycle_phase: doc.lifecycle_phase.clone(),
1195 completeness_declaration: doc.completeness_declaration.clone(),
1196 has_signature: doc.signature.is_some(),
1197 has_citations: doc.citations_count > 0,
1198 citations_count: doc.citations_count,
1199 }
1200 }
1201
1202 #[must_use]
1209 pub fn quality_score(&self, is_cyclonedx: bool) -> f32 {
1210 let mut score = 0.0;
1211 let mut total_weight = 0.0;
1212
1213 let completeness_declared =
1214 self.completeness_declaration != CompletenessDeclaration::Unknown;
1215
1216 let checks: &[(bool, f32)] = &[
1217 (self.has_tool_creator, 15.0),
1218 (self.has_tool_version, 5.0),
1219 (self.has_org_creator, 12.0),
1220 (self.has_contact_email, 8.0),
1221 (self.has_serial_number, 8.0),
1222 (self.has_document_name, 5.0),
1223 (self.is_fresh, 12.0),
1224 (self.has_primary_component, 12.0),
1225 (completeness_declared, 8.0),
1226 (self.has_signature, 5.0),
1227 ];
1228
1229 for &(present, weight) in checks {
1230 if present {
1231 score += weight;
1232 }
1233 total_weight += weight;
1234 }
1235
1236 if is_cyclonedx {
1238 let weight = 10.0;
1239 if self.lifecycle_phase.is_some() {
1240 score += weight;
1241 }
1242 total_weight += weight;
1243
1244 let citations_weight = 5.0;
1246 if self.has_citations {
1247 score += citations_weight;
1248 }
1249 total_weight += citations_weight;
1250 }
1251
1252 if total_weight > 0.0 {
1253 (score / total_weight) * 100.0
1254 } else {
1255 0.0
1256 }
1257 }
1258}
1259
1260#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditabilityMetrics {
    /// Components with a VCS external reference.
    pub components_with_vcs: usize,
    /// Components with a website external reference.
    pub components_with_website: usize,
    /// Components with an advisories external reference.
    pub components_with_advisories: usize,
    /// Components with at least one external reference of any type.
    pub components_with_any_external_ref: usize,
    /// The document declares a security contact.
    pub has_security_contact: bool,
    /// The document declares a vulnerability-disclosure URL.
    pub has_vuln_disclosure_url: bool,
}
1280
1281impl AuditabilityMetrics {
1282 #[must_use]
1284 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
1285 let mut with_vcs = 0;
1286 let mut with_website = 0;
1287 let mut with_advisories = 0;
1288 let mut with_any = 0;
1289
1290 for comp in sbom.components.values() {
1291 if comp.external_refs.is_empty() {
1292 continue;
1293 }
1294 with_any += 1;
1295
1296 let has_vcs = comp
1297 .external_refs
1298 .iter()
1299 .any(|r| r.ref_type == ExternalRefType::Vcs);
1300 let has_website = comp
1301 .external_refs
1302 .iter()
1303 .any(|r| r.ref_type == ExternalRefType::Website);
1304 let has_advisories = comp
1305 .external_refs
1306 .iter()
1307 .any(|r| r.ref_type == ExternalRefType::Advisories);
1308
1309 if has_vcs {
1310 with_vcs += 1;
1311 }
1312 if has_website {
1313 with_website += 1;
1314 }
1315 if has_advisories {
1316 with_advisories += 1;
1317 }
1318 }
1319
1320 Self {
1321 components_with_vcs: with_vcs,
1322 components_with_website: with_website,
1323 components_with_advisories: with_advisories,
1324 components_with_any_external_ref: with_any,
1325 has_security_contact: sbom.document.security_contact.is_some(),
1326 has_vuln_disclosure_url: sbom.document.vulnerability_disclosure_url.is_some(),
1327 }
1328 }
1329
1330 #[must_use]
1334 pub fn quality_score(&self, total_components: usize) -> f32 {
1335 if total_components == 0 {
1336 return 0.0;
1337 }
1338
1339 let ref_coverage =
1341 (self.components_with_any_external_ref as f32 / total_components as f32) * 40.0;
1342 let vcs_coverage = (self.components_with_vcs as f32 / total_components as f32) * 20.0;
1343
1344 let security_contact_score = if self.has_security_contact { 20.0 } else { 0.0 };
1346 let disclosure_score = if self.has_vuln_disclosure_url {
1347 20.0
1348 } else {
1349 0.0
1350 };
1351
1352 (ref_coverage + vcs_coverage + security_contact_score + disclosure_score).min(100.0)
1353 }
1354}
1355
/// Lifecycle/enrichment tallies (EOL, staleness, deprecation).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LifecycleMetrics {
    /// Components whose EOL status is `EndOfLife`.
    pub eol_components: usize,
    /// Components at staleness level `Stale` or `Abandoned`.
    pub stale_components: usize,
    /// Components flagged deprecated (staleness level or explicit flag).
    pub deprecated_components: usize,
    /// Components flagged archived (staleness level or explicit flag).
    pub archived_components: usize,
    /// Components whose staleness data records a latest version
    /// (treated as having a newer release available).
    pub outdated_components: usize,
    /// Components carrying any EOL or staleness data.
    pub enriched_components: usize,
    /// `enriched_components` as a percentage of all components.
    pub enrichment_coverage: f32,
}
1378
1379impl LifecycleMetrics {
1380 #[must_use]
1386 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
1387 let total = sbom.components.len();
1388 let mut eol = 0;
1389 let mut stale = 0;
1390 let mut deprecated = 0;
1391 let mut archived = 0;
1392 let mut outdated = 0;
1393 let mut enriched = 0;
1394
1395 for comp in sbom.components.values() {
1396 let has_lifecycle_data = comp.eol.is_some() || comp.staleness.is_some();
1397 if has_lifecycle_data {
1398 enriched += 1;
1399 }
1400
1401 if let Some(ref eol_info) = comp.eol
1402 && eol_info.status == EolStatus::EndOfLife
1403 {
1404 eol += 1;
1405 }
1406
1407 if let Some(ref stale_info) = comp.staleness {
1408 match stale_info.level {
1409 StalenessLevel::Stale | StalenessLevel::Abandoned => stale += 1,
1410 StalenessLevel::Deprecated => deprecated += 1,
1411 StalenessLevel::Archived => archived += 1,
1412 _ => {}
1413 }
1414 if stale_info.is_deprecated {
1415 deprecated += 1;
1416 }
1417 if stale_info.is_archived {
1418 archived += 1;
1419 }
1420 if stale_info.latest_version.is_some() {
1421 outdated += 1;
1422 }
1423 }
1424 }
1425
1426 let coverage = if total > 0 {
1427 (enriched as f32 / total as f32) * 100.0
1428 } else {
1429 0.0
1430 };
1431
1432 Self {
1433 eol_components: eol,
1434 stale_components: stale,
1435 deprecated_components: deprecated,
1436 archived_components: archived,
1437 outdated_components: outdated,
1438 enriched_components: enriched,
1439 enrichment_coverage: coverage,
1440 }
1441 }
1442
1443 #[must_use]
1445 pub fn has_data(&self) -> bool {
1446 self.enriched_components > 0
1447 }
1448
1449 #[must_use]
1454 pub fn quality_score(&self) -> Option<f32> {
1455 if !self.has_data() {
1456 return None;
1457 }
1458
1459 let mut score = 100.0_f32;
1460
1461 score -= (self.eol_components as f32 * 15.0).min(60.0);
1463 score -= (self.stale_components as f32 * 5.0).min(30.0);
1465 score -= ((self.deprecated_components + self.archived_components) as f32 * 3.0).min(20.0);
1467 score -= (self.outdated_components as f32 * 1.0).min(10.0);
1469
1470 Some(score.clamp(0.0, 100.0))
1471 }
1472}
1473
/// CBOM (cryptographic bill of materials) statistics aggregated over the
/// cryptographic components of an SBOM.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CryptographyMetrics {
    /// Components typed as cryptographic (with or without detailed properties).
    pub total_crypto_components: usize,
    /// Crypto assets whose asset type is `Algorithm`.
    pub algorithms_count: usize,
    /// Crypto assets whose asset type is `Certificate`.
    pub certificates_count: usize,
    /// Crypto assets whose asset type is `RelatedCryptoMaterial` (keys etc.).
    pub keys_count: usize,
    /// Crypto assets whose asset type is `Protocol`.
    pub protocols_count: usize,
    /// Algorithms reporting themselves quantum-safe.
    pub quantum_safe_count: usize,
    /// Algorithms with NIST quantum security level 0 (not quantum-safe).
    pub quantum_vulnerable_count: usize,
    /// Algorithms flagged weak by name.
    pub weak_algorithm_count: usize,
    /// Algorithms using a hybrid classical / post-quantum scheme.
    pub hybrid_pqc_count: usize,
    /// Certificates already past their validity window.
    pub expired_certificates: usize,
    /// Certificates expiring within 90 days (and not yet expired).
    pub expiring_soon_certificates: usize,
    /// Keys/material in the `Compromised` state.
    pub compromised_keys: usize,
    /// Keys below minimum size (128-bit symmetric / 2048-bit otherwise).
    pub inadequate_key_sizes: usize,
    /// Component names counted in `weak_algorithm_count`.
    pub weak_algorithm_names: Vec<String>,

    /// Algorithms carrying an OID.
    pub algorithms_with_oid: usize,
    /// Algorithms with a documented algorithm family.
    pub algorithms_with_family: usize,
    /// Algorithms with a known (non-`Other`) primitive.
    pub algorithms_with_primitive: usize,
    /// Algorithms documenting a classical or NIST quantum security level.
    pub algorithms_with_security_level: usize,

    /// Certificates referencing their signature algorithm.
    pub certs_with_signature_algo_ref: usize,
    /// Keys/material referencing their algorithm.
    pub keys_with_algorithm_ref: usize,
    /// Protocols listing at least one cipher suite.
    pub protocols_with_cipher_suites: usize,

    /// Keys/material with a lifecycle state recorded.
    pub keys_with_state: usize,
    /// Keys/material with a protection mechanism (`secured_by`) recorded.
    pub keys_with_protection: usize,
    /// Keys/material with a creation or activation date.
    pub keys_with_lifecycle_dates: usize,

    /// Certificates with both not-before and not-after dates.
    pub certs_with_validity_dates: usize,
}
1544
1545impl CryptographyMetrics {
1546 #[must_use]
1548 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
1549 let mut m = Self::default();
1550
1551 for comp in sbom.components.values() {
1552 if comp.component_type != ComponentType::Cryptographic {
1553 continue;
1554 }
1555 m.total_crypto_components += 1;
1556
1557 let Some(cp) = &comp.crypto_properties else {
1558 continue;
1559 };
1560
1561 match cp.asset_type {
1562 CryptoAssetType::Algorithm => {
1563 m.algorithms_count += 1;
1564 if cp.oid.is_some() {
1565 m.algorithms_with_oid += 1;
1566 }
1567 if let Some(algo) = &cp.algorithm_properties {
1568 if algo.algorithm_family.is_some() {
1569 m.algorithms_with_family += 1;
1570 }
1571 if !matches!(algo.primitive, CryptoPrimitive::Other(_)) {
1572 m.algorithms_with_primitive += 1;
1573 }
1574 if algo.classical_security_level.is_some()
1575 || algo.nist_quantum_security_level.is_some()
1576 {
1577 m.algorithms_with_security_level += 1;
1578 }
1579 if algo.is_quantum_safe() {
1580 m.quantum_safe_count += 1;
1581 } else if algo.nist_quantum_security_level == Some(0) {
1582 m.quantum_vulnerable_count += 1;
1583 }
1584 if algo.is_weak_by_name(&comp.name) {
1585 m.weak_algorithm_count += 1;
1586 m.weak_algorithm_names.push(comp.name.clone());
1587 }
1588 if algo.is_hybrid_pqc() {
1589 m.hybrid_pqc_count += 1;
1590 }
1591 }
1592 }
1593 CryptoAssetType::Certificate => {
1594 m.certificates_count += 1;
1595 if let Some(cert) = &cp.certificate_properties {
1596 if cert.not_valid_before.is_some() && cert.not_valid_after.is_some() {
1597 m.certs_with_validity_dates += 1;
1598 }
1599 if cert.signature_algorithm_ref.is_some() {
1600 m.certs_with_signature_algo_ref += 1;
1601 }
1602 if cert.is_expired() {
1603 m.expired_certificates += 1;
1604 } else if cert.is_expiring_soon(90) {
1605 m.expiring_soon_certificates += 1;
1606 }
1607 }
1608 }
1609 CryptoAssetType::RelatedCryptoMaterial => {
1610 m.keys_count += 1;
1611 if let Some(mat) = &cp.related_crypto_material_properties {
1612 if mat.state.is_some() {
1613 m.keys_with_state += 1;
1614 }
1615 if mat.secured_by.is_some() {
1616 m.keys_with_protection += 1;
1617 }
1618 if mat.creation_date.is_some() || mat.activation_date.is_some() {
1619 m.keys_with_lifecycle_dates += 1;
1620 }
1621 if mat.algorithm_ref.is_some() {
1622 m.keys_with_algorithm_ref += 1;
1623 }
1624 if mat.state == Some(CryptoMaterialState::Compromised) {
1625 m.compromised_keys += 1;
1626 }
1627 if let Some(size) = mat.size {
1629 let is_symmetric = matches!(
1630 mat.material_type,
1631 crate::model::CryptoMaterialType::SymmetricKey
1632 | crate::model::CryptoMaterialType::SecretKey
1633 );
1634 if (is_symmetric && size < 128) || (!is_symmetric && size < 2048) {
1635 m.inadequate_key_sizes += 1;
1636 }
1637 }
1638 }
1639 }
1640 CryptoAssetType::Protocol => {
1641 m.protocols_count += 1;
1642 if let Some(proto) = &cp.protocol_properties
1643 && !proto.cipher_suites.is_empty()
1644 {
1645 m.protocols_with_cipher_suites += 1;
1646 }
1647 }
1648 _ => {}
1649 }
1650 }
1651
1652 m
1653 }
1654
1655 #[must_use]
1657 pub fn has_data(&self) -> bool {
1658 self.total_crypto_components > 0
1659 }
1660
1661 #[must_use]
1664 pub fn quantum_readiness_score(&self) -> f32 {
1665 if self.algorithms_count == 0 {
1666 return 100.0;
1667 }
1668 (self.quantum_safe_count as f32 / self.algorithms_count as f32) * 100.0
1669 }
1670
1671 #[must_use]
1673 pub fn quality_score(&self) -> Option<f32> {
1674 if !self.has_data() {
1675 return None;
1676 }
1677
1678 let mut score = 100.0_f32;
1679
1680 score -= (self.weak_algorithm_count as f32 * 15.0).min(50.0);
1682 score -= (self.quantum_vulnerable_count as f32 * 8.0).min(40.0);
1684 score -= (self.expired_certificates as f32 * 10.0).min(30.0);
1686 score -= (self.compromised_keys as f32 * 20.0).min(40.0);
1688 score -= (self.inadequate_key_sizes as f32 * 5.0).min(20.0);
1690 score -= (self.expiring_soon_certificates as f32 * 3.0).min(15.0);
1692 score += (self.hybrid_pqc_count as f32 * 2.0).min(10.0);
1694
1695 Some(score.clamp(0.0, 100.0))
1696 }
1697
1698 #[must_use]
1702 pub fn crypto_completeness_score(&self) -> f32 {
1703 if self.algorithms_count == 0 {
1704 return 100.0;
1705 }
1706 let family_pct = self.algorithms_with_family as f32 / self.algorithms_count as f32;
1707 let primitive_pct = self.algorithms_with_primitive as f32 / self.algorithms_count as f32;
1708 let level_pct = self.algorithms_with_security_level as f32 / self.algorithms_count as f32;
1709 (family_pct * 40.0 + primitive_pct * 30.0 + level_pct * 30.0).clamp(0.0, 100.0)
1710 }
1711
1712 #[must_use]
1714 pub fn crypto_identifier_score(&self) -> f32 {
1715 if self.algorithms_count == 0 {
1716 return 100.0;
1717 }
1718 let oid_pct = self.algorithms_with_oid as f32 / self.algorithms_count as f32;
1719 (oid_pct * 100.0).clamp(0.0, 100.0)
1720 }
1721
1722 #[must_use]
1724 pub fn algorithm_strength_score(&self) -> f32 {
1725 if self.algorithms_count == 0 {
1726 return 100.0;
1727 }
1728 let mut score = 100.0_f32;
1729 score -= (self.weak_algorithm_count as f32 * 15.0).min(60.0);
1730 score -= (self.inadequate_key_sizes as f32 * 8.0).min(30.0);
1731 if self.algorithms_count > 0 {
1732 let vuln_pct = self.quantum_vulnerable_count as f32 / self.algorithms_count as f32;
1733 score -= vuln_pct * 30.0;
1734 }
1735 score.clamp(0.0, 100.0)
1736 }
1737
1738 #[must_use]
1740 pub fn crypto_dependency_score(&self) -> f32 {
1741 let linkable = self.certificates_count + self.keys_count + self.protocols_count;
1742 if linkable == 0 {
1743 return 100.0;
1744 }
1745 let resolved = self.certs_with_signature_algo_ref
1746 + self.keys_with_algorithm_ref
1747 + self.protocols_with_cipher_suites;
1748 let pct = resolved as f32 / linkable as f32;
1749 (pct * 100.0).clamp(0.0, 100.0)
1750 }
1751
1752 #[must_use]
1754 pub fn crypto_lifecycle_score(&self) -> f32 {
1755 let mut score = 100.0_f32;
1756
1757 if self.keys_count > 0 {
1758 let state_pct = self.keys_with_state as f32 / self.keys_count as f32;
1759 let protection_pct = self.keys_with_protection as f32 / self.keys_count as f32;
1760 let lifecycle_pct = self.keys_with_lifecycle_dates as f32 / self.keys_count as f32;
1761 let key_completeness =
1762 (state_pct * 0.4 + protection_pct * 0.3 + lifecycle_pct * 0.3) * 100.0;
1763 score = score * 0.5 + key_completeness * 0.5;
1764 score -= (self.compromised_keys as f32 * 20.0).min(40.0);
1765 score -= (self.inadequate_key_sizes as f32 * 5.0).min(20.0);
1766 }
1767
1768 if self.certificates_count > 0 {
1769 let validity_pct =
1770 self.certs_with_validity_dates as f32 / self.certificates_count as f32;
1771 score -= (1.0 - validity_pct) * 15.0;
1772 score -= (self.expired_certificates as f32 * 15.0).min(45.0);
1773 score -= (self.expiring_soon_certificates as f32 * 5.0).min(20.0);
1774 }
1775
1776 score.clamp(0.0, 100.0)
1777 }
1778
1779 #[must_use]
1781 pub fn pqc_readiness_score(&self) -> f32 {
1782 if self.algorithms_count == 0 {
1783 return 100.0;
1784 }
1785 let mut score = 0.0_f32;
1786 let qs_pct = self.quantum_safe_count as f32 / self.algorithms_count as f32;
1787 score += qs_pct * 60.0;
1788 if self.hybrid_pqc_count > 0 {
1789 score += 15.0;
1790 }
1791 if self.weak_algorithm_count == 0 {
1792 score += 25.0;
1793 } else {
1794 score += (25.0 - self.weak_algorithm_count as f32 * 5.0).max(0.0);
1795 }
1796 score.clamp(0.0, 100.0)
1797 }
1798
1799 #[must_use]
1801 pub fn quantum_readiness_pct(&self) -> f32 {
1802 if self.algorithms_count == 0 {
1803 return 0.0;
1804 }
1805 (self.quantum_safe_count as f32 / self.algorithms_count as f32) * 100.0
1806 }
1807
1808 #[must_use]
1810 pub const fn cbom_category_labels() -> [&'static str; 8] {
1811 ["Crpt", "OIDs", "Algo", "Refs", "Life", "PQC", "Prov", "Lic"]
1812 }
1813}
1814
/// Cheap package-URL plausibility check: requires the `pkg:` scheme followed
/// by at least one `/` (separating the type from the namespace/name). This is
/// intentionally a heuristic, not full purl-spec validation.
fn is_valid_purl(purl: &str) -> bool {
    purl.strip_prefix("pkg:")
        .map_or(false, |rest| rest.contains('/'))
}
1823
/// Returns the ecosystem/type segment of a purl (e.g. `npm` for
/// `pkg:npm/left-pad@1.3.0`), or `None` when the input is not a
/// `pkg:`-scheme identifier with a `/` separator.
fn extract_ecosystem_from_purl(purl: &str) -> Option<String> {
    let rest = purl.strip_prefix("pkg:")?;
    let (ecosystem, _) = rest.split_once('/')?;
    Some(ecosystem.to_string())
}
1833
/// Accepts both CPE 2.3 formatted strings (`cpe:2.3:...`) and the legacy
/// CPE 2.2 URI form (`cpe:/...`). Prefix check only — no field validation.
fn is_valid_cpe(cpe: &str) -> bool {
    ["cpe:2.3:", "cpe:/"]
        .iter()
        .any(|prefix| cpe.starts_with(prefix))
}
1838
/// Heuristic SPDX expression check: accepts a short list of common single
/// identifiers, or any expression containing an SPDX boolean operator
/// (` AND `, ` OR `, ` WITH `). Compound expressions are not parsed — the
/// mere presence of an operator is taken as "looks like SPDX".
fn is_valid_spdx_license(expr: &str) -> bool {
    const COMMON_SPDX: &[&str] = &[
        "MIT",
        "Apache-2.0",
        "GPL-2.0",
        "GPL-3.0",
        "BSD-2-Clause",
        "BSD-3-Clause",
        "ISC",
        "MPL-2.0",
        "LGPL-2.1",
        "LGPL-3.0",
        "AGPL-3.0",
        "Unlicense",
        "CC0-1.0",
        "0BSD",
        "EPL-2.0",
        "CDDL-1.0",
        "Artistic-2.0",
        "GPL-2.0-only",
        "GPL-2.0-or-later",
        "GPL-3.0-only",
        "GPL-3.0-or-later",
        "LGPL-2.1-only",
        "LGPL-2.1-or-later",
        "LGPL-3.0-only",
        "LGPL-3.0-or-later",
    ];

    let trimmed = expr.trim();
    let has_operator =
        trimmed.contains(" AND ") || trimmed.contains(" OR ") || trimmed.contains(" WITH ");
    has_operator || COMMON_SPDX.contains(&trimmed)
}
1876
/// True when the (trimmed) expression is one of the SPDX identifiers the
/// SPDX license list has deprecated — e.g. `GPL-2.0`, which was superseded
/// by `GPL-2.0-only` / `GPL-2.0-or-later`. Exact match only; compound
/// expressions are never flagged.
fn is_deprecated_spdx_license(expr: &str) -> bool {
    const DEPRECATED: &[&str] = &[
        "GPL-2.0",
        "GPL-2.0+",
        "GPL-3.0",
        "GPL-3.0+",
        "LGPL-2.0",
        "LGPL-2.0+",
        "LGPL-2.1",
        "LGPL-2.1+",
        "LGPL-3.0",
        "LGPL-3.0+",
        "AGPL-1.0",
        "AGPL-3.0",
        "GFDL-1.1",
        "GFDL-1.2",
        "GFDL-1.3",
        "BSD-2-Clause-FreeBSD",
        "BSD-2-Clause-NetBSD",
        "eCos-2.0",
        "Nunit",
        "StandardML-NJ",
        "wxWindows",
    ];
    let id = expr.trim();
    DEPRECATED.iter().any(|&candidate| candidate == id)
}
1908
/// True when the license identifier starts with a prefix associated with
/// copyleft / share-alike / non-commercial terms. Comparison is done on the
/// trimmed, upper-cased expression, so `gpl-3.0-only` matches too.
fn is_restrictive_license(expr: &str) -> bool {
    const RESTRICTIVE_PREFIXES: [&str; 9] = [
        "GPL", "LGPL", "AGPL", "EUPL", "SSPL", "OSL", "CPAL", "CC-BY-SA", "CC-BY-NC",
    ];
    let normalized = expr.trim().to_uppercase();
    RESTRICTIVE_PREFIXES
        .iter()
        .any(|prefix| normalized.starts_with(prefix))
}
1925
// Unit tests for the metric helpers and score functions in this module.
// Several tests exercise items defined elsewhere in this file
// (HashQualityMetrics, ProvenanceMetrics, compute_complexity, detect_cycles,
// compute_depth, is_strong_hash, ComplexityLevel).
#[cfg(test)]
mod tests {
    use super::*;

    // ---- identifier / license heuristics ----

    #[test]
    fn test_purl_validation() {
        assert!(is_valid_purl("pkg:npm/@scope/name@1.0.0"));
        assert!(is_valid_purl("pkg:maven/group/artifact@1.0"));
        assert!(!is_valid_purl("npm:something"));
        assert!(!is_valid_purl("invalid"));
    }

    #[test]
    fn test_cpe_validation() {
        // Both CPE 2.3 and legacy 2.2 URI forms are accepted.
        assert!(is_valid_cpe("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*"));
        assert!(is_valid_cpe("cpe:/a:vendor:product:1.0"));
        assert!(!is_valid_cpe("something:else"));
    }

    #[test]
    fn test_spdx_license_validation() {
        assert!(is_valid_spdx_license("MIT"));
        assert!(is_valid_spdx_license("Apache-2.0"));
        assert!(is_valid_spdx_license("MIT AND Apache-2.0"));
        assert!(is_valid_spdx_license("GPL-2.0 OR MIT"));
    }

    #[test]
    fn test_strong_hash_classification() {
        assert!(is_strong_hash(&HashAlgorithm::Sha256));
        assert!(is_strong_hash(&HashAlgorithm::Sha3_256));
        assert!(is_strong_hash(&HashAlgorithm::Blake3));
        assert!(!is_strong_hash(&HashAlgorithm::Md5));
        assert!(!is_strong_hash(&HashAlgorithm::Sha1));
        assert!(!is_strong_hash(&HashAlgorithm::Other("custom".to_string())));
    }

    #[test]
    fn test_deprecated_license_detection() {
        assert!(is_deprecated_spdx_license("GPL-2.0"));
        assert!(is_deprecated_spdx_license("LGPL-2.1"));
        assert!(is_deprecated_spdx_license("AGPL-3.0"));
        // The "-only"/"-or-later" forms are the current replacements, not deprecated.
        assert!(!is_deprecated_spdx_license("GPL-2.0-only"));
        assert!(!is_deprecated_spdx_license("MIT"));
        assert!(!is_deprecated_spdx_license("Apache-2.0"));
    }

    #[test]
    fn test_restrictive_license_detection() {
        assert!(is_restrictive_license("GPL-3.0-only"));
        assert!(is_restrictive_license("LGPL-2.1-or-later"));
        assert!(is_restrictive_license("AGPL-3.0-only"));
        assert!(is_restrictive_license("EUPL-1.2"));
        assert!(is_restrictive_license("CC-BY-SA-4.0"));
        assert!(!is_restrictive_license("MIT"));
        assert!(!is_restrictive_license("Apache-2.0"));
        assert!(!is_restrictive_license("BSD-3-Clause"));
    }

    // ---- hash quality (HashQualityMetrics is defined earlier in this file) ----

    #[test]
    fn test_hash_quality_score_no_components() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 0,
            components_with_strong_hash: 0,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 0,
            vendor_components_total: 0,
            vendor_components_with_hash: 0,
            vendor_components_with_strong_hash: 0,
        };
        assert_eq!(metrics.quality_score(0), 0.0);
    }

    #[test]
    fn test_hash_quality_score_all_strong() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 10,
            components_with_strong_hash: 10,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 10,
            vendor_components_total: 0,
            vendor_components_with_hash: 0,
            vendor_components_with_strong_hash: 0,
        };
        assert_eq!(metrics.quality_score(10), 100.0);
    }

    #[test]
    fn test_hash_quality_score_weak_only_penalty() {
        // NOTE(review): expected 50.0 comes from HashQualityMetrics::quality_score
        // (defined earlier in the file) — presumably weak-only hashes earn half
        // credit; verify against that implementation.
        let metrics = HashQualityMetrics {
            components_with_any_hash: 10,
            components_with_strong_hash: 0,
            components_with_weak_only: 10,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 10,
            vendor_components_total: 0,
            vendor_components_with_hash: 0,
            vendor_components_with_strong_hash: 0,
        };
        assert_eq!(metrics.quality_score(10), 50.0);
    }

    // ---- lifecycle ----

    #[test]
    fn test_lifecycle_no_enrichment_returns_none() {
        // No enriched components => has_data() is false and the score is None.
        let metrics = LifecycleMetrics {
            eol_components: 0,
            stale_components: 0,
            deprecated_components: 0,
            archived_components: 0,
            outdated_components: 0,
            enriched_components: 0,
            enrichment_coverage: 0.0,
        };
        assert!(!metrics.has_data());
        assert!(metrics.quality_score().is_none());
    }

    #[test]
    fn test_lifecycle_with_eol_penalty() {
        // 100 - 2 * 15 (EOL penalty) = 70.
        let metrics = LifecycleMetrics {
            eol_components: 2,
            stale_components: 0,
            deprecated_components: 0,
            archived_components: 0,
            outdated_components: 0,
            enriched_components: 10,
            enrichment_coverage: 100.0,
        };
        assert_eq!(metrics.quality_score(), Some(70.0));
    }

    // ---- dependency-graph helpers (defined earlier in this file) ----

    #[test]
    fn test_cycle_detection_no_cycles() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("a", vec!["b"]), ("b", vec!["c"])]);
        let all_nodes = vec!["a", "b", "c"];
        assert_eq!(detect_cycles(&all_nodes, &children), 0);
    }

    #[test]
    fn test_cycle_detection_with_cycle() {
        // a -> b -> c -> a forms a single cycle.
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("a", vec!["b"]), ("b", vec!["c"]), ("c", vec!["a"])]);
        let all_nodes = vec!["a", "b", "c"];
        assert_eq!(detect_cycles(&all_nodes, &children), 1);
    }

    #[test]
    fn test_depth_computation() {
        // root -> a -> c gives a maximum depth of 2.
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("root", vec!["a", "b"]), ("a", vec!["c"])]);
        let roots = vec!["root"];
        let (max_d, avg_d) = compute_depth(&roots, &children);
        assert_eq!(max_d, Some(2)); assert!(avg_d.is_some());
    }

    #[test]
    fn test_depth_empty_roots() {
        let children: HashMap<&str, Vec<&str>> = HashMap::new();
        let roots: Vec<&str> = vec![];
        let (max_d, avg_d) = compute_depth(&roots, &children);
        assert_eq!(max_d, None);
        assert_eq!(avg_d, None);
    }

    // ---- provenance (ProvenanceMetrics is defined earlier in this file) ----

    #[test]
    fn test_provenance_quality_score() {
        // Every field at its best value should reach the full score.
        let metrics = ProvenanceMetrics {
            has_tool_creator: true,
            has_tool_version: true,
            has_org_creator: true,
            has_contact_email: true,
            has_serial_number: true,
            has_document_name: true,
            timestamp_age_days: 10,
            is_fresh: true,
            has_primary_component: true,
            lifecycle_phase: Some("build".to_string()),
            completeness_declaration: CompletenessDeclaration::Complete,
            has_signature: true,
            has_citations: true,
            citations_count: 3,
        };
        assert_eq!(metrics.quality_score(true), 100.0);
    }

    #[test]
    fn test_provenance_score_without_cyclonedx() {
        // With the CycloneDX flag off, the CycloneDX-only fields (lifecycle
        // phase, citations) are not required to reach 100.
        let metrics = ProvenanceMetrics {
            has_tool_creator: true,
            has_tool_version: true,
            has_org_creator: true,
            has_contact_email: true,
            has_serial_number: true,
            has_document_name: true,
            timestamp_age_days: 10,
            is_fresh: true,
            has_primary_component: true,
            lifecycle_phase: None,
            completeness_declaration: CompletenessDeclaration::Complete,
            has_signature: true,
            has_citations: false,
            citations_count: 0,
        };
        assert_eq!(metrics.quality_score(false), 100.0);
    }

    // ---- graph complexity (compute_complexity / ComplexityLevel defined earlier) ----

    #[test]
    fn test_complexity_empty_graph() {
        let (simplicity, level, factors) = compute_complexity(0, 0, 0, 0, 0, 0, 0);
        assert_eq!(simplicity, 100.0);
        assert_eq!(level, ComplexityLevel::Low);
        assert_eq!(factors.dependency_volume, 0.0);
    }

    #[test]
    fn test_complexity_single_node() {
        let (simplicity, level, _) = compute_complexity(0, 1, 0, 0, 0, 1, 1);
        assert!(
            simplicity >= 80.0,
            "Single node simplicity {simplicity} should be >= 80"
        );
        assert_eq!(level, ComplexityLevel::Low);
    }

    #[test]
    fn test_complexity_monotonic_edges() {
        // Holding all else fixed, adding edges must not raise simplicity.
        let (s1, _, _) = compute_complexity(5, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(20, 10, 2, 3, 0, 1, 1);
        assert!(
            s2 <= s1,
            "More edges should not increase simplicity: {s2} vs {s1}"
        );
    }

    #[test]
    fn test_complexity_monotonic_cycles() {
        let (s1, _, _) = compute_complexity(10, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(10, 10, 2, 3, 3, 1, 1);
        assert!(
            s2 <= s1,
            "More cycles should not increase simplicity: {s2} vs {s1}"
        );
    }

    #[test]
    fn test_complexity_monotonic_depth() {
        let (s1, _, _) = compute_complexity(10, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(10, 10, 10, 3, 0, 1, 1);
        assert!(
            s2 <= s1,
            "More depth should not increase simplicity: {s2} vs {s1}"
        );
    }

    #[test]
    fn test_complexity_graph_skipped() {
        // Sanity check: result stays within the documented 0..=100 range.
        let (simplicity, _, _) = compute_complexity(100, 50, 5, 10, 2, 5, 3);
        assert!(simplicity >= 0.0 && simplicity <= 100.0);
    }

    #[test]
    fn test_complexity_level_bands() {
        // Band boundaries: >=75 Low, >=50 Moderate, >=25 High, else VeryHigh.
        assert_eq!(ComplexityLevel::from_score(100.0), ComplexityLevel::Low);
        assert_eq!(ComplexityLevel::from_score(75.0), ComplexityLevel::Low);
        assert_eq!(ComplexityLevel::from_score(74.0), ComplexityLevel::Moderate);
        assert_eq!(ComplexityLevel::from_score(50.0), ComplexityLevel::Moderate);
        assert_eq!(ComplexityLevel::from_score(49.0), ComplexityLevel::High);
        assert_eq!(ComplexityLevel::from_score(25.0), ComplexityLevel::High);
        assert_eq!(ComplexityLevel::from_score(24.0), ComplexityLevel::VeryHigh);
        assert_eq!(ComplexityLevel::from_score(0.0), ComplexityLevel::VeryHigh);
    }

    #[test]
    fn test_completeness_declaration_display() {
        assert_eq!(CompletenessDeclaration::Complete.to_string(), "complete");
        assert_eq!(
            CompletenessDeclaration::IncompleteFirstPartyOnly.to_string(),
            "incomplete (first-party only)"
        );
        assert_eq!(CompletenessDeclaration::Unknown.to_string(), "unknown");
    }

    // ---- cryptography (CBOM) scores ----

    #[test]
    fn crypto_completeness_all_documented() {
        let m = CryptographyMetrics {
            algorithms_count: 4,
            algorithms_with_family: 4,
            algorithms_with_primitive: 4,
            algorithms_with_security_level: 4,
            ..Default::default()
        };
        let score = m.crypto_completeness_score();
        assert!(
            (score - 100.0).abs() < 0.1,
            "fully documented → 100, got {score}"
        );
    }

    #[test]
    fn crypto_completeness_partial() {
        // 0.5 * 40 (family) + 1.0 * 30 (primitive) + 0.0 * 30 (level) = 50.
        let m = CryptographyMetrics {
            algorithms_count: 4,
            algorithms_with_family: 2, algorithms_with_primitive: 4, algorithms_with_security_level: 0, ..Default::default()
        };
        let score = m.crypto_completeness_score();
        assert!((score - 50.0).abs() < 0.1, "partial → 50, got {score}");
    }

    #[test]
    fn crypto_identifier_full_oid_coverage() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            algorithms_with_oid: 5,
            ..Default::default()
        };
        assert!((m.crypto_identifier_score() - 100.0).abs() < 0.1);
    }

    #[test]
    fn crypto_identifier_no_oids() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            algorithms_with_oid: 0,
            ..Default::default()
        };
        assert!((m.crypto_identifier_score() - 0.0).abs() < 0.1);
    }

    #[test]
    fn algorithm_strength_weak_penalty() {
        // 100 - 2 * 15 (weak-algorithm penalty) = 70.
        let m = CryptographyMetrics {
            algorithms_count: 5,
            weak_algorithm_count: 2,
            ..Default::default()
        };
        let score = m.algorithm_strength_score();
        assert!((score - 70.0).abs() < 0.1, "2 weak → 70, got {score}");
    }

    #[test]
    fn algorithm_strength_quantum_vulnerable() {
        // 100 - 1.0 (vulnerable share) * 30 = 70.
        let m = CryptographyMetrics {
            algorithms_count: 10,
            quantum_vulnerable_count: 10,
            ..Default::default()
        };
        let score = m.algorithm_strength_score();
        assert!(
            (score - 70.0).abs() < 0.1,
            "all quantum vuln → 70, got {score}"
        );
    }

    #[test]
    fn crypto_lifecycle_compromised_keys() {
        // Fully documented keys keep the blend at 100, then one compromised
        // key subtracts 20 — score lands strictly between 50 and 85.
        let m = CryptographyMetrics {
            keys_count: 3,
            keys_with_state: 3,
            keys_with_protection: 3,
            keys_with_lifecycle_dates: 3,
            compromised_keys: 1,
            ..Default::default()
        };
        let score = m.crypto_lifecycle_score();
        assert!(score < 85.0);
        assert!(score > 50.0);
    }

    #[test]
    fn crypto_lifecycle_expired_certs() {
        // 2 expired (15 each) + 1 expiring soon (5) = 35 points of penalty.
        let m = CryptographyMetrics {
            certificates_count: 4,
            certs_with_validity_dates: 4,
            expired_certificates: 2,
            expiring_soon_certificates: 1,
            ..Default::default()
        };
        let score = m.crypto_lifecycle_score();
        assert!(score < 70.0);
    }

    #[test]
    fn pqc_readiness_all_quantum_safe() {
        // 60 (all safe) + 15 (hybrid present) + 25 (no weak) = 100.
        let m = CryptographyMetrics {
            algorithms_count: 5,
            quantum_safe_count: 5,
            hybrid_pqc_count: 2,
            weak_algorithm_count: 0,
            ..Default::default()
        };
        let score = m.pqc_readiness_score();
        assert!(
            (score - 100.0).abs() < 0.1,
            "all safe + hybrid → 100, got {score}"
        );
    }

    #[test]
    fn pqc_readiness_no_quantum_safe() {
        // 0 (no safe) + 0 (no hybrid) + 25 (no weak) = 25.
        let m = CryptographyMetrics {
            algorithms_count: 5,
            quantum_safe_count: 0,
            hybrid_pqc_count: 0,
            weak_algorithm_count: 0,
            ..Default::default()
        };
        let score = m.pqc_readiness_score();
        assert!(
            (score - 25.0).abs() < 0.1,
            "no safe, no weak → 25, got {score}"
        );
    }

    #[test]
    fn crypto_dependency_all_resolved() {
        let m = CryptographyMetrics {
            certificates_count: 2,
            keys_count: 3,
            protocols_count: 1,
            certs_with_signature_algo_ref: 2,
            keys_with_algorithm_ref: 3,
            protocols_with_cipher_suites: 1,
            ..Default::default()
        };
        assert!((m.crypto_dependency_score() - 100.0).abs() < 0.1);
    }

    #[test]
    fn crypto_dependency_none_resolved() {
        let m = CryptographyMetrics {
            certificates_count: 2,
            keys_count: 3,
            protocols_count: 1,
            ..Default::default()
        };
        assert!((m.crypto_dependency_score() - 0.0).abs() < 0.1);
    }

    #[test]
    fn quality_score_none_when_no_crypto() {
        let m = CryptographyMetrics::default();
        assert!(m.quality_score().is_none());
    }

    #[test]
    fn quantum_readiness_pct_zero_algorithms() {
        // The display percentage reports 0 for an empty set (unlike the
        // vacuous 100 of quantum_readiness_score).
        let m = CryptographyMetrics::default();
        assert!((m.quantum_readiness_pct() - 0.0).abs() < 0.01);
    }
}