1use std::collections::{BTreeMap, HashMap, HashSet};
6
7use crate::model::{
8 CompletenessDeclaration, ComponentType, CreatorType, CryptoAssetType, CryptoMaterialState,
9 CryptoPrimitive, EolStatus, ExternalRefType, HashAlgorithm, NormalizedSbom, StalenessLevel,
10};
11use serde::{Deserialize, Serialize};
12
/// Field-coverage statistics for an SBOM: per-field percentages across all
/// components (0.0–100.0) plus document-level presence flags.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompletenessMetrics {
    /// Percentage of components that declare a version.
    pub components_with_version: f32,
    /// Percentage of components with a package URL (purl).
    pub components_with_purl: f32,
    /// Percentage of components with at least one CPE identifier.
    pub components_with_cpe: f32,
    /// Percentage of components naming a supplier.
    pub components_with_supplier: f32,
    /// Percentage of components carrying at least one hash.
    pub components_with_hashes: f32,
    /// Percentage of components with declared or concluded license data.
    pub components_with_licenses: f32,
    /// Percentage of components with a description.
    pub components_with_description: f32,
    /// Whether the document lists any creators.
    pub has_creator_info: bool,
    /// Whether the document carries a creation timestamp.
    pub has_timestamp: bool,
    /// Whether the document has a serial number.
    pub has_serial_number: bool,
    /// Total number of components inspected.
    pub total_components: usize,
}
39
40impl CompletenessMetrics {
41 #[must_use]
43 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
44 let total = sbom.components.len();
45 if total == 0 {
46 return Self::empty();
47 }
48
49 let mut with_version = 0;
50 let mut with_purl = 0;
51 let mut with_cpe = 0;
52 let mut with_supplier = 0;
53 let mut with_hashes = 0;
54 let mut with_licenses = 0;
55 let mut with_description = 0;
56
57 for comp in sbom.components.values() {
58 if comp.version.is_some() {
59 with_version += 1;
60 }
61 if comp.identifiers.purl.is_some() {
62 with_purl += 1;
63 }
64 if !comp.identifiers.cpe.is_empty() {
65 with_cpe += 1;
66 }
67 if comp.supplier.is_some() {
68 with_supplier += 1;
69 }
70 if !comp.hashes.is_empty() {
71 with_hashes += 1;
72 }
73 if !comp.licenses.declared.is_empty() || comp.licenses.concluded.is_some() {
74 with_licenses += 1;
75 }
76 if comp.description.is_some() {
77 with_description += 1;
78 }
79 }
80
81 let pct = |count: usize| (count as f32 / total as f32) * 100.0;
82
83 Self {
84 components_with_version: pct(with_version),
85 components_with_purl: pct(with_purl),
86 components_with_cpe: pct(with_cpe),
87 components_with_supplier: pct(with_supplier),
88 components_with_hashes: pct(with_hashes),
89 components_with_licenses: pct(with_licenses),
90 components_with_description: pct(with_description),
91 has_creator_info: !sbom.document.creators.is_empty(),
92 has_timestamp: true, has_serial_number: sbom.document.serial_number.is_some(),
94 total_components: total,
95 }
96 }
97
98 #[must_use]
100 pub const fn empty() -> Self {
101 Self {
102 components_with_version: 0.0,
103 components_with_purl: 0.0,
104 components_with_cpe: 0.0,
105 components_with_supplier: 0.0,
106 components_with_hashes: 0.0,
107 components_with_licenses: 0.0,
108 components_with_description: 0.0,
109 has_creator_info: false,
110 has_timestamp: false,
111 has_serial_number: false,
112 total_components: 0,
113 }
114 }
115
116 #[must_use]
118 pub fn overall_score(&self, weights: &CompletenessWeights) -> f32 {
119 let mut score = 0.0;
120 let mut total_weight = 0.0;
121
122 score += self.components_with_version * weights.version;
124 total_weight += weights.version * 100.0;
125
126 score += self.components_with_purl * weights.purl;
127 total_weight += weights.purl * 100.0;
128
129 score += self.components_with_cpe * weights.cpe;
130 total_weight += weights.cpe * 100.0;
131
132 score += self.components_with_supplier * weights.supplier;
133 total_weight += weights.supplier * 100.0;
134
135 score += self.components_with_hashes * weights.hashes;
136 total_weight += weights.hashes * 100.0;
137
138 score += self.components_with_licenses * weights.licenses;
139 total_weight += weights.licenses * 100.0;
140
141 if self.has_creator_info {
143 score += 100.0 * weights.creator_info;
144 }
145 total_weight += weights.creator_info * 100.0;
146
147 if self.has_serial_number {
148 score += 100.0 * weights.serial_number;
149 }
150 total_weight += weights.serial_number * 100.0;
151
152 if total_weight > 0.0 {
153 (score / total_weight) * 100.0
154 } else {
155 0.0
156 }
157 }
158}
159
/// Relative importance of each completeness criterion used by
/// [`CompletenessMetrics::overall_score`]; larger values weigh heavier.
#[derive(Debug, Clone)]
pub struct CompletenessWeights {
    /// Weight for version coverage.
    pub version: f32,
    /// Weight for purl coverage.
    pub purl: f32,
    /// Weight for CPE coverage.
    pub cpe: f32,
    /// Weight for supplier coverage.
    pub supplier: f32,
    /// Weight for hash coverage.
    pub hashes: f32,
    /// Weight for license coverage.
    pub licenses: f32,
    /// Weight for the document-level creator flag.
    pub creator_info: f32,
    /// Weight for the document-level serial-number flag.
    pub serial_number: f32,
}
172
173impl Default for CompletenessWeights {
174 fn default() -> Self {
175 Self {
176 version: 1.0,
177 purl: 1.5, cpe: 0.5, supplier: 1.0,
180 hashes: 1.0,
181 licenses: 1.2, creator_info: 0.3,
183 serial_number: 0.2,
184 }
185 }
186}
187
/// Coverage and strength statistics for the hashes attached to components.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HashQualityMetrics {
    /// Components carrying at least one hash of any algorithm.
    pub components_with_any_hash: usize,
    /// Components with at least one strong hash (see `is_strong_hash`).
    pub components_with_strong_hash: usize,
    /// Components whose hashes are all weak algorithms.
    pub components_with_weak_only: usize,
    /// Count of hashes seen per algorithm label (sorted by label).
    pub algorithm_distribution: BTreeMap<String, usize>,
    /// Total number of hash entries across all components.
    pub total_hashes: usize,
}
206
207impl HashQualityMetrics {
208 #[must_use]
210 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
211 let mut with_any = 0;
212 let mut with_strong = 0;
213 let mut with_weak_only = 0;
214 let mut distribution: BTreeMap<String, usize> = BTreeMap::new();
215 let mut total_hashes = 0;
216
217 for comp in sbom.components.values() {
218 if comp.hashes.is_empty() {
219 continue;
220 }
221 with_any += 1;
222 total_hashes += comp.hashes.len();
223
224 let mut has_strong = false;
225 let mut has_weak = false;
226
227 for hash in &comp.hashes {
228 let label = hash_algorithm_label(&hash.algorithm);
229 *distribution.entry(label).or_insert(0) += 1;
230
231 if is_strong_hash(&hash.algorithm) {
232 has_strong = true;
233 } else {
234 has_weak = true;
235 }
236 }
237
238 if has_strong {
239 with_strong += 1;
240 } else if has_weak {
241 with_weak_only += 1;
242 }
243 }
244
245 Self {
246 components_with_any_hash: with_any,
247 components_with_strong_hash: with_strong,
248 components_with_weak_only: with_weak_only,
249 algorithm_distribution: distribution,
250 total_hashes,
251 }
252 }
253
254 #[must_use]
259 pub fn quality_score(&self, total_components: usize) -> f32 {
260 if total_components == 0 {
261 return 0.0;
262 }
263
264 let any_coverage = self.components_with_any_hash as f32 / total_components as f32;
265 let strong_coverage = self.components_with_strong_hash as f32 / total_components as f32;
266 let weak_only_ratio = self.components_with_weak_only as f32 / total_components as f32;
267
268 let base = any_coverage * 60.0;
269 let strong_bonus = strong_coverage * 40.0;
270 let weak_penalty = weak_only_ratio * 10.0;
271
272 (base + strong_bonus - weak_penalty).clamp(0.0, 100.0)
273 }
274}
275
276fn is_strong_hash(algo: &HashAlgorithm) -> bool {
278 matches!(
279 algo,
280 HashAlgorithm::Sha256
281 | HashAlgorithm::Sha384
282 | HashAlgorithm::Sha512
283 | HashAlgorithm::Sha3_256
284 | HashAlgorithm::Sha3_384
285 | HashAlgorithm::Sha3_512
286 | HashAlgorithm::Blake2b256
287 | HashAlgorithm::Blake2b384
288 | HashAlgorithm::Blake2b512
289 | HashAlgorithm::Blake3
290 | HashAlgorithm::Streebog256
291 | HashAlgorithm::Streebog512
292 )
293}
294
295fn hash_algorithm_label(algo: &HashAlgorithm) -> String {
297 match algo {
298 HashAlgorithm::Md5 => "MD5".to_string(),
299 HashAlgorithm::Sha1 => "SHA-1".to_string(),
300 HashAlgorithm::Sha256 => "SHA-256".to_string(),
301 HashAlgorithm::Sha384 => "SHA-384".to_string(),
302 HashAlgorithm::Sha512 => "SHA-512".to_string(),
303 HashAlgorithm::Sha3_256 => "SHA3-256".to_string(),
304 HashAlgorithm::Sha3_384 => "SHA3-384".to_string(),
305 HashAlgorithm::Sha3_512 => "SHA3-512".to_string(),
306 HashAlgorithm::Blake2b256 => "BLAKE2b-256".to_string(),
307 HashAlgorithm::Blake2b384 => "BLAKE2b-384".to_string(),
308 HashAlgorithm::Blake2b512 => "BLAKE2b-512".to_string(),
309 HashAlgorithm::Blake3 => "BLAKE3".to_string(),
310 HashAlgorithm::Streebog256 => "Streebog-256".to_string(),
311 HashAlgorithm::Streebog512 => "Streebog-512".to_string(),
312 HashAlgorithm::Other(s) => s.clone(),
313 }
314}
315
/// Validity statistics for component identifiers (purl, CPE, SWID).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IdentifierMetrics {
    /// purls that passed validation.
    pub valid_purls: usize,
    /// purls present but malformed.
    pub invalid_purls: usize,
    /// CPE strings that passed validation.
    pub valid_cpes: usize,
    /// CPE strings present but malformed.
    pub invalid_cpes: usize,
    /// Components carrying a SWID tag.
    pub with_swid: usize,
    /// Sorted, de-duplicated list of purl ecosystems seen.
    pub ecosystems: Vec<String>,
    /// Components with no purl, CPE, or SWID at all.
    pub missing_all_identifiers: usize,
}
338
339impl IdentifierMetrics {
340 #[must_use]
342 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
343 let mut valid_purls = 0;
344 let mut invalid_purls = 0;
345 let mut valid_cpes = 0;
346 let mut invalid_cpes = 0;
347 let mut with_swid = 0;
348 let mut missing_all = 0;
349 let mut ecosystems = std::collections::HashSet::new();
350
351 for comp in sbom.components.values() {
352 let has_purl = comp.identifiers.purl.is_some();
353 let has_cpe = !comp.identifiers.cpe.is_empty();
354 let has_swid = comp.identifiers.swid.is_some();
355
356 if let Some(ref purl) = comp.identifiers.purl {
357 if is_valid_purl(purl) {
358 valid_purls += 1;
359 if let Some(eco) = extract_ecosystem_from_purl(purl) {
361 ecosystems.insert(eco);
362 }
363 } else {
364 invalid_purls += 1;
365 }
366 }
367
368 for cpe in &comp.identifiers.cpe {
369 if is_valid_cpe(cpe) {
370 valid_cpes += 1;
371 } else {
372 invalid_cpes += 1;
373 }
374 }
375
376 if has_swid {
377 with_swid += 1;
378 }
379
380 if !has_purl && !has_cpe && !has_swid {
381 missing_all += 1;
382 }
383 }
384
385 let mut ecosystem_list: Vec<String> = ecosystems.into_iter().collect();
386 ecosystem_list.sort();
387
388 Self {
389 valid_purls,
390 invalid_purls,
391 valid_cpes,
392 invalid_cpes,
393 with_swid,
394 ecosystems: ecosystem_list,
395 missing_all_identifiers: missing_all,
396 }
397 }
398
399 #[must_use]
401 pub fn quality_score(&self, total_components: usize) -> f32 {
402 if total_components == 0 {
403 return 0.0;
404 }
405
406 let with_valid_id = self.valid_purls + self.valid_cpes + self.with_swid;
407 let coverage =
408 (with_valid_id.min(total_components) as f32 / total_components as f32) * 100.0;
409
410 let invalid_count = self.invalid_purls + self.invalid_cpes;
412 let penalty = (invalid_count as f32 / total_components as f32) * 20.0;
413
414 (coverage - penalty).clamp(0.0, 100.0)
415 }
416}
417
/// License coverage and SPDX-conformance statistics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LicenseMetrics {
    /// Components with at least one declared license.
    pub with_declared: usize,
    /// Components with a concluded license.
    pub with_concluded: usize,
    /// Declared expressions that are valid SPDX.
    pub valid_spdx_expressions: usize,
    /// Declared expressions that are neither NOASSERTION nor valid SPDX.
    pub non_standard_licenses: usize,
    /// Declared expressions equal to "NOASSERTION".
    pub noassertion_count: usize,
    /// Declared expressions using deprecated SPDX identifiers.
    pub deprecated_licenses: usize,
    /// Declared expressions classified as restrictive/copyleft.
    pub restrictive_licenses: usize,
    /// Sorted, de-duplicated restrictive license identifiers.
    pub copyleft_license_ids: Vec<String>,
    /// Sorted, de-duplicated list of all declared expressions.
    pub unique_licenses: Vec<String>,
}
440
441impl LicenseMetrics {
442 #[must_use]
444 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
445 let mut with_declared = 0;
446 let mut with_concluded = 0;
447 let mut valid_spdx = 0;
448 let mut non_standard = 0;
449 let mut noassertion = 0;
450 let mut deprecated = 0;
451 let mut restrictive = 0;
452 let mut licenses = HashSet::new();
453 let mut copyleft_ids = HashSet::new();
454
455 for comp in sbom.components.values() {
456 if !comp.licenses.declared.is_empty() {
457 with_declared += 1;
458 for lic in &comp.licenses.declared {
459 let expr = &lic.expression;
460 licenses.insert(expr.clone());
461
462 if expr == "NOASSERTION" {
463 noassertion += 1;
464 } else if is_valid_spdx_license(expr) {
465 valid_spdx += 1;
466 } else {
467 non_standard += 1;
468 }
469
470 if is_deprecated_spdx_license(expr) {
471 deprecated += 1;
472 }
473 if is_restrictive_license(expr) {
474 restrictive += 1;
475 copyleft_ids.insert(expr.clone());
476 }
477 }
478 }
479
480 if comp.licenses.concluded.is_some() {
481 with_concluded += 1;
482 }
483 }
484
485 let mut license_list: Vec<String> = licenses.into_iter().collect();
486 license_list.sort();
487
488 let mut copyleft_list: Vec<String> = copyleft_ids.into_iter().collect();
489 copyleft_list.sort();
490
491 Self {
492 with_declared,
493 with_concluded,
494 valid_spdx_expressions: valid_spdx,
495 non_standard_licenses: non_standard,
496 noassertion_count: noassertion,
497 deprecated_licenses: deprecated,
498 restrictive_licenses: restrictive,
499 copyleft_license_ids: copyleft_list,
500 unique_licenses: license_list,
501 }
502 }
503
504 #[must_use]
506 pub fn quality_score(&self, total_components: usize) -> f32 {
507 if total_components == 0 {
508 return 0.0;
509 }
510
511 let coverage = (self.with_declared as f32 / total_components as f32) * 60.0;
512
513 let spdx_ratio = if self.with_declared > 0 {
515 self.valid_spdx_expressions as f32 / self.with_declared as f32
516 } else {
517 0.0
518 };
519 let spdx_bonus = spdx_ratio * 30.0;
520
521 let noassertion_penalty =
523 (self.noassertion_count as f32 / total_components.max(1) as f32) * 10.0;
524
525 let deprecated_penalty = (self.deprecated_licenses as f32 * 2.0).min(10.0);
527
528 (coverage + spdx_bonus - noassertion_penalty - deprecated_penalty).clamp(0.0, 100.0)
529 }
530}
531
/// Counts describing how thoroughly vulnerabilities are documented.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VulnerabilityMetrics {
    /// Components with at least one recorded vulnerability.
    pub components_with_vulns: usize,
    /// Total vulnerability entries across all components.
    pub total_vulnerabilities: usize,
    /// Vulnerabilities carrying at least one CVSS score.
    pub with_cvss: usize,
    /// Vulnerabilities carrying at least one CWE.
    pub with_cwe: usize,
    /// Vulnerabilities with remediation information.
    pub with_remediation: usize,
    /// Components with a VEX status (on the component or any of its vulns).
    pub with_vex_status: usize,
}
548
549impl VulnerabilityMetrics {
550 #[must_use]
552 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
553 let mut components_with_vulns = 0;
554 let mut total_vulns = 0;
555 let mut with_cvss = 0;
556 let mut with_cwe = 0;
557 let mut with_remediation = 0;
558 let mut with_vex = 0;
559
560 for comp in sbom.components.values() {
561 if !comp.vulnerabilities.is_empty() {
562 components_with_vulns += 1;
563 }
564
565 for vuln in &comp.vulnerabilities {
566 total_vulns += 1;
567
568 if !vuln.cvss.is_empty() {
569 with_cvss += 1;
570 }
571 if !vuln.cwes.is_empty() {
572 with_cwe += 1;
573 }
574 if vuln.remediation.is_some() {
575 with_remediation += 1;
576 }
577 }
578
579 if comp.vex_status.is_some()
580 || comp.vulnerabilities.iter().any(|v| v.vex_status.is_some())
581 {
582 with_vex += 1;
583 }
584 }
585
586 Self {
587 components_with_vulns,
588 total_vulnerabilities: total_vulns,
589 with_cvss,
590 with_cwe,
591 with_remediation,
592 with_vex_status: with_vex,
593 }
594 }
595
596 #[must_use]
603 pub fn documentation_score(&self) -> Option<f32> {
604 if self.total_vulnerabilities == 0 {
605 return None; }
607
608 let cvss_ratio = self.with_cvss as f32 / self.total_vulnerabilities as f32;
609 let cwe_ratio = self.with_cwe as f32 / self.total_vulnerabilities as f32;
610 let remediation_ratio = self.with_remediation as f32 / self.total_vulnerabilities as f32;
611
612 Some(
613 remediation_ratio
614 .mul_add(30.0, cvss_ratio.mul_add(40.0, cwe_ratio * 30.0))
615 .min(100.0),
616 )
617 }
618}
619
/// Edge-count ceiling above which the expensive graph analyses (depth,
/// cycles, islands, complexity) are skipped to bound CPU time.
const MAX_EDGES_FOR_GRAPH_ANALYSIS: usize = 50_000;
626
/// Qualitative bucket for the software-complexity (simplicity) index.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[non_exhaustive]
pub enum ComplexityLevel {
    /// Simplicity score 75–100.
    Low,
    /// Simplicity score 50–74.
    Moderate,
    /// Simplicity score 25–49.
    High,
    /// Simplicity score below 25.
    VeryHigh,
}
644
645impl ComplexityLevel {
646 #[must_use]
648 pub const fn from_score(simplicity: f32) -> Self {
649 match simplicity as u32 {
650 75..=100 => Self::Low,
651 50..=74 => Self::Moderate,
652 25..=49 => Self::High,
653 _ => Self::VeryHigh,
654 }
655 }
656
657 #[must_use]
659 pub const fn label(&self) -> &'static str {
660 match self {
661 Self::Low => "Low",
662 Self::Moderate => "Moderate",
663 Self::High => "High",
664 Self::VeryHigh => "Very High",
665 }
666 }
667}
668
669impl std::fmt::Display for ComplexityLevel {
670 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
671 f.write_str(self.label())
672 }
673}
674
/// Individual normalized factors (each in `[0, 1]`) feeding the
/// software-complexity index; see `compute_complexity` for the weighting.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComplexityFactors {
    /// Log-scaled edges-per-component ratio.
    pub dependency_volume: f32,
    /// Maximum dependency depth, saturating at 15 levels.
    pub normalized_depth: f32,
    /// Largest fan-out relative to graph size.
    pub fanout_concentration: f32,
    /// Cycle count relative to 5% of component count.
    pub cycle_ratio: f32,
    /// Disconnected islands beyond the first, relative to graph size.
    pub fragmentation: f32,
}
690
/// Dependency-graph statistics for an SBOM. The `Option` fields are `None`
/// when graph analysis was skipped for oversized graphs
/// (`graph_analysis_skipped` is then `true`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DependencyMetrics {
    /// Total number of dependency edges.
    pub total_dependencies: usize,
    /// Components with at least one outgoing edge.
    pub components_with_deps: usize,
    /// Maximum BFS depth from the root components.
    pub max_depth: Option<usize>,
    /// Average BFS depth over reachable nodes.
    pub avg_depth: Option<f32>,
    /// Components participating in no edge at all.
    pub orphan_components: usize,
    /// Components with outgoing but no incoming edges.
    pub root_components: usize,
    /// Number of back edges (cycles) found by DFS.
    pub cycle_count: usize,
    /// Number of weakly-connected components.
    pub island_count: usize,
    /// True when the graph exceeded `MAX_EDGES_FOR_GRAPH_ANALYSIS`.
    pub graph_analysis_skipped: bool,
    /// Largest out-degree of any single component.
    pub max_out_degree: usize,
    /// Simplicity index in `[0, 100]` (higher = simpler).
    pub software_complexity_index: Option<f32>,
    /// Bucketed form of the simplicity index.
    pub complexity_level: Option<ComplexityLevel>,
    /// The individual factors behind the index.
    pub complexity_factors: Option<ComplexityFactors>,
}
721
impl DependencyMetrics {
    /// Builds dependency-graph metrics from the SBOM's edge list.
    ///
    /// For graphs with more than [`MAX_EDGES_FOR_GRAPH_ANALYSIS`] edges the
    /// expensive analyses (depth, cycles, islands, complexity) are skipped
    /// and `graph_analysis_skipped` is set.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        use crate::model::CanonicalId;

        let total_deps = sbom.edges.len();

        // Adjacency list plus in/out-degree presence sets, keyed by the
        // components' canonical-id string slices.
        let mut children: HashMap<&str, Vec<&str>> = HashMap::new();
        let mut has_outgoing: HashSet<&str> = HashSet::new();
        let mut has_incoming: HashSet<&str> = HashSet::new();

        for edge in &sbom.edges {
            children
                .entry(edge.from.value())
                .or_default()
                .push(edge.to.value());
            has_outgoing.insert(edge.from.value());
            has_incoming.insert(edge.to.value());
        }

        let all_ids: Vec<&str> = sbom.components.keys().map(CanonicalId::value).collect();

        // Orphans participate in no edge at all.
        let orphans = all_ids
            .iter()
            .filter(|c| !has_outgoing.contains(*c) && !has_incoming.contains(*c))
            .count();

        // Roots have outgoing edges but nothing depends on them.
        let roots: Vec<&str> = has_outgoing
            .iter()
            .filter(|c| !has_incoming.contains(*c))
            .copied()
            .collect();
        let root_count = roots.len();

        let max_out_degree = children.values().map(Vec::len).max().unwrap_or(0);

        // Bail out before the O(V + E) traversals on oversized graphs.
        if total_deps > MAX_EDGES_FOR_GRAPH_ANALYSIS {
            return Self {
                total_dependencies: total_deps,
                components_with_deps: has_outgoing.len(),
                max_depth: None,
                avg_depth: None,
                orphan_components: orphans,
                root_components: root_count,
                cycle_count: 0,
                island_count: 0,
                graph_analysis_skipped: true,
                max_out_degree,
                software_complexity_index: None,
                complexity_level: None,
                complexity_factors: None,
            };
        }

        let (max_depth, avg_depth) = compute_depth(&roots, &children);

        let cycle_count = detect_cycles(&all_ids, &children);

        let island_count = count_islands(&all_ids, &sbom.edges);

        let component_count = all_ids.len();
        let (complexity_index, complexity_lvl, factors) = compute_complexity(
            total_deps,
            component_count,
            max_depth.unwrap_or(0),
            max_out_degree,
            cycle_count,
            orphans,
            island_count,
        );

        Self {
            total_dependencies: total_deps,
            components_with_deps: has_outgoing.len(),
            max_depth,
            avg_depth,
            orphan_components: orphans,
            root_components: root_count,
            cycle_count,
            island_count,
            graph_analysis_skipped: false,
            max_out_degree,
            software_complexity_index: Some(complexity_index),
            complexity_level: Some(complexity_lvl),
            complexity_factors: Some(factors),
        }
    }

    /// Scores dependency-graph quality in `[0, 100]`: coverage of
    /// components that declare dependencies, with penalties for orphans,
    /// cycles, and excessive disconnected islands.
    #[must_use]
    pub fn quality_score(&self, total_components: usize) -> f32 {
        if total_components == 0 {
            return 0.0;
        }

        // NOTE(review): the denominator excludes one component — presumably
        // the root/primary, which legitimately has no parent. Confirm intent.
        let coverage = if total_components > 1 {
            (self.components_with_deps as f32 / (total_components - 1) as f32) * 100.0
        } else {
            100.0
        };

        let orphan_ratio = self.orphan_components as f32 / total_components as f32;
        let orphan_penalty = orphan_ratio * 10.0;

        // At most 20 points off for cycles, 5 per cycle.
        let cycle_penalty = (self.cycle_count as f32 * 5.0).min(20.0);

        // Small graphs and up to 3 islands are tolerated penalty-free.
        let island_penalty = if total_components > 5 && self.island_count > 3 {
            ((self.island_count - 3) as f32 * 3.0).min(15.0)
        } else {
            0.0
        };

        (coverage - orphan_penalty - cycle_penalty - island_penalty).clamp(0.0, 100.0)
    }
}
849
/// Breadth-first sweep from `roots`, returning the maximum depth reached
/// and the average depth over all reachable nodes.
///
/// Depth is the BFS (shortest-path) distance from the nearest root; nodes
/// unreachable from any root are ignored. Returns `(None, None)` when
/// `roots` is empty.
fn compute_depth(
    roots: &[&str],
    children: &HashMap<&str, Vec<&str>>,
) -> (Option<usize>, Option<f32>) {
    use std::collections::VecDeque;

    if roots.is_empty() {
        return (None, None);
    }

    // Seed the frontier with each distinct root at depth 0.
    let mut seen: HashSet<&str> = HashSet::new();
    let mut frontier: VecDeque<(&str, usize)> = roots
        .iter()
        .filter(|&&r| seen.insert(r))
        .map(|&r| (r, 0))
        .collect();

    let mut deepest: usize = 0;
    let mut depth_sum: usize = 0;
    let mut visited_count: usize = 0;

    while let Some((node, depth)) = frontier.pop_front() {
        deepest = deepest.max(depth);
        depth_sum += depth;
        visited_count += 1;

        if let Some(kids) = children.get(node) {
            for &kid in kids {
                if seen.insert(kid) {
                    frontier.push_back((kid, depth + 1));
                }
            }
        }
    }

    // At least one root was enqueued, so the count is always positive here.
    let avg = if visited_count > 0 {
        Some(depth_sum as f32 / visited_count as f32)
    } else {
        None
    };

    (Some(deepest), avg)
}
895
/// Counts back edges found during a depth-first traversal — each back edge
/// indicates a cycle in the dependency graph.
///
/// Uses the classic three-color scheme (white = unvisited, gray = on the
/// current DFS path, black = finished). The traversal is iterative with an
/// explicit stack so that deep dependency chains cannot overflow the call
/// stack (graphs with up to `MAX_EDGES_FOR_GRAPH_ANALYSIS` edges are
/// analyzed); the previous recursive formulation could. Child visitation
/// order matches the recursive version exactly.
fn detect_cycles(all_nodes: &[&str], children: &HashMap<&str, Vec<&str>>) -> usize {
    const WHITE: u8 = 0;
    const GRAY: u8 = 1;
    const BLACK: u8 = 2;

    let mut color: HashMap<&str, u8> = HashMap::with_capacity(all_nodes.len());
    for &node in all_nodes {
        color.insert(node, WHITE);
    }

    let mut cycles = 0;

    for &start in all_nodes {
        if color.get(start).copied().unwrap_or(WHITE) != WHITE {
            continue;
        }

        // Each frame is (node, index of the next child to examine).
        color.insert(start, GRAY);
        let mut stack: Vec<(&str, usize)> = vec![(start, 0)];

        while let Some(top) = stack.last_mut() {
            let node = top.0;
            let next = top.1;
            let kids: &[&str] = children.get(node).map_or(&[], |v| v.as_slice());

            if next < kids.len() {
                top.1 += 1;
                let kid = kids[next];
                match color.get(kid).copied().unwrap_or(WHITE) {
                    // Gray child: edge back into the active path => cycle.
                    GRAY => cycles += 1,
                    WHITE => {
                        color.insert(kid, GRAY);
                        stack.push((kid, 0));
                    }
                    // Black child: already fully explored, nothing to do.
                    _ => {}
                }
            } else {
                // All children examined: finish this node.
                color.insert(node, BLACK);
                stack.pop();
            }
        }
    }

    cycles
}
938
939fn count_islands(all_nodes: &[&str], edges: &[crate::model::DependencyEdge]) -> usize {
941 if all_nodes.is_empty() {
942 return 0;
943 }
944
945 let node_idx: HashMap<&str, usize> =
947 all_nodes.iter().enumerate().map(|(i, &n)| (n, i)).collect();
948
949 let mut parent: Vec<usize> = (0..all_nodes.len()).collect();
950 let mut rank: Vec<u8> = vec![0; all_nodes.len()];
951
952 fn find(parent: &mut Vec<usize>, x: usize) -> usize {
953 if parent[x] != x {
954 parent[x] = find(parent, parent[x]); }
956 parent[x]
957 }
958
959 fn union(parent: &mut Vec<usize>, rank: &mut [u8], a: usize, b: usize) {
960 let ra = find(parent, a);
961 let rb = find(parent, b);
962 if ra != rb {
963 if rank[ra] < rank[rb] {
964 parent[ra] = rb;
965 } else if rank[ra] > rank[rb] {
966 parent[rb] = ra;
967 } else {
968 parent[rb] = ra;
969 rank[ra] += 1;
970 }
971 }
972 }
973
974 for edge in edges {
975 if let (Some(&a), Some(&b)) = (
976 node_idx.get(edge.from.value()),
977 node_idx.get(edge.to.value()),
978 ) {
979 union(&mut parent, &mut rank, a, b);
980 }
981 }
982
983 let mut roots = HashSet::new();
985 for i in 0..all_nodes.len() {
986 roots.insert(find(&mut parent, i));
987 }
988
989 roots.len()
990}
991
992fn compute_complexity(
997 edges: usize,
998 components: usize,
999 max_depth: usize,
1000 max_out_degree: usize,
1001 cycle_count: usize,
1002 _orphans: usize,
1003 islands: usize,
1004) -> (f32, ComplexityLevel, ComplexityFactors) {
1005 if components == 0 {
1006 let factors = ComplexityFactors {
1007 dependency_volume: 0.0,
1008 normalized_depth: 0.0,
1009 fanout_concentration: 0.0,
1010 cycle_ratio: 0.0,
1011 fragmentation: 0.0,
1012 };
1013 return (100.0, ComplexityLevel::Low, factors);
1014 }
1015
1016 let edge_ratio = edges as f64 / components as f64;
1018 let dependency_volume = ((1.0 + edge_ratio).ln() / 20.0_f64.ln()).min(1.0) as f32;
1019
1020 let normalized_depth = (max_depth as f32 / 15.0).min(1.0);
1022
1023 let fanout_denom = (components as f32 * 0.25).max(4.0);
1026 let fanout_concentration = (max_out_degree as f32 / fanout_denom).min(1.0);
1027
1028 let cycle_threshold = (components as f32 * 0.05).max(1.0);
1030 let cycle_ratio = (cycle_count as f32 / cycle_threshold).min(1.0);
1031
1032 let extra_islands = islands.saturating_sub(1);
1035 let fragmentation = if components > 1 {
1036 (extra_islands as f32 / (components - 1) as f32).min(1.0)
1037 } else {
1038 0.0
1039 };
1040
1041 let factors = ComplexityFactors {
1042 dependency_volume,
1043 normalized_depth,
1044 fanout_concentration,
1045 cycle_ratio,
1046 fragmentation,
1047 };
1048
1049 let raw_complexity = 0.30 * dependency_volume
1050 + 0.20 * normalized_depth
1051 + 0.20 * fanout_concentration
1052 + 0.20 * cycle_ratio
1053 + 0.10 * fragmentation;
1054
1055 let simplicity_index = (100.0 - raw_complexity * 100.0).clamp(0.0, 100.0);
1056 let level = ComplexityLevel::from_score(simplicity_index);
1057
1058 (simplicity_index, level, factors)
1059}
1060
/// Document-level provenance signals: who produced the SBOM, when, and how
/// completely the production context is declared.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProvenanceMetrics {
    /// A creator of type Tool is listed.
    pub has_tool_creator: bool,
    /// A tool creator whose name appears to embed a version.
    pub has_tool_version: bool,
    /// A creator of type Organization is listed.
    pub has_org_creator: bool,
    /// At least one creator has a contact email.
    pub has_contact_email: bool,
    /// The document has a serial number.
    pub has_serial_number: bool,
    /// The document has a name.
    pub has_document_name: bool,
    /// Age of the creation timestamp in whole days (0 for future dates).
    pub timestamp_age_days: u32,
    /// Whether the document is younger than `FRESHNESS_THRESHOLD_DAYS`.
    pub is_fresh: bool,
    /// A primary component is identified.
    pub has_primary_component: bool,
    /// Declared lifecycle phase, if any (CycloneDX).
    pub lifecycle_phase: Option<String>,
    /// Declared completeness of the SBOM's contents.
    pub completeness_declaration: CompletenessDeclaration,
    /// The document carries a signature.
    pub has_signature: bool,
    /// At least one citation is recorded.
    pub has_citations: bool,
    /// Number of citations recorded on the document.
    pub citations_count: usize,
}
1097
/// Documents older than this many days are no longer considered fresh.
const FRESHNESS_THRESHOLD_DAYS: u32 = 90;
1100
1101impl ProvenanceMetrics {
1102 #[must_use]
1104 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
1105 let doc = &sbom.document;
1106
1107 let has_tool_creator = doc
1108 .creators
1109 .iter()
1110 .any(|c| c.creator_type == CreatorType::Tool);
1111 let has_tool_version = doc.creators.iter().any(|c| {
1112 c.creator_type == CreatorType::Tool
1113 && (c.name.contains(' ') || c.name.contains('/') || c.name.contains('@'))
1114 });
1115 let has_org_creator = doc
1116 .creators
1117 .iter()
1118 .any(|c| c.creator_type == CreatorType::Organization);
1119 let has_contact_email = doc.creators.iter().any(|c| c.email.is_some());
1120
1121 let age_days = (chrono::Utc::now() - doc.created).num_days().max(0) as u32;
1122
1123 Self {
1124 has_tool_creator,
1125 has_tool_version,
1126 has_org_creator,
1127 has_contact_email,
1128 has_serial_number: doc.serial_number.is_some(),
1129 has_document_name: doc.name.is_some(),
1130 timestamp_age_days: age_days,
1131 is_fresh: age_days < FRESHNESS_THRESHOLD_DAYS,
1132 has_primary_component: sbom.primary_component_id.is_some(),
1133 lifecycle_phase: doc.lifecycle_phase.clone(),
1134 completeness_declaration: doc.completeness_declaration.clone(),
1135 has_signature: doc.signature.is_some(),
1136 has_citations: doc.citations_count > 0,
1137 citations_count: doc.citations_count,
1138 }
1139 }
1140
1141 #[must_use]
1148 pub fn quality_score(&self, is_cyclonedx: bool) -> f32 {
1149 let mut score = 0.0;
1150 let mut total_weight = 0.0;
1151
1152 let completeness_declared =
1153 self.completeness_declaration != CompletenessDeclaration::Unknown;
1154
1155 let checks: &[(bool, f32)] = &[
1156 (self.has_tool_creator, 15.0),
1157 (self.has_tool_version, 5.0),
1158 (self.has_org_creator, 12.0),
1159 (self.has_contact_email, 8.0),
1160 (self.has_serial_number, 8.0),
1161 (self.has_document_name, 5.0),
1162 (self.is_fresh, 12.0),
1163 (self.has_primary_component, 12.0),
1164 (completeness_declared, 8.0),
1165 (self.has_signature, 5.0),
1166 ];
1167
1168 for &(present, weight) in checks {
1169 if present {
1170 score += weight;
1171 }
1172 total_weight += weight;
1173 }
1174
1175 if is_cyclonedx {
1177 let weight = 10.0;
1178 if self.lifecycle_phase.is_some() {
1179 score += weight;
1180 }
1181 total_weight += weight;
1182
1183 let citations_weight = 5.0;
1185 if self.has_citations {
1186 score += citations_weight;
1187 }
1188 total_weight += citations_weight;
1189 }
1190
1191 if total_weight > 0.0 {
1192 (score / total_weight) * 100.0
1193 } else {
1194 0.0
1195 }
1196 }
1197}
1198
/// How auditable the SBOM's components are: external references plus
/// document-level security contact information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditabilityMetrics {
    /// Components with a VCS external reference.
    pub components_with_vcs: usize,
    /// Components with a website external reference.
    pub components_with_website: usize,
    /// Components with an advisories external reference.
    pub components_with_advisories: usize,
    /// Components with at least one external reference of any type.
    pub components_with_any_external_ref: usize,
    /// The document names a security contact.
    pub has_security_contact: bool,
    /// The document carries a vulnerability-disclosure URL.
    pub has_vuln_disclosure_url: bool,
}
1219
1220impl AuditabilityMetrics {
1221 #[must_use]
1223 pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
1224 let mut with_vcs = 0;
1225 let mut with_website = 0;
1226 let mut with_advisories = 0;
1227 let mut with_any = 0;
1228
1229 for comp in sbom.components.values() {
1230 if comp.external_refs.is_empty() {
1231 continue;
1232 }
1233 with_any += 1;
1234
1235 let has_vcs = comp
1236 .external_refs
1237 .iter()
1238 .any(|r| r.ref_type == ExternalRefType::Vcs);
1239 let has_website = comp
1240 .external_refs
1241 .iter()
1242 .any(|r| r.ref_type == ExternalRefType::Website);
1243 let has_advisories = comp
1244 .external_refs
1245 .iter()
1246 .any(|r| r.ref_type == ExternalRefType::Advisories);
1247
1248 if has_vcs {
1249 with_vcs += 1;
1250 }
1251 if has_website {
1252 with_website += 1;
1253 }
1254 if has_advisories {
1255 with_advisories += 1;
1256 }
1257 }
1258
1259 Self {
1260 components_with_vcs: with_vcs,
1261 components_with_website: with_website,
1262 components_with_advisories: with_advisories,
1263 components_with_any_external_ref: with_any,
1264 has_security_contact: sbom.document.security_contact.is_some(),
1265 has_vuln_disclosure_url: sbom.document.vulnerability_disclosure_url.is_some(),
1266 }
1267 }
1268
1269 #[must_use]
1273 pub fn quality_score(&self, total_components: usize) -> f32 {
1274 if total_components == 0 {
1275 return 0.0;
1276 }
1277
1278 let ref_coverage =
1280 (self.components_with_any_external_ref as f32 / total_components as f32) * 40.0;
1281 let vcs_coverage = (self.components_with_vcs as f32 / total_components as f32) * 20.0;
1282
1283 let security_contact_score = if self.has_security_contact { 20.0 } else { 0.0 };
1285 let disclosure_score = if self.has_vuln_disclosure_url {
1286 20.0
1287 } else {
1288 0.0
1289 };
1290
1291 (ref_coverage + vcs_coverage + security_contact_score + disclosure_score).min(100.0)
1292 }
1293}
1294
/// Lifecycle/enrichment statistics (EOL, staleness) across components.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LifecycleMetrics {
    /// Components flagged end-of-life.
    pub eol_components: usize,
    /// Components with staleness level Stale or Abandoned.
    pub stale_components: usize,
    /// Components marked deprecated (level or flag).
    pub deprecated_components: usize,
    /// Components marked archived (level or flag).
    pub archived_components: usize,
    /// Components with a known newer version available.
    pub outdated_components: usize,
    /// Components for which any lifecycle data was available.
    pub enriched_components: usize,
    /// Percentage of components with lifecycle data (0.0–100.0).
    pub enrichment_coverage: f32,
}
1317
impl LifecycleMetrics {
    /// Aggregates lifecycle/enrichment data (EOL, staleness) across all
    /// components of the SBOM.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let total = sbom.components.len();
        let mut eol = 0;
        let mut stale = 0;
        let mut deprecated = 0;
        let mut archived = 0;
        let mut outdated = 0;
        let mut enriched = 0;

        for comp in sbom.components.values() {
            // A component counts as "enriched" when any lifecycle source
            // (EOL or staleness) supplied data for it.
            let has_lifecycle_data = comp.eol.is_some() || comp.staleness.is_some();
            if has_lifecycle_data {
                enriched += 1;
            }

            if let Some(ref eol_info) = comp.eol
                && eol_info.status == EolStatus::EndOfLife
            {
                eol += 1;
            }

            if let Some(ref stale_info) = comp.staleness {
                match stale_info.level {
                    StalenessLevel::Stale | StalenessLevel::Abandoned => stale += 1,
                    StalenessLevel::Deprecated => deprecated += 1,
                    StalenessLevel::Archived => archived += 1,
                    _ => {}
                }
                // NOTE(review): a component whose `level` is Deprecated (or
                // Archived) AND whose boolean flag is also set is counted
                // twice in the corresponding counter — confirm whether this
                // double count is intended.
                if stale_info.is_deprecated {
                    deprecated += 1;
                }
                if stale_info.is_archived {
                    archived += 1;
                }
                // NOTE(review): presence of `latest_version` is treated as
                // "outdated" — presumably enrichment only sets it when a
                // newer version exists; confirm against the enricher.
                if stale_info.latest_version.is_some() {
                    outdated += 1;
                }
            }
        }

        let coverage = if total > 0 {
            (enriched as f32 / total as f32) * 100.0
        } else {
            0.0
        };

        Self {
            eol_components: eol,
            stale_components: stale,
            deprecated_components: deprecated,
            archived_components: archived,
            outdated_components: outdated,
            enriched_components: enriched,
            enrichment_coverage: coverage,
        }
    }

    /// Whether any component had lifecycle data at all.
    #[must_use]
    pub fn has_data(&self) -> bool {
        self.enriched_components > 0
    }

    /// Lifecycle health score in `[0, 100]`, or `None` when no component
    /// carries lifecycle data. Starts at 100 and subtracts capped
    /// penalties: EOL (15 each, max 60), stale (5 each, max 30),
    /// deprecated/archived (3 each, max 20), outdated (1 each, max 10).
    #[must_use]
    pub fn quality_score(&self) -> Option<f32> {
        if !self.has_data() {
            return None;
        }

        let mut score = 100.0_f32;

        score -= (self.eol_components as f32 * 15.0).min(60.0);
        score -= (self.stale_components as f32 * 5.0).min(30.0);
        score -= ((self.deprecated_components + self.archived_components) as f32 * 3.0).min(20.0);
        score -= (self.outdated_components as f32 * 1.0).min(10.0);

        Some(score.clamp(0.0, 100.0))
    }
}
1412
/// Aggregated CBOM (cryptography bill of materials) metrics collected from a
/// normalized SBOM's cryptographic components. See
/// `CryptographyMetrics::from_sbom` for how each counter is populated.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CryptographyMetrics {
    // Number of components typed as `ComponentType::Cryptographic`.
    pub total_crypto_components: usize,
    // Crypto assets whose `asset_type` is `Algorithm`.
    pub algorithms_count: usize,
    // Crypto assets whose `asset_type` is `Certificate`.
    pub certificates_count: usize,
    // Crypto assets whose `asset_type` is `RelatedCryptoMaterial` (keys etc.).
    pub keys_count: usize,
    // Crypto assets whose `asset_type` is `Protocol`.
    pub protocols_count: usize,
    // Algorithms reporting `is_quantum_safe()`.
    pub quantum_safe_count: usize,
    // Non-quantum-safe algorithms with a NIST quantum security level of 0.
    pub quantum_vulnerable_count: usize,
    // Algorithms flagged weak by name (`is_weak_by_name`).
    pub weak_algorithm_count: usize,
    // Algorithms reporting a hybrid classical/post-quantum construction.
    pub hybrid_pqc_count: usize,
    // Certificates whose validity window has already ended.
    pub expired_certificates: usize,
    // Still-valid certificates expiring within 90 days.
    pub expiring_soon_certificates: usize,
    // Key material whose state is `CryptoMaterialState::Compromised`.
    pub compromised_keys: usize,
    // Keys below the size floor (< 128 symmetric, < 2048 otherwise).
    pub inadequate_key_sizes: usize,
    // Component names of the algorithms counted in `weak_algorithm_count`.
    pub weak_algorithm_names: Vec<String>,

    // --- Algorithm documentation completeness ---
    // Algorithms that declare an OID.
    pub algorithms_with_oid: usize,
    // Algorithms that declare an algorithm family.
    pub algorithms_with_family: usize,
    // Algorithms whose primitive is a known variant (not `Other(_)`).
    pub algorithms_with_primitive: usize,
    // Algorithms declaring a classical or NIST quantum security level.
    pub algorithms_with_security_level: usize,

    // --- Cross-references between crypto assets ---
    // Certificates referencing their signature algorithm.
    pub certs_with_signature_algo_ref: usize,
    // Key material referencing its algorithm.
    pub keys_with_algorithm_ref: usize,
    // Protocols listing at least one cipher suite.
    pub protocols_with_cipher_suites: usize,

    // --- Key lifecycle metadata ---
    // Keys with a declared lifecycle state.
    pub keys_with_state: usize,
    // Keys declaring what secures them (`secured_by`).
    pub keys_with_protection: usize,
    // Keys with a creation or activation date.
    pub keys_with_lifecycle_dates: usize,

    // Certificates carrying both not-before and not-after dates.
    pub certs_with_validity_dates: usize,
}
1483
impl CryptographyMetrics {
    /// Walks every component of `sbom` and tallies CBOM metrics for the
    /// cryptographic ones.
    ///
    /// Only components typed `ComponentType::Cryptographic` are considered;
    /// a cryptographic component without `crypto_properties` still counts
    /// toward `total_crypto_components` but contributes nothing else.
    #[must_use]
    pub fn from_sbom(sbom: &NormalizedSbom) -> Self {
        let mut m = Self::default();

        for comp in sbom.components.values() {
            if comp.component_type != ComponentType::Cryptographic {
                continue;
            }
            m.total_crypto_components += 1;

            let Some(cp) = &comp.crypto_properties else {
                continue;
            };

            match cp.asset_type {
                CryptoAssetType::Algorithm => {
                    m.algorithms_count += 1;
                    if cp.oid.is_some() {
                        m.algorithms_with_oid += 1;
                    }
                    if let Some(algo) = &cp.algorithm_properties {
                        if algo.algorithm_family.is_some() {
                            m.algorithms_with_family += 1;
                        }
                        // `Other(_)` means the primitive was not recognized,
                        // so it does not count as documented.
                        if !matches!(algo.primitive, CryptoPrimitive::Other(_)) {
                            m.algorithms_with_primitive += 1;
                        }
                        if algo.classical_security_level.is_some()
                            || algo.nist_quantum_security_level.is_some()
                        {
                            m.algorithms_with_security_level += 1;
                        }
                        // Safe and vulnerable are mutually exclusive here:
                        // NIST quantum level 0 marks a vulnerable algorithm
                        // only when it is not already quantum safe.
                        if algo.is_quantum_safe() {
                            m.quantum_safe_count += 1;
                        } else if algo.nist_quantum_security_level == Some(0) {
                            m.quantum_vulnerable_count += 1;
                        }
                        if algo.is_weak_by_name(&comp.name) {
                            m.weak_algorithm_count += 1;
                            m.weak_algorithm_names.push(comp.name.clone());
                        }
                        if algo.is_hybrid_pqc() {
                            m.hybrid_pqc_count += 1;
                        }
                    }
                }
                CryptoAssetType::Certificate => {
                    m.certificates_count += 1;
                    if let Some(cert) = &cp.certificate_properties {
                        if cert.not_valid_before.is_some() && cert.not_valid_after.is_some() {
                            m.certs_with_validity_dates += 1;
                        }
                        if cert.signature_algorithm_ref.is_some() {
                            m.certs_with_signature_algo_ref += 1;
                        }
                        // Expired wins over expiring-soon; the 90-day window
                        // only applies to still-valid certificates.
                        if cert.is_expired() {
                            m.expired_certificates += 1;
                        } else if cert.is_expiring_soon(90) {
                            m.expiring_soon_certificates += 1;
                        }
                    }
                }
                CryptoAssetType::RelatedCryptoMaterial => {
                    m.keys_count += 1;
                    if let Some(mat) = &cp.related_crypto_material_properties {
                        if mat.state.is_some() {
                            m.keys_with_state += 1;
                        }
                        if mat.secured_by.is_some() {
                            m.keys_with_protection += 1;
                        }
                        if mat.creation_date.is_some() || mat.activation_date.is_some() {
                            m.keys_with_lifecycle_dates += 1;
                        }
                        if mat.algorithm_ref.is_some() {
                            m.keys_with_algorithm_ref += 1;
                        }
                        if mat.state == Some(CryptoMaterialState::Compromised) {
                            m.compromised_keys += 1;
                        }
                        if let Some(size) = mat.size {
                            // Size floor: 128 for symmetric/secret keys, 2048
                            // for everything else (assumes `size` is in bits —
                            // TODO confirm against the model definition).
                            let is_symmetric = matches!(
                                mat.material_type,
                                crate::model::CryptoMaterialType::SymmetricKey
                                    | crate::model::CryptoMaterialType::SecretKey
                            );
                            if (is_symmetric && size < 128) || (!is_symmetric && size < 2048) {
                                m.inadequate_key_sizes += 1;
                            }
                        }
                    }
                }
                CryptoAssetType::Protocol => {
                    m.protocols_count += 1;
                    if let Some(proto) = &cp.protocol_properties
                        && !proto.cipher_suites.is_empty()
                    {
                        m.protocols_with_cipher_suites += 1;
                    }
                }
                // Other asset types have no dedicated counters.
                _ => {}
            }
        }

        m
    }

    /// `true` when the SBOM contained at least one cryptographic component.
    #[must_use]
    pub fn has_data(&self) -> bool {
        self.total_crypto_components > 0
    }

    /// Percentage of algorithms that are quantum safe.
    ///
    /// Returns 100 when no algorithms are present (vacuously ready); see
    /// [`Self::quantum_readiness_pct`] for the variant that returns 0 instead.
    #[must_use]
    pub fn quantum_readiness_score(&self) -> f32 {
        if self.algorithms_count == 0 {
            return 100.0;
        }
        (self.quantum_safe_count as f32 / self.algorithms_count as f32) * 100.0
    }

    /// Overall crypto-hygiene score in `0..=100`, or `None` when the SBOM has
    /// no cryptographic components.
    ///
    /// Starts at 100 and applies capped penalties for weak algorithms,
    /// quantum-vulnerable algorithms, expired certificates, compromised keys,
    /// inadequate key sizes and soon-expiring certificates; hybrid PQC usage
    /// earns a small capped bonus.
    #[must_use]
    pub fn quality_score(&self) -> Option<f32> {
        if !self.has_data() {
            return None;
        }

        let mut score = 100.0_f32;

        score -= (self.weak_algorithm_count as f32 * 15.0).min(50.0);
        score -= (self.quantum_vulnerable_count as f32 * 8.0).min(40.0);
        score -= (self.expired_certificates as f32 * 10.0).min(30.0);
        score -= (self.compromised_keys as f32 * 20.0).min(40.0);
        score -= (self.inadequate_key_sizes as f32 * 5.0).min(20.0);
        score -= (self.expiring_soon_certificates as f32 * 3.0).min(15.0);
        score += (self.hybrid_pqc_count as f32 * 2.0).min(10.0);

        Some(score.clamp(0.0, 100.0))
    }

    /// How completely algorithms are documented, weighted: family 40%,
    /// primitive 30%, security level 30%. Returns 100 when there are no
    /// algorithms to document.
    #[must_use]
    pub fn crypto_completeness_score(&self) -> f32 {
        if self.algorithms_count == 0 {
            return 100.0;
        }
        let family_pct = self.algorithms_with_family as f32 / self.algorithms_count as f32;
        let primitive_pct = self.algorithms_with_primitive as f32 / self.algorithms_count as f32;
        let level_pct = self.algorithms_with_security_level as f32 / self.algorithms_count as f32;
        (family_pct * 40.0 + primitive_pct * 30.0 + level_pct * 30.0).clamp(0.0, 100.0)
    }

    /// OID coverage across algorithms, as a percentage (100 when there are
    /// no algorithms).
    #[must_use]
    pub fn crypto_identifier_score(&self) -> f32 {
        if self.algorithms_count == 0 {
            return 100.0;
        }
        let oid_pct = self.algorithms_with_oid as f32 / self.algorithms_count as f32;
        (oid_pct * 100.0).clamp(0.0, 100.0)
    }

    /// Penalizes weak algorithms (15 each, cap 60), inadequate key sizes
    /// (8 each, cap 30) and the quantum-vulnerable share (up to 30).
    /// Returns 100 when there are no algorithms.
    #[must_use]
    pub fn algorithm_strength_score(&self) -> f32 {
        if self.algorithms_count == 0 {
            return 100.0;
        }
        let mut score = 100.0_f32;
        score -= (self.weak_algorithm_count as f32 * 15.0).min(60.0);
        score -= (self.inadequate_key_sizes as f32 * 8.0).min(30.0);
        // NOTE(review): this guard is redundant — the zero case already
        // returned above — but it is kept byte-for-byte here.
        if self.algorithms_count > 0 {
            let vuln_pct = self.quantum_vulnerable_count as f32 / self.algorithms_count as f32;
            score -= vuln_pct * 30.0;
        }
        score.clamp(0.0, 100.0)
    }

    /// Share of linkable assets (certificates, keys, protocols) that resolve
    /// their cross-reference (signature algorithm, algorithm, cipher suites).
    /// Returns 100 when nothing is linkable.
    #[must_use]
    pub fn crypto_dependency_score(&self) -> f32 {
        let linkable = self.certificates_count + self.keys_count + self.protocols_count;
        if linkable == 0 {
            return 100.0;
        }
        let resolved = self.certs_with_signature_algo_ref
            + self.keys_with_algorithm_ref
            + self.protocols_with_cipher_suites;
        let pct = resolved as f32 / linkable as f32;
        (pct * 100.0).clamp(0.0, 100.0)
    }

    /// Lifecycle health of keys and certificates.
    ///
    /// When keys exist, the score is blended 50/50 with key-metadata
    /// completeness (state 40%, protection 30%, lifecycle dates 30%), then
    /// penalized for compromised keys and inadequate sizes. When certificates
    /// exist, missing validity dates, expired and soon-expiring certificates
    /// subtract further. Result is clamped to `0..=100`.
    #[must_use]
    pub fn crypto_lifecycle_score(&self) -> f32 {
        let mut score = 100.0_f32;

        if self.keys_count > 0 {
            let state_pct = self.keys_with_state as f32 / self.keys_count as f32;
            let protection_pct = self.keys_with_protection as f32 / self.keys_count as f32;
            let lifecycle_pct = self.keys_with_lifecycle_dates as f32 / self.keys_count as f32;
            let key_completeness =
                (state_pct * 0.4 + protection_pct * 0.3 + lifecycle_pct * 0.3) * 100.0;
            score = score * 0.5 + key_completeness * 0.5;
            score -= (self.compromised_keys as f32 * 20.0).min(40.0);
            score -= (self.inadequate_key_sizes as f32 * 5.0).min(20.0);
        }

        if self.certificates_count > 0 {
            let validity_pct =
                self.certs_with_validity_dates as f32 / self.certificates_count as f32;
            score -= (1.0 - validity_pct) * 15.0;
            score -= (self.expired_certificates as f32 * 15.0).min(45.0);
            score -= (self.expiring_soon_certificates as f32 * 5.0).min(20.0);
        }

        score.clamp(0.0, 100.0)
    }

    /// Post-quantum readiness: quantum-safe share contributes up to 60
    /// points, any hybrid PQC use adds 15, and absence of weak algorithms
    /// adds 25 (reduced 5 per weak algorithm, floored at 0). Returns 100
    /// when there are no algorithms.
    #[must_use]
    pub fn pqc_readiness_score(&self) -> f32 {
        if self.algorithms_count == 0 {
            return 100.0;
        }
        let mut score = 0.0_f32;
        let qs_pct = self.quantum_safe_count as f32 / self.algorithms_count as f32;
        score += qs_pct * 60.0;
        if self.hybrid_pqc_count > 0 {
            score += 15.0;
        }
        if self.weak_algorithm_count == 0 {
            score += 25.0;
        } else {
            score += (25.0 - self.weak_algorithm_count as f32 * 5.0).max(0.0);
        }
        score.clamp(0.0, 100.0)
    }

    /// Quantum-safe share of algorithms as a display percentage. Unlike
    /// [`Self::quantum_readiness_score`], zero algorithms yields 0 here.
    #[must_use]
    pub fn quantum_readiness_pct(&self) -> f32 {
        if self.algorithms_count == 0 {
            return 0.0;
        }
        (self.quantum_safe_count as f32 / self.algorithms_count as f32) * 100.0
    }

    /// Fixed short labels for the eight CBOM scoring categories
    /// (presumably column/axis labels for a summary display — confirm at
    /// call sites).
    #[must_use]
    pub const fn cbom_category_labels() -> [&'static str; 8] {
        ["Crpt", "OIDs", "Algo", "Refs", "Life", "PQC", "Prov", "Lic"]
    }
}
1753
/// Cheap structural check that a string looks like a package-URL: it must
/// carry the `pkg:` scheme and contain at least one `/` separator. This is
/// intentionally shallow and does not validate the full purl specification.
fn is_valid_purl(purl: &str) -> bool {
    match purl.strip_prefix("pkg:") {
        Some(rest) => rest.contains('/'),
        None => false,
    }
}
1762
/// Extracts the ecosystem ("type") segment of a purl, i.e. the text between
/// the `pkg:` scheme and the first `/`. Returns `None` when the scheme or
/// the separator is missing.
fn extract_ecosystem_from_purl(purl: &str) -> Option<String> {
    let rest = purl.strip_prefix("pkg:")?;
    let (ecosystem, _) = rest.split_once('/')?;
    Some(ecosystem.to_string())
}
1772
/// Accepts CPE 2.3 formatted strings (`cpe:2.3:`) and legacy CPE 2.2 URI
/// bindings (`cpe:/`); everything else is rejected.
fn is_valid_cpe(cpe: &str) -> bool {
    ["cpe:2.3:", "cpe:/"]
        .iter()
        .any(|prefix| cpe.starts_with(prefix))
}
1777
/// Heuristic SPDX validity check: the trimmed expression is accepted when it
/// matches one of the common license identifiers below, or when it looks like
/// a compound SPDX expression (contains a spaced `AND`/`OR`/`WITH` operator).
/// Operand identifiers inside compound expressions are NOT validated.
fn is_valid_spdx_license(expr: &str) -> bool {
    const COMMON_SPDX: &[&str] = &[
        "MIT",
        "Apache-2.0",
        "GPL-2.0",
        "GPL-3.0",
        "BSD-2-Clause",
        "BSD-3-Clause",
        "ISC",
        "MPL-2.0",
        "LGPL-2.1",
        "LGPL-3.0",
        "AGPL-3.0",
        "Unlicense",
        "CC0-1.0",
        "0BSD",
        "EPL-2.0",
        "CDDL-1.0",
        "Artistic-2.0",
        "GPL-2.0-only",
        "GPL-2.0-or-later",
        "GPL-3.0-only",
        "GPL-3.0-or-later",
        "LGPL-2.1-only",
        "LGPL-2.1-or-later",
        "LGPL-3.0-only",
        "LGPL-3.0-or-later",
    ];

    let candidate = expr.trim();
    if COMMON_SPDX.contains(&candidate) {
        return true;
    }
    // SPDX operators are case-sensitive and must be surrounded by spaces.
    [" AND ", " OR ", " WITH "]
        .iter()
        .any(|op| candidate.contains(op))
}
1815
/// Returns `true` when the trimmed expression exactly matches one of the
/// SPDX identifiers that the SPDX license list has deprecated (mostly the
/// pre-`-only`/`-or-later` GNU identifiers plus a few renamed licenses).
fn is_deprecated_spdx_license(expr: &str) -> bool {
    const DEPRECATED: &[&str] = &[
        "GPL-2.0",
        "GPL-2.0+",
        "GPL-3.0",
        "GPL-3.0+",
        "LGPL-2.0",
        "LGPL-2.0+",
        "LGPL-2.1",
        "LGPL-2.1+",
        "LGPL-3.0",
        "LGPL-3.0+",
        "AGPL-1.0",
        "AGPL-3.0",
        "GFDL-1.1",
        "GFDL-1.2",
        "GFDL-1.3",
        "BSD-2-Clause-FreeBSD",
        "BSD-2-Clause-NetBSD",
        "eCos-2.0",
        "Nunit",
        "StandardML-NJ",
        "wxWindows",
    ];

    let candidate = expr.trim();
    DEPRECATED.iter().any(|id| *id == candidate)
}
1847
/// Flags licenses commonly treated as restrictive/copyleft by matching the
/// upper-cased, trimmed expression against a set of identifier prefixes.
fn is_restrictive_license(expr: &str) -> bool {
    const RESTRICTIVE_PREFIXES: &[&str] = &[
        "GPL", "LGPL", "AGPL", "EUPL", "SSPL", "OSL", "CPAL", "CC-BY-SA", "CC-BY-NC",
    ];
    let normalized = expr.trim().to_uppercase();
    RESTRICTIVE_PREFIXES
        .iter()
        .any(|prefix| normalized.starts_with(prefix))
}
1864
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_purl_validation() {
        assert!(is_valid_purl("pkg:npm/@scope/name@1.0.0"));
        assert!(is_valid_purl("pkg:maven/group/artifact@1.0"));
        assert!(!is_valid_purl("npm:something"));
        assert!(!is_valid_purl("invalid"));
    }

    #[test]
    fn test_cpe_validation() {
        assert!(is_valid_cpe("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*"));
        assert!(is_valid_cpe("cpe:/a:vendor:product:1.0"));
        assert!(!is_valid_cpe("something:else"));
    }

    #[test]
    fn test_spdx_license_validation() {
        assert!(is_valid_spdx_license("MIT"));
        assert!(is_valid_spdx_license("Apache-2.0"));
        assert!(is_valid_spdx_license("MIT AND Apache-2.0"));
        assert!(is_valid_spdx_license("GPL-2.0 OR MIT"));
    }

    #[test]
    fn test_strong_hash_classification() {
        assert!(is_strong_hash(&HashAlgorithm::Sha256));
        assert!(is_strong_hash(&HashAlgorithm::Sha3_256));
        assert!(is_strong_hash(&HashAlgorithm::Blake3));
        assert!(!is_strong_hash(&HashAlgorithm::Md5));
        assert!(!is_strong_hash(&HashAlgorithm::Sha1));
        assert!(!is_strong_hash(&HashAlgorithm::Other("custom".to_string())));
    }

    #[test]
    fn test_deprecated_license_detection() {
        assert!(is_deprecated_spdx_license("GPL-2.0"));
        assert!(is_deprecated_spdx_license("LGPL-2.1"));
        assert!(is_deprecated_spdx_license("AGPL-3.0"));
        // The modern `-only` replacement identifiers are not deprecated.
        assert!(!is_deprecated_spdx_license("GPL-2.0-only"));
        assert!(!is_deprecated_spdx_license("MIT"));
        assert!(!is_deprecated_spdx_license("Apache-2.0"));
    }

    #[test]
    fn test_restrictive_license_detection() {
        assert!(is_restrictive_license("GPL-3.0-only"));
        assert!(is_restrictive_license("LGPL-2.1-or-later"));
        assert!(is_restrictive_license("AGPL-3.0-only"));
        assert!(is_restrictive_license("EUPL-1.2"));
        assert!(is_restrictive_license("CC-BY-SA-4.0"));
        assert!(!is_restrictive_license("MIT"));
        assert!(!is_restrictive_license("Apache-2.0"));
        assert!(!is_restrictive_license("BSD-3-Clause"));
    }

    #[test]
    fn test_hash_quality_score_no_components() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 0,
            components_with_strong_hash: 0,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 0,
        };
        assert_eq!(metrics.quality_score(0), 0.0);
    }

    #[test]
    fn test_hash_quality_score_all_strong() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 10,
            components_with_strong_hash: 10,
            components_with_weak_only: 0,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 10,
        };
        assert_eq!(metrics.quality_score(10), 100.0);
    }

    #[test]
    fn test_hash_quality_score_weak_only_penalty() {
        let metrics = HashQualityMetrics {
            components_with_any_hash: 10,
            components_with_strong_hash: 0,
            components_with_weak_only: 10,
            algorithm_distribution: BTreeMap::new(),
            total_hashes: 10,
        };
        // Full hash coverage but every component uses only weak algorithms.
        assert_eq!(metrics.quality_score(10), 50.0);
    }

    #[test]
    fn test_lifecycle_no_enrichment_returns_none() {
        let metrics = LifecycleMetrics {
            eol_components: 0,
            stale_components: 0,
            deprecated_components: 0,
            archived_components: 0,
            outdated_components: 0,
            enriched_components: 0,
            enrichment_coverage: 0.0,
        };
        assert!(!metrics.has_data());
        assert!(metrics.quality_score().is_none());
    }

    #[test]
    fn test_lifecycle_with_eol_penalty() {
        let metrics = LifecycleMetrics {
            eol_components: 2,
            stale_components: 0,
            deprecated_components: 0,
            archived_components: 0,
            outdated_components: 0,
            enriched_components: 10,
            enrichment_coverage: 100.0,
        };
        // 100 - 2 * 15 (EOL penalty) = 70.
        assert_eq!(metrics.quality_score(), Some(70.0));
    }

    #[test]
    fn test_cycle_detection_no_cycles() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("a", vec!["b"]), ("b", vec!["c"])]);
        let all_nodes = vec!["a", "b", "c"];
        assert_eq!(detect_cycles(&all_nodes, &children), 0);
    }

    #[test]
    fn test_cycle_detection_with_cycle() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("a", vec!["b"]), ("b", vec!["c"]), ("c", vec!["a"])]);
        let all_nodes = vec!["a", "b", "c"];
        assert_eq!(detect_cycles(&all_nodes, &children), 1);
    }

    #[test]
    fn test_depth_computation() {
        let children: HashMap<&str, Vec<&str>> =
            HashMap::from([("root", vec!["a", "b"]), ("a", vec!["c"])]);
        let roots = vec!["root"];
        let (max_d, avg_d) = compute_depth(&roots, &children);
        // Longest chain is root -> a -> c, i.e. depth 2.
        assert_eq!(max_d, Some(2));
        assert!(avg_d.is_some());
    }

    #[test]
    fn test_depth_empty_roots() {
        let children: HashMap<&str, Vec<&str>> = HashMap::new();
        let roots: Vec<&str> = vec![];
        let (max_d, avg_d) = compute_depth(&roots, &children);
        assert_eq!(max_d, None);
        assert_eq!(avg_d, None);
    }

    #[test]
    fn test_provenance_quality_score() {
        let metrics = ProvenanceMetrics {
            has_tool_creator: true,
            has_tool_version: true,
            has_org_creator: true,
            has_contact_email: true,
            has_serial_number: true,
            has_document_name: true,
            timestamp_age_days: 10,
            is_fresh: true,
            has_primary_component: true,
            lifecycle_phase: Some("build".to_string()),
            completeness_declaration: CompletenessDeclaration::Complete,
            has_signature: true,
            has_citations: true,
            citations_count: 3,
        };
        assert_eq!(metrics.quality_score(true), 100.0);
    }

    #[test]
    fn test_provenance_score_without_cyclonedx() {
        let metrics = ProvenanceMetrics {
            has_tool_creator: true,
            has_tool_version: true,
            has_org_creator: true,
            has_contact_email: true,
            has_serial_number: true,
            has_document_name: true,
            timestamp_age_days: 10,
            is_fresh: true,
            has_primary_component: true,
            lifecycle_phase: None,
            completeness_declaration: CompletenessDeclaration::Complete,
            has_signature: true,
            has_citations: false,
            citations_count: 0,
        };
        assert_eq!(metrics.quality_score(false), 100.0);
    }

    #[test]
    fn test_complexity_empty_graph() {
        let (simplicity, level, factors) = compute_complexity(0, 0, 0, 0, 0, 0, 0);
        assert_eq!(simplicity, 100.0);
        assert_eq!(level, ComplexityLevel::Low);
        assert_eq!(factors.dependency_volume, 0.0);
    }

    #[test]
    fn test_complexity_single_node() {
        let (simplicity, level, _) = compute_complexity(0, 1, 0, 0, 0, 1, 1);
        assert!(
            simplicity >= 80.0,
            "Single node simplicity {simplicity} should be >= 80"
        );
        assert_eq!(level, ComplexityLevel::Low);
    }

    #[test]
    fn test_complexity_monotonic_edges() {
        let (s1, _, _) = compute_complexity(5, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(20, 10, 2, 3, 0, 1, 1);
        assert!(
            s2 <= s1,
            "More edges should not increase simplicity: {s2} vs {s1}"
        );
    }

    #[test]
    fn test_complexity_monotonic_cycles() {
        let (s1, _, _) = compute_complexity(10, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(10, 10, 2, 3, 3, 1, 1);
        assert!(
            s2 <= s1,
            "More cycles should not increase simplicity: {s2} vs {s1}"
        );
    }

    #[test]
    fn test_complexity_monotonic_depth() {
        let (s1, _, _) = compute_complexity(10, 10, 2, 3, 0, 1, 1);
        let (s2, _, _) = compute_complexity(10, 10, 10, 3, 0, 1, 1);
        assert!(
            s2 <= s1,
            "More depth should not increase simplicity: {s2} vs {s1}"
        );
    }

    #[test]
    fn test_complexity_graph_skipped() {
        let (simplicity, _, _) = compute_complexity(100, 50, 5, 10, 2, 5, 3);
        assert!((0.0..=100.0).contains(&simplicity));
    }

    #[test]
    fn test_complexity_level_bands() {
        assert_eq!(ComplexityLevel::from_score(100.0), ComplexityLevel::Low);
        assert_eq!(ComplexityLevel::from_score(75.0), ComplexityLevel::Low);
        assert_eq!(ComplexityLevel::from_score(74.0), ComplexityLevel::Moderate);
        assert_eq!(ComplexityLevel::from_score(50.0), ComplexityLevel::Moderate);
        assert_eq!(ComplexityLevel::from_score(49.0), ComplexityLevel::High);
        assert_eq!(ComplexityLevel::from_score(25.0), ComplexityLevel::High);
        assert_eq!(ComplexityLevel::from_score(24.0), ComplexityLevel::VeryHigh);
        assert_eq!(ComplexityLevel::from_score(0.0), ComplexityLevel::VeryHigh);
    }

    #[test]
    fn test_completeness_declaration_display() {
        assert_eq!(CompletenessDeclaration::Complete.to_string(), "complete");
        assert_eq!(
            CompletenessDeclaration::IncompleteFirstPartyOnly.to_string(),
            "incomplete (first-party only)"
        );
        assert_eq!(CompletenessDeclaration::Unknown.to_string(), "unknown");
    }

    #[test]
    fn crypto_completeness_all_documented() {
        let m = CryptographyMetrics {
            algorithms_count: 4,
            algorithms_with_family: 4,
            algorithms_with_primitive: 4,
            algorithms_with_security_level: 4,
            ..Default::default()
        };
        let score = m.crypto_completeness_score();
        assert!(
            (score - 100.0).abs() < 0.1,
            "fully documented → 100, got {score}"
        );
    }

    #[test]
    fn crypto_completeness_partial() {
        let m = CryptographyMetrics {
            algorithms_count: 4,
            algorithms_with_family: 2,
            algorithms_with_primitive: 4,
            algorithms_with_security_level: 0,
            ..Default::default()
        };
        let score = m.crypto_completeness_score();
        // family 0.5 * 40 + primitive 1.0 * 30 + level 0.0 * 30 = 50.
        assert!((score - 50.0).abs() < 0.1, "partial → 50, got {score}");
    }

    #[test]
    fn crypto_identifier_full_oid_coverage() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            algorithms_with_oid: 5,
            ..Default::default()
        };
        assert!((m.crypto_identifier_score() - 100.0).abs() < 0.1);
    }

    #[test]
    fn crypto_identifier_no_oids() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            algorithms_with_oid: 0,
            ..Default::default()
        };
        assert!((m.crypto_identifier_score() - 0.0).abs() < 0.1);
    }

    #[test]
    fn algorithm_strength_weak_penalty() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            weak_algorithm_count: 2,
            ..Default::default()
        };
        let score = m.algorithm_strength_score();
        // 100 - 2 * 15 = 70.
        assert!((score - 70.0).abs() < 0.1, "2 weak → 70, got {score}");
    }

    #[test]
    fn algorithm_strength_quantum_vulnerable() {
        let m = CryptographyMetrics {
            algorithms_count: 10,
            quantum_vulnerable_count: 10,
            ..Default::default()
        };
        let score = m.algorithm_strength_score();
        // 100 - (10/10) * 30 = 70.
        assert!(
            (score - 70.0).abs() < 0.1,
            "all quantum vuln → 70, got {score}"
        );
    }

    #[test]
    fn crypto_lifecycle_compromised_keys() {
        let m = CryptographyMetrics {
            keys_count: 3,
            keys_with_state: 3,
            keys_with_protection: 3,
            keys_with_lifecycle_dates: 3,
            compromised_keys: 1,
            ..Default::default()
        };
        let score = m.crypto_lifecycle_score();
        // Fully documented keys keep the blended base at 100; the single
        // compromised key subtracts 20, leaving 80.
        assert!(score < 85.0);
        assert!(score > 50.0);
    }

    #[test]
    fn crypto_lifecycle_expired_certs() {
        let m = CryptographyMetrics {
            certificates_count: 4,
            certs_with_validity_dates: 4,
            expired_certificates: 2,
            expiring_soon_certificates: 1,
            ..Default::default()
        };
        let score = m.crypto_lifecycle_score();
        // 100 - 2 * 15 (expired) - 1 * 5 (expiring) = 65.
        assert!(score < 70.0);
    }

    #[test]
    fn pqc_readiness_all_quantum_safe() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            quantum_safe_count: 5,
            hybrid_pqc_count: 2,
            weak_algorithm_count: 0,
            ..Default::default()
        };
        let score = m.pqc_readiness_score();
        assert!(
            (score - 100.0).abs() < 0.1,
            "all safe + hybrid → 100, got {score}"
        );
    }

    #[test]
    fn pqc_readiness_no_quantum_safe() {
        let m = CryptographyMetrics {
            algorithms_count: 5,
            quantum_safe_count: 0,
            hybrid_pqc_count: 0,
            weak_algorithm_count: 0,
            ..Default::default()
        };
        let score = m.pqc_readiness_score();
        // 0 * 60 + 0 (no hybrid) + 25 (no weak algorithms) = 25.
        assert!(
            (score - 25.0).abs() < 0.1,
            "no safe, no weak → 25, got {score}"
        );
    }

    #[test]
    fn crypto_dependency_all_resolved() {
        let m = CryptographyMetrics {
            certificates_count: 2,
            keys_count: 3,
            protocols_count: 1,
            certs_with_signature_algo_ref: 2,
            keys_with_algorithm_ref: 3,
            protocols_with_cipher_suites: 1,
            ..Default::default()
        };
        assert!((m.crypto_dependency_score() - 100.0).abs() < 0.1);
    }

    #[test]
    fn crypto_dependency_none_resolved() {
        let m = CryptographyMetrics {
            certificates_count: 2,
            keys_count: 3,
            protocols_count: 1,
            ..Default::default()
        };
        assert!((m.crypto_dependency_score() - 0.0).abs() < 0.1);
    }

    #[test]
    fn quality_score_none_when_no_crypto() {
        let m = CryptographyMetrics::default();
        assert!(m.quality_score().is_none());
    }

    #[test]
    fn quantum_readiness_pct_zero_algorithms() {
        let m = CryptographyMetrics::default();
        assert!((m.quantum_readiness_pct() - 0.0).abs() < 0.01);
    }
}