1//! # tokmd-analysis-types
2//!
3//! **Tier 0 (Analysis Contract)**
4//!
5//! Pure data structures for analysis receipts. No I/O or business logic.
6//!
7//! ## What belongs here
8//! * Analysis-specific receipt types and findings
9//! * Schema definitions for analysis outputs
10//! * Type enums for classification results
11//!
12//! ## What does NOT belong here
13//! * Analysis computation logic (use tokmd-analysis)
14//! * Formatting logic (use tokmd-analysis-format)
15//! * File I/O operations
16
17pub mod findings;
18
19use std::collections::BTreeMap;
20
21use serde::{Deserialize, Serialize};
22use tokmd_types::{ScanStatus, ToolInfo};
23
/// Schema version for analysis receipts.
/// Bump this (and add a history line) whenever the receipt shape changes.
/// v7: Added coupling normalization (Jaccard/Lift), commit intent classification, near-duplicate detection.
/// v8: Near-dup clusters, selection metadata, max_pairs guardrail, runtime stats.
pub const ANALYSIS_SCHEMA_VERSION: u32 = 8;
28
/// Root payload of an analysis run; this is the structure serialized as the
/// analysis receipt (`schema_version` = [`ANALYSIS_SCHEMA_VERSION`]).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisReceipt {
    /// Receipt schema version (see [`ANALYSIS_SCHEMA_VERSION`]).
    pub schema_version: u32,
    /// Generation time, milliseconds since the Unix epoch.
    pub generated_at_ms: u128,
    /// Producing tool info (shared type from `tokmd_types`).
    pub tool: ToolInfo,
    /// Run mode label. NOTE(review): free-form string; allowed values are set
    /// by the producer — confirm against tokmd-analysis.
    pub mode: String,
    /// Overall scan status (shared type from `tokmd_types`).
    pub status: ScanStatus,
    /// Non-fatal warnings accumulated during the run.
    pub warnings: Vec<String>,
    /// Provenance of the analyzed data.
    pub source: AnalysisSource,
    /// Echo of the arguments the analysis was run with.
    pub args: AnalysisArgsMeta,
    // Optional analysis sections: each is `None` when that analysis was not
    // requested or produced nothing.
    pub archetype: Option<Archetype>,
    pub topics: Option<TopicClouds>,
    pub entropy: Option<EntropyReport>,
    pub predictive_churn: Option<PredictiveChurnReport>,
    pub corporate_fingerprint: Option<CorporateFingerprint>,
    pub license: Option<LicenseReport>,
    pub derived: Option<DerivedReport>,
    pub assets: Option<AssetReport>,
    pub deps: Option<DependencyReport>,
    pub git: Option<GitReport>,
    pub imports: Option<ImportReport>,
    pub dup: Option<DuplicateReport>,
    pub complexity: Option<ComplexityReport>,
    pub api_surface: Option<ApiSurfaceReport>,
    pub fun: Option<FunReport>,
}
55
/// Provenance of the analyzed data: inputs, the export/base receipts the
/// analysis was derived from, and module-resolution settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisSource {
    /// Input paths/specs as provided to the run.
    pub inputs: Vec<String>,
    /// Path of the export file used, if any.
    pub export_path: Option<String>,
    /// Path of the base receipt used, if any.
    pub base_receipt_path: Option<String>,
    /// Schema version of the export file, when known.
    pub export_schema_version: Option<u32>,
    /// Generation timestamp (ms since epoch) of the export file, when known.
    pub export_generated_at_ms: Option<u128>,
    /// Signature of the base receipt. NOTE(review): hash format not visible
    /// here — confirm against the producer.
    pub base_signature: Option<String>,
    /// Root directories used when grouping files into modules.
    pub module_roots: Vec<String>,
    /// Path depth used when deriving module names.
    pub module_depth: usize,
    /// Child-row handling mode. NOTE(review): free-form string; allowed values
    /// are defined by the producer — confirm.
    pub children: String,
}
68
/// Echo of user-supplied analysis arguments, recorded for reproducibility.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisArgsMeta {
    /// Analysis preset name.
    pub preset: String,
    /// Output format name.
    pub format: String,
    /// Context-window size in tokens (drives `ContextWindowReport`), if set.
    pub window_tokens: Option<usize>,
    /// Explicit git on/off flag; `None` means the default was used.
    pub git: Option<bool>,
    // Guardrail caps; each is `None` when unlimited.
    pub max_files: Option<usize>,
    pub max_bytes: Option<u64>,
    pub max_commits: Option<usize>,
    pub max_commit_files: Option<usize>,
    pub max_file_bytes: Option<u64>,
    /// Granularity for the import graph (see `ImportReport::granularity`).
    pub import_granularity: String,
}
82
// ---------------
// Project context
// ---------------

/// Detected project archetype plus the evidence strings that led to the
/// classification. NOTE(review): the set of `kind` values is defined by the
/// classifier in tokmd-analysis — confirm there.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Archetype {
    pub kind: String,
    pub evidence: Vec<String>,
}
92
// -----------------
// Semantic topics
// -----------------

/// Ranked topic terms per module plus an overall cloud.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TopicClouds {
    /// Module name -> ranked terms for that module.
    pub per_module: BTreeMap<String, Vec<TopicTerm>>,
    /// Ranked terms across the whole codebase.
    pub overall: Vec<TopicTerm>,
}

/// A single scored term. The `tf`/`df` names suggest term-frequency and
/// document-frequency counts feeding the `score`, but the scoring formula
/// lives in the analysis crate — confirm there before relying on it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TopicTerm {
    pub term: String,
    pub score: f64,
    /// Term frequency.
    pub tf: u32,
    /// Document frequency.
    pub df: u32,
}
110
// -----------------
// Entropy profiling
// -----------------

/// Files whose byte entropy looks unusual.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EntropyReport {
    pub suspects: Vec<EntropyFinding>,
}

/// One entropy measurement for a file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EntropyFinding {
    pub path: String,
    pub module: String,
    /// Entropy of the sampled content in bits per byte
    /// (8.0 is the maximum, reached by uniformly random bytes).
    pub entropy_bits_per_byte: f32,
    /// Number of bytes sampled for the measurement.
    pub sample_bytes: u32,
    pub class: EntropyClass,
}

/// Bucketed classification of an entropy measurement.
/// Serialized in snake_case (e.g. "suspicious").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum EntropyClass {
    Low,
    Normal,
    Suspicious,
    High,
}
137
// -----------------
// Predictive churn
// -----------------

/// Per-module churn trend extrapolation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictiveChurnReport {
    /// Module name -> fitted churn trend.
    pub per_module: BTreeMap<String, ChurnTrend>,
}

/// A fitted trend for one module's churn.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChurnTrend {
    /// Slope of the fitted line. NOTE(review): time-step units are set by the
    /// producer — confirm.
    pub slope: f64,
    /// Goodness of fit (coefficient of determination).
    pub r2: f64,
    /// Net change over the recent window; may be negative.
    pub recent_change: i64,
    pub classification: TrendClass,
}

/// Direction bucket for a trend. Serialized in snake_case.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TrendClass {
    Rising,
    Flat,
    Falling,
}
162
// ---------------------
// Corporate fingerprint
// ---------------------

/// Distribution of commit domains. NOTE(review): presumably author-email
/// domains — confirm against the git analysis.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CorporateFingerprint {
    pub domains: Vec<DomainStat>,
}

/// Commit count and share for one domain.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DomainStat {
    pub domain: String,
    pub commits: u32,
    /// Share of commits for this domain, as a percentage.
    pub pct: f32,
}
178
// -------------
// License radar
// -------------

/// License detection results.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LicenseReport {
    /// All detected license candidates.
    pub findings: Vec<LicenseFinding>,
    /// SPDX id chosen as the effective license, when one was determined.
    pub effective: Option<String>,
}

/// One detected license candidate.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LicenseFinding {
    /// SPDX identifier (e.g. "MIT").
    pub spdx: String,
    /// Detection confidence. NOTE(review): presumed range [0.0, 1.0] — confirm.
    pub confidence: f32,
    /// File the license was detected in.
    pub source_path: String,
    pub source_kind: LicenseSourceKind,
}

/// How the license was detected: from package metadata or from license text.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LicenseSourceKind {
    Metadata,
    Text,
}
203
// -----------------
// Derived analytics
// -----------------

/// Metrics derived from the scan's line/byte/token counts (no git required).
/// `Option` sections are `None` when not computed for this run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DerivedReport {
    pub totals: DerivedTotals,
    pub doc_density: RatioReport,
    pub whitespace: RatioReport,
    pub verbosity: RateReport,
    pub max_file: MaxFileReport,
    pub lang_purity: LangPurityReport,
    pub nesting: NestingReport,
    pub test_density: TestDensityReport,
    pub boilerplate: BoilerplateReport,
    pub polyglot: PolyglotReport,
    pub distribution: DistributionReport,
    pub histogram: Vec<HistogramBucket>,
    pub top: TopOffenders,
    /// NOTE(review): presumably a rendered tree view of the repo — confirm.
    pub tree: Option<String>,
    pub reading_time: ReadingTimeReport,
    /// Present only when a window size was supplied (see `AnalysisArgsMeta`).
    pub context_window: Option<ContextWindowReport>,
    pub cocomo: Option<CocomoReport>,
    pub todo: Option<TodoReport>,
    pub integrity: IntegrityReport,
}
230
/// Aggregate counts across all scanned files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DerivedTotals {
    pub files: usize,
    pub code: usize,
    pub comments: usize,
    pub blanks: usize,
    pub lines: usize,
    pub bytes: usize,
    pub tokens: usize,
}

/// A ratio broken down overall, by language, and by module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RatioReport {
    pub total: RatioRow,
    pub by_lang: Vec<RatioRow>,
    pub by_module: Vec<RatioRow>,
}

/// One ratio entry. The raw numerator/denominator are kept alongside the
/// computed `ratio` so consumers can recompute or re-aggregate.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RatioRow {
    /// Grouping key (overall label, language, or module — per container).
    pub key: String,
    pub numerator: usize,
    pub denominator: usize,
    pub ratio: f64,
}

/// A rate broken down overall, by language, and by module.
/// Structurally parallel to `RatioReport`, kept distinct for clarity.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RateReport {
    pub total: RateRow,
    pub by_lang: Vec<RateRow>,
    pub by_module: Vec<RateRow>,
}

/// One rate entry (see `RatioRow` for the numerator/denominator convention).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RateRow {
    pub key: String,
    pub numerator: usize,
    pub denominator: usize,
    pub rate: f64,
}
271
/// Largest-file statistics overall and per grouping key.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MaxFileReport {
    pub overall: FileStatRow,
    pub by_lang: Vec<MaxFileRow>,
    pub by_module: Vec<MaxFileRow>,
}

/// The extremal file for one grouping key.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MaxFileRow {
    pub key: String,
    pub file: FileStatRow,
}

/// Per-file statistics row shared across derived reports.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileStatRow {
    pub path: String,
    pub module: String,
    pub lang: String,
    pub code: usize,
    pub comments: usize,
    pub blanks: usize,
    pub lines: usize,
    pub bytes: usize,
    pub tokens: usize,
    /// NOTE(review): presumably the comment percentage of the file; `None`
    /// when not computable — confirm against the producer.
    pub doc_pct: Option<f64>,
    /// Average bytes per line, when computable.
    pub bytes_per_line: Option<f64>,
    /// Path depth of the file.
    pub depth: usize,
}
300
/// Per-module language purity (how dominated each module is by one language).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LangPurityReport {
    pub rows: Vec<LangPurityRow>,
}

/// Dominant-language stats for one module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LangPurityRow {
    pub module: String,
    /// Number of distinct languages present in the module.
    pub lang_count: usize,
    pub dominant_lang: String,
    pub dominant_lines: usize,
    /// Dominant language's share, as a percentage.
    pub dominant_pct: f64,
}

/// Indentation/nesting depth summary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NestingReport {
    pub max: usize,
    pub avg: f64,
    pub by_module: Vec<NestingRow>,
}

/// Nesting summary for one module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NestingRow {
    pub key: String,
    pub max: usize,
    pub avg: f64,
}

/// Test vs. production code balance.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestDensityReport {
    pub test_lines: usize,
    pub prod_lines: usize,
    pub test_files: usize,
    pub prod_files: usize,
    /// NOTE(review): presumably test_lines / prod_lines — confirm.
    pub ratio: f64,
}
337
/// Infrastructure vs. logic code balance.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BoilerplateReport {
    pub infra_lines: usize,
    pub logic_lines: usize,
    pub ratio: f64,
    /// Languages counted as infrastructure.
    pub infra_langs: Vec<String>,
}

/// Language-diversity summary for the codebase.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PolyglotReport {
    pub lang_count: usize,
    /// Diversity measure over the language distribution.
    pub entropy: f64,
    pub dominant_lang: String,
    pub dominant_lines: usize,
    /// Dominant language's share, as a percentage.
    pub dominant_pct: f64,
}

/// Summary statistics of the file-size distribution.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DistributionReport {
    pub count: usize,
    pub min: usize,
    pub max: usize,
    pub mean: f64,
    pub median: f64,
    pub p90: f64,
    pub p99: f64,
    /// Gini coefficient (0 = perfectly even, 1 = maximally concentrated).
    pub gini: f64,
}

/// One bucket of the file-size histogram.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HistogramBucket {
    pub label: String,
    pub min: usize,
    /// Upper bound; `None` for the open-ended last bucket.
    pub max: Option<usize>,
    /// Number of files in this bucket.
    pub files: usize,
    /// Share of files in this bucket, as a percentage.
    pub pct: f64,
}
375
/// Worst-offender file lists, one list per metric.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TopOffenders {
    pub largest_lines: Vec<FileStatRow>,
    pub largest_tokens: Vec<FileStatRow>,
    pub largest_bytes: Vec<FileStatRow>,
    pub least_documented: Vec<FileStatRow>,
    pub most_dense: Vec<FileStatRow>,
}

/// Estimated time to read the codebase.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadingTimeReport {
    pub minutes: f64,
    /// Reading-speed assumption used for the estimate.
    pub lines_per_minute: usize,
    /// Number of lines the estimate was based on.
    pub basis_lines: usize,
}

/// TODO/FIXME-style marker summary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TodoReport {
    pub total: usize,
    /// Markers per thousand lines of code.
    pub density_per_kloc: f64,
    pub tags: Vec<TodoTagRow>,
}

/// Count for one marker tag (e.g. "TODO").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TodoTagRow {
    pub tag: String,
    pub count: usize,
}
404
/// Whether the codebase fits in a given LLM context window.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextWindowReport {
    /// Window size the comparison was made against.
    pub window_tokens: usize,
    pub total_tokens: usize,
    /// total_tokens as a percentage of window_tokens.
    pub pct: f64,
    pub fits: bool,
}

/// COCOMO effort estimate.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CocomoReport {
    /// COCOMO project mode (e.g. organic/semi-detached/embedded).
    pub mode: String,
    /// Thousands of lines of code used as input.
    pub kloc: f64,
    /// Estimated effort in person-months.
    pub effort_pm: f64,
    pub duration_months: f64,
    /// Estimated average staffing (effort / duration).
    pub staff: f64,
    // COCOMO model coefficients for the chosen mode.
    pub a: f64,
    pub b: f64,
    pub c: f64,
    pub d: f64,
}

/// Hash over the receipt's entries, for tamper/drift detection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntegrityReport {
    /// Hash algorithm name.
    pub algo: String,
    /// Hex digest. NOTE(review): encoding assumed — confirm.
    pub hash: String,
    /// Number of entries hashed.
    pub entries: usize,
}
432
// -------------
// Asset metrics
// -------------

/// Non-code asset inventory (sizes by category plus largest files).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssetReport {
    pub total_files: usize,
    pub total_bytes: u64,
    pub categories: Vec<AssetCategoryRow>,
    pub top_files: Vec<AssetFileRow>,
}

/// Aggregate stats for one asset category.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssetCategoryRow {
    pub category: String,
    pub files: usize,
    pub bytes: u64,
    /// File extensions grouped under this category.
    pub extensions: Vec<String>,
}

/// One (large) asset file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssetFileRow {
    pub path: String,
    pub bytes: u64,
    pub category: String,
    pub extension: String,
}
460
// -----------------
// Dependency metrics
// -----------------

/// Dependency counts discovered from lockfiles.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DependencyReport {
    /// Total dependencies across all lockfiles.
    pub total: usize,
    pub lockfiles: Vec<LockfileReport>,
}

/// One discovered lockfile.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LockfileReport {
    pub path: String,
    /// Lockfile kind (e.g. ecosystem name). NOTE(review): exact labels are
    /// set by the producer — confirm.
    pub kind: String,
    pub dependencies: usize,
}
477
478// ---------
479// Git report
480// ---------
481
482#[derive(Debug, Clone, Serialize, Deserialize)]
483pub struct GitReport {
484    pub commits_scanned: usize,
485    pub files_seen: usize,
486    pub hotspots: Vec<HotspotRow>,
487    pub bus_factor: Vec<BusFactorRow>,
488    pub freshness: FreshnessReport,
489    pub coupling: Vec<CouplingRow>,
490    /// Code age bucket distribution plus recent refresh trend.
491    #[serde(skip_serializing_if = "Option::is_none")]
492    pub age_distribution: Option<CodeAgeDistributionReport>,
493    /// Commit intent classification (feat/fix/refactor/etc.).
494    #[serde(default, skip_serializing_if = "Option::is_none")]
495    pub intent: Option<CommitIntentReport>,
496}
497
/// A frequently-changed file, scored for hotspot ranking.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HotspotRow {
    pub path: String,
    /// Commits touching this file.
    pub commits: usize,
    pub lines: usize,
    /// Ranking score. NOTE(review): formula lives in the analysis crate — confirm.
    pub score: usize,
}

/// Distinct-author count per module (bus-factor input).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BusFactorRow {
    pub module: String,
    pub authors: usize,
}

/// Staleness summary: how much of the tree has not changed recently.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FreshnessReport {
    /// Age threshold (days) beyond which a file counts as stale.
    pub threshold_days: usize,
    pub stale_files: usize,
    pub total_files: usize,
    /// Stale files as a percentage of total files.
    pub stale_pct: f64,
    pub by_module: Vec<ModuleFreshnessRow>,
}

/// Freshness stats for one module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModuleFreshnessRow {
    pub module: String,
    /// Average file age in days.
    pub avg_days: f64,
    /// 90th-percentile file age in days.
    pub p90_days: f64,
    pub stale_pct: f64,
}
528
/// Co-change coupling between two modules, with optional normalized measures.
/// The `Option` fields all carry `#[serde(default)]` so pre-v7 receipts
/// (which lack them) still deserialize.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CouplingRow {
    pub left: String,
    pub right: String,
    /// Commits touching both modules.
    pub count: usize,
    /// Jaccard similarity: count / (n_left + n_right - count). Range (0.0, 1.0].
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub jaccard: Option<f64>,
    /// Lift: (count * N) / (n_left * n_right), where N = commits_considered.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub lift: Option<f64>,
    /// Commits touching left module (within commits_considered universe).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub n_left: Option<usize>,
    /// Commits touching right module (within commits_considered universe).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub n_right: Option<usize>,
}
547
/// Distribution of file ages plus a recent-vs-prior refresh comparison.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodeAgeDistributionReport {
    pub buckets: Vec<CodeAgeBucket>,
    /// Refreshes in the recent window.
    pub recent_refreshes: usize,
    /// Refreshes in the preceding window, for trend comparison.
    pub prior_refreshes: usize,
    pub refresh_trend: TrendClass,
}

/// One age bucket.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodeAgeBucket {
    pub label: String,
    pub min_days: usize,
    /// Upper bound in days; `None` for the open-ended last bucket.
    pub max_days: Option<usize>,
    pub files: usize,
    /// Share of files in this bucket, as a percentage.
    pub pct: f64,
}
564
// --------------------------
// Commit intent classification
// --------------------------

// Re-export from tokmd-types (Tier 0) so existing consumers keep working.
pub use tokmd_types::CommitIntentKind;

/// Overall commit intent classification report.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitIntentReport {
    /// Aggregate counts across all scanned commits.
    pub overall: CommitIntentCounts,
    /// Per-module intent breakdown.
    pub by_module: Vec<ModuleIntentRow>,
    /// Percentage of commits classified as "other" (unrecognized).
    pub unknown_pct: f64,
    /// Corrective ratio: (fix + revert) / total. Range [0.0, 1.0].
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub corrective_ratio: Option<f64>,
}

/// Counts per intent kind. Maintained via [`CommitIntentCounts::increment`],
/// which keeps `total` equal to the sum of the per-kind counters.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct CommitIntentCounts {
    pub feat: usize,
    pub fix: usize,
    pub refactor: usize,
    pub docs: usize,
    pub test: usize,
    pub chore: usize,
    pub ci: usize,
    pub build: usize,
    pub perf: usize,
    pub style: usize,
    pub revert: usize,
    pub other: usize,
    /// Running sum of all kind counters.
    pub total: usize,
}
603
604impl CommitIntentCounts {
605    /// Increment the count for a given intent kind.
606    pub fn increment(&mut self, kind: CommitIntentKind) {
607        match kind {
608            CommitIntentKind::Feat => self.feat += 1,
609            CommitIntentKind::Fix => self.fix += 1,
610            CommitIntentKind::Refactor => self.refactor += 1,
611            CommitIntentKind::Docs => self.docs += 1,
612            CommitIntentKind::Test => self.test += 1,
613            CommitIntentKind::Chore => self.chore += 1,
614            CommitIntentKind::Ci => self.ci += 1,
615            CommitIntentKind::Build => self.build += 1,
616            CommitIntentKind::Perf => self.perf += 1,
617            CommitIntentKind::Style => self.style += 1,
618            CommitIntentKind::Revert => self.revert += 1,
619            CommitIntentKind::Other => self.other += 1,
620        }
621        self.total += 1;
622    }
623}
624
/// Per-module intent breakdown row.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModuleIntentRow {
    /// Module name.
    pub module: String,
    /// Intent counts for commits touching this module.
    pub counts: CommitIntentCounts,
}
631
// ----------------------------
// Near-duplicate detection
// ----------------------------

/// Scope for near-duplicate comparison partitioning.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
pub enum NearDupScope {
    /// Compare files within the same module.
    #[default]
    Module,
    /// Compare files within the same language.
    Lang,
    /// Compare all files globally.
    Global,
}

/// Parameters for near-duplicate detection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NearDupParams {
    /// Partitioning scope used when pairing candidate files.
    pub scope: NearDupScope,
    /// Minimum similarity for a pair to be reported.
    /// NOTE(review): presumed range [0.0, 1.0] — confirm against the detector.
    pub threshold: f64,
    /// Cap on the number of files analyzed.
    pub max_files: usize,
    /// Maximum pairs to emit (truncation guardrail).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_pairs: Option<usize>,
    /// Effective per-file byte limit used for eligibility filtering.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_file_bytes: Option<u64>,
    /// How files were selected for analysis.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub selection_method: Option<String>,
    /// Algorithm constants used for fingerprinting.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub algorithm: Option<NearDupAlgorithm>,
    /// Glob patterns used to exclude files from near-dup analysis.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub exclude_patterns: Vec<String>,
}
671
/// Algorithm constants for near-duplicate fingerprinting.
/// Recorded in the receipt so results are reproducible/comparable across runs.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct NearDupAlgorithm {
    /// Number of tokens per k-gram shingle.
    pub k_gram_size: usize,
    /// Winnowing window size.
    pub window_size: usize,
    /// Skip fingerprints appearing in more than this many files.
    pub max_postings: usize,
}
682
/// Report of near-duplicate file pairs.
/// All `Option`/`default` fields are v8 additions; older receipts omit them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NearDuplicateReport {
    pub params: NearDupParams,
    pub pairs: Vec<NearDupPairRow>,
    pub files_analyzed: usize,
    pub files_skipped: usize,
    /// Number of files eligible before the max_files cap.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub eligible_files: Option<usize>,
    /// Connected-component clusters derived from pairs.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub clusters: Option<Vec<NearDupCluster>>,
    /// Whether the pairs list was truncated by `max_pairs`.
    /// Clusters are built from the complete pair set before truncation.
    #[serde(default)]
    pub truncated: bool,
    /// Number of files excluded by glob patterns.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub excluded_by_pattern: Option<usize>,
    /// Runtime performance statistics.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stats: Option<NearDupStats>,
}

/// A connected component of near-duplicate files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NearDupCluster {
    /// Files in this cluster, sorted alphabetically.
    pub files: Vec<String>,
    /// Maximum pairwise similarity in the cluster.
    pub max_similarity: f64,
    /// Most-connected file (tie-break alphabetical).
    pub representative: String,
    /// Number of pairs within this cluster.
    pub pair_count: usize,
}
720
/// Runtime statistics for near-duplicate detection.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct NearDupStats {
    /// Time spent computing fingerprints (milliseconds).
    pub fingerprinting_ms: u64,
    /// Time spent computing pair similarities (milliseconds).
    pub pairing_ms: u64,
    /// Total bytes of source files processed.
    pub bytes_processed: u64,
}

/// A pair of near-duplicate files with similarity score.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NearDupPairRow {
    pub left: String,
    pub right: String,
    /// Similarity score for the pair (higher = more similar).
    pub similarity: f64,
    /// Fingerprints common to both files.
    pub shared_fingerprints: usize,
    /// Total fingerprints in the left file.
    pub left_fingerprints: usize,
    /// Total fingerprints in the right file.
    pub right_fingerprints: usize,
}
742
// -----------------
// Import graph info
// -----------------

/// Import/dependency graph between files or modules.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportReport {
    /// Node granularity of the graph (see `AnalysisArgsMeta::import_granularity`).
    pub granularity: String,
    pub edges: Vec<ImportEdge>,
}

/// One directed edge in the import graph.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportEdge {
    pub from: String,
    pub to: String,
    /// Number of imports contributing to this edge.
    pub count: usize,
}
759
760// -------------------
761// Duplication metrics
762// -------------------
763
764#[derive(Debug, Clone, Serialize, Deserialize)]
765pub struct DuplicateReport {
766    pub groups: Vec<DuplicateGroup>,
767    pub wasted_bytes: u64,
768    pub strategy: String,
769    /// Duplication density summary overall and by module.
770    #[serde(skip_serializing_if = "Option::is_none")]
771    pub density: Option<DuplicationDensityReport>,
772    /// Near-duplicate file pairs detected by fingerprint similarity.
773    #[serde(default, skip_serializing_if = "Option::is_none")]
774    pub near: Option<NearDuplicateReport>,
775}
776
777#[derive(Debug, Clone, Serialize, Deserialize)]
778pub struct DuplicateGroup {
779    pub hash: String,
780    pub bytes: u64,
781    pub files: Vec<String>,
782}
783
784#[derive(Debug, Clone, Serialize, Deserialize)]
785pub struct DuplicationDensityReport {
786    pub duplicate_groups: usize,
787    pub duplicate_files: usize,
788    pub duplicated_bytes: u64,
789    pub wasted_bytes: u64,
790    pub wasted_pct_of_codebase: f64,
791    pub by_module: Vec<ModuleDuplicationDensityRow>,
792}
793
794#[derive(Debug, Clone, Serialize, Deserialize)]
795pub struct ModuleDuplicationDensityRow {
796    pub module: String,
797    pub duplicate_files: usize,
798    pub wasted_files: usize,
799    pub duplicated_bytes: u64,
800    pub wasted_bytes: u64,
801    pub module_bytes: u64,
802    pub density: f64,
803}
804
// -------------------
// Halstead metrics
// -------------------

/// Halstead software science metrics computed from operator/operand token counts.
/// The derived values (`volume`..`estimated_bugs`) use the classic Halstead
/// formulas noted on each field; the token counting itself happens in the
/// analysis crate.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HalsteadMetrics {
    /// Number of distinct operators (n1).
    pub distinct_operators: usize,
    /// Number of distinct operands (n2).
    pub distinct_operands: usize,
    /// Total number of operators (N1).
    pub total_operators: usize,
    /// Total number of operands (N2).
    pub total_operands: usize,
    /// Program vocabulary: n1 + n2.
    pub vocabulary: usize,
    /// Program length: N1 + N2.
    pub length: usize,
    /// Volume: N * log2(n).
    pub volume: f64,
    /// Difficulty: (n1/2) * (N2/n2).
    pub difficulty: f64,
    /// Effort: D * V.
    pub effort: f64,
    /// Estimated programming time in seconds: E / 18.
    pub time_seconds: f64,
    /// Estimated number of bugs: V / 3000.
    pub estimated_bugs: f64,
}
835
836// -------------------
837// Maintainability Index
838// -------------------
839
840/// Composite maintainability index based on the SEI formula.
841///
842/// MI = 171 - 5.2 * ln(V) - 0.23 * CC - 16.2 * ln(LOC)
843///
844/// When Halstead volume is unavailable, a simplified formula is used.
845#[derive(Debug, Clone, Serialize, Deserialize)]
846pub struct MaintainabilityIndex {
847    /// Maintainability index score (0-171 scale, higher is better).
848    pub score: f64,
849    /// Average cyclomatic complexity used in calculation.
850    pub avg_cyclomatic: f64,
851    /// Average lines of code per file used in calculation.
852    pub avg_loc: f64,
853    /// Average Halstead volume (if Halstead metrics were computed).
854    #[serde(skip_serializing_if = "Option::is_none")]
855    pub avg_halstead_volume: Option<f64>,
856    /// Letter grade: "A" (>=85), "B" (65-84), "C" (<65).
857    pub grade: String,
858}
859
860/// Complexity-to-size ratio heuristic for technical debt estimation.
861#[derive(Debug, Clone, Serialize, Deserialize)]
862pub struct TechnicalDebtRatio {
863    /// Complexity points per KLOC (higher means denser debt).
864    pub ratio: f64,
865    /// Aggregate complexity points used in the ratio.
866    pub complexity_points: usize,
867    /// KLOC basis used in the ratio denominator.
868    pub code_kloc: f64,
869    /// Bucketed interpretation of debt ratio.
870    pub level: TechnicalDebtLevel,
871}
872
873#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
874#[serde(rename_all = "snake_case")]
875pub enum TechnicalDebtLevel {
876    Low,
877    Moderate,
878    High,
879    Critical,
880}
881
882// -------------------
883// Complexity metrics
884// -------------------
885
886#[derive(Debug, Clone, Serialize, Deserialize)]
887pub struct ComplexityReport {
888    pub total_functions: usize,
889    pub avg_function_length: f64,
890    pub max_function_length: usize,
891    pub avg_cyclomatic: f64,
892    pub max_cyclomatic: usize,
893    /// Average cognitive complexity across files.
894    #[serde(skip_serializing_if = "Option::is_none")]
895    pub avg_cognitive: Option<f64>,
896    /// Maximum cognitive complexity found.
897    #[serde(skip_serializing_if = "Option::is_none")]
898    pub max_cognitive: Option<usize>,
899    /// Average nesting depth across files.
900    #[serde(skip_serializing_if = "Option::is_none")]
901    pub avg_nesting_depth: Option<f64>,
902    /// Maximum nesting depth found.
903    #[serde(skip_serializing_if = "Option::is_none")]
904    pub max_nesting_depth: Option<usize>,
905    pub high_risk_files: usize,
906    /// Histogram of cyclomatic complexity distribution.
907    #[serde(skip_serializing_if = "Option::is_none")]
908    pub histogram: Option<ComplexityHistogram>,
909    /// Halstead software science metrics (requires `halstead` feature).
910    #[serde(skip_serializing_if = "Option::is_none")]
911    pub halstead: Option<HalsteadMetrics>,
912    /// Composite maintainability index.
913    #[serde(skip_serializing_if = "Option::is_none")]
914    pub maintainability_index: Option<MaintainabilityIndex>,
915    /// Complexity-to-size debt heuristic.
916    #[serde(skip_serializing_if = "Option::is_none")]
917    pub technical_debt: Option<TechnicalDebtRatio>,
918    pub files: Vec<FileComplexity>,
919}
920
921#[derive(Debug, Clone, Serialize, Deserialize)]
922pub struct FileComplexity {
923    pub path: String,
924    pub module: String,
925    pub function_count: usize,
926    pub max_function_length: usize,
927    pub cyclomatic_complexity: usize,
928    /// Cognitive complexity for this file.
929    #[serde(skip_serializing_if = "Option::is_none")]
930    pub cognitive_complexity: Option<usize>,
931    /// Maximum nesting depth in this file.
932    #[serde(skip_serializing_if = "Option::is_none")]
933    pub max_nesting: Option<usize>,
934    pub risk_level: ComplexityRisk,
935    /// Function-level complexity details (only when --detail-functions is used).
936    #[serde(skip_serializing_if = "Option::is_none")]
937    pub functions: Option<Vec<FunctionComplexityDetail>>,
938}
939
940/// Function-level complexity details.
941#[derive(Debug, Clone, Serialize, Deserialize)]
942pub struct FunctionComplexityDetail {
943    /// Function name.
944    pub name: String,
945    /// Start line (1-indexed).
946    pub line_start: usize,
947    /// End line (1-indexed).
948    pub line_end: usize,
949    /// Function length in lines.
950    pub length: usize,
951    /// Cyclomatic complexity.
952    pub cyclomatic: usize,
953    /// Cognitive complexity (if computed).
954    #[serde(skip_serializing_if = "Option::is_none")]
955    pub cognitive: Option<usize>,
956    /// Maximum nesting depth within the function.
957    #[serde(skip_serializing_if = "Option::is_none")]
958    pub max_nesting: Option<usize>,
959    /// Number of parameters.
960    #[serde(skip_serializing_if = "Option::is_none")]
961    pub param_count: Option<usize>,
962}
963
964#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
965#[serde(rename_all = "snake_case")]
966pub enum ComplexityRisk {
967    Low,
968    Moderate,
969    High,
970    Critical,
971}
972
/// Histogram of cyclomatic complexity distribution across files.
///
/// Used to visualize the distribution of complexity values in a codebase.
/// Default bucket boundaries are 0-4, 5-9, 10-14, 15-19, 20-24, 25-29, 30+.
/// `counts` is expected to have one entry per bucket, with the last bucket
/// open-ended (see `to_ascii`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComplexityHistogram {
    /// Bucket boundaries (e.g., [0, 5, 10, 15, 20, 25, 30]).
    pub buckets: Vec<u32>,
    /// Count of files in each bucket.
    pub counts: Vec<u32>,
    /// Total files analyzed.
    pub total: u32,
}
986
987impl ComplexityHistogram {
988    /// Generate an ASCII bar chart visualization of the histogram.
989    ///
990    /// # Arguments
991    /// * `width` - Maximum width of the bars in characters
992    ///
993    /// # Returns
994    /// A multi-line string with labeled bars showing distribution
995    pub fn to_ascii(&self, width: usize) -> String {
996        let max_count = self.counts.iter().max().copied().unwrap_or(1).max(1);
997        let mut output = String::new();
998        for (i, count) in self.counts.iter().enumerate() {
999            let label = if i < self.buckets.len() - 1 {
1000                format!("{:>2}-{:<2}", self.buckets[i], self.buckets[i + 1] - 1)
1001            } else {
1002                format!("{:>2}+ ", self.buckets.get(i).copied().unwrap_or(30))
1003            };
1004            let bar_len = (*count as f64 / max_count as f64 * width as f64) as usize;
1005            let bar = "\u{2588}".repeat(bar_len);
1006            output.push_str(&format!("{} |{} {}\n", label, bar, count));
1007        }
1008        output
1009    }
1010}
1011
1012// -------------------
1013// Baseline/Ratchet types
1014// -------------------
1015
/// Schema version for baseline files.
/// v1: Initial baseline format with complexity and determinism tracking.
///
/// Bump this (and document the change above) whenever the serialized
/// baseline layout changes incompatibly.
pub const BASELINE_VERSION: u32 = 1;
1019
/// Complexity baseline for tracking trends over time.
///
/// Used by the ratchet system to enforce that complexity metrics
/// do not regress across commits. The baseline captures a snapshot
/// of complexity at a known-good state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComplexityBaseline {
    /// Schema version for forward compatibility. See [`BASELINE_VERSION`].
    pub baseline_version: u32,
    /// ISO 8601 timestamp when this baseline was generated.
    pub generated_at: String,
    /// Git commit SHA at which this baseline was captured, if available.
    ///
    /// Note: `ComplexityBaseline::from_analysis` always leaves this `None`;
    /// callers are expected to populate it separately.
    pub commit: Option<String>,
    /// Aggregate complexity metrics.
    pub metrics: BaselineMetrics,
    /// Per-file baseline entries for granular tracking.
    pub files: Vec<FileBaselineEntry>,
    /// Complexity section mirroring analysis receipt structure for ratchet compatibility.
    ///
    /// This allows using the same JSON pointers (e.g., `/complexity/avg_cyclomatic`)
    /// when comparing baselines against current analysis receipts.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub complexity: Option<BaselineComplexitySection>,
    /// Determinism baseline for reproducibility verification.
    ///
    /// Present when the baseline was generated with `--determinism`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub determinism: Option<DeterminismBaseline>,
}
1049
1050impl ComplexityBaseline {
1051    /// Creates a new empty baseline with default values.
1052    pub fn new() -> Self {
1053        Self {
1054            baseline_version: BASELINE_VERSION,
1055            generated_at: String::new(),
1056            commit: None,
1057            metrics: BaselineMetrics::default(),
1058            files: Vec::new(),
1059            complexity: None,
1060            determinism: None,
1061        }
1062    }
1063
1064    /// Creates a baseline from an analysis receipt.
1065    ///
1066    /// Extracts complexity information from the receipt's complexity report
1067    /// and derived totals to build a baseline snapshot.
1068    pub fn from_analysis(receipt: &AnalysisReceipt) -> Self {
1069        let generated_at = chrono_timestamp_iso8601(receipt.generated_at_ms);
1070
1071        let (metrics, files, complexity) = if let Some(ref complexity_report) = receipt.complexity {
1072            let total_code_lines = receipt
1073                .derived
1074                .as_ref()
1075                .map(|d| d.totals.code as u64)
1076                .unwrap_or(0);
1077            let total_files = receipt
1078                .derived
1079                .as_ref()
1080                .map(|d| d.totals.files as u64)
1081                .unwrap_or(0);
1082
1083            let metrics = BaselineMetrics {
1084                total_code_lines,
1085                total_files,
1086                avg_cyclomatic: complexity_report.avg_cyclomatic,
1087                max_cyclomatic: complexity_report.max_cyclomatic as u32,
1088                avg_cognitive: complexity_report.avg_cognitive.unwrap_or(0.0),
1089                max_cognitive: complexity_report.max_cognitive.unwrap_or(0) as u32,
1090                avg_nesting_depth: complexity_report.avg_nesting_depth.unwrap_or(0.0),
1091                max_nesting_depth: complexity_report.max_nesting_depth.unwrap_or(0) as u32,
1092                function_count: complexity_report.total_functions as u64,
1093                avg_function_length: complexity_report.avg_function_length,
1094            };
1095
1096            let files: Vec<FileBaselineEntry> = complexity_report
1097                .files
1098                .iter()
1099                .map(|f| FileBaselineEntry {
1100                    path: f.path.clone(),
1101                    code_lines: 0, // Not available in FileComplexity
1102                    cyclomatic: f.cyclomatic_complexity as u32,
1103                    cognitive: f.cognitive_complexity.unwrap_or(0) as u32,
1104                    max_nesting: f.max_nesting.unwrap_or(0) as u32,
1105                    function_count: f.function_count as u32,
1106                    content_hash: None,
1107                })
1108                .collect();
1109
1110            // Build complexity section mirroring analysis receipt structure
1111            let complexity_section = BaselineComplexitySection {
1112                total_functions: complexity_report.total_functions,
1113                avg_function_length: complexity_report.avg_function_length,
1114                max_function_length: complexity_report.max_function_length,
1115                avg_cyclomatic: complexity_report.avg_cyclomatic,
1116                max_cyclomatic: complexity_report.max_cyclomatic,
1117                avg_cognitive: complexity_report.avg_cognitive,
1118                max_cognitive: complexity_report.max_cognitive,
1119                avg_nesting_depth: complexity_report.avg_nesting_depth,
1120                max_nesting_depth: complexity_report.max_nesting_depth,
1121                high_risk_files: complexity_report.high_risk_files,
1122            };
1123
1124            (metrics, files, Some(complexity_section))
1125        } else {
1126            (BaselineMetrics::default(), Vec::new(), None)
1127        };
1128
1129        Self {
1130            baseline_version: BASELINE_VERSION,
1131            generated_at,
1132            commit: None,
1133            metrics,
1134            files,
1135            complexity,
1136            determinism: None,
1137        }
1138    }
1139}
1140
1141impl Default for ComplexityBaseline {
1142    fn default() -> Self {
1143        Self::new()
1144    }
1145}
1146
/// Complexity section mirroring analysis receipt structure for ratchet compatibility.
///
/// This provides the same field names as `ComplexityReport` so that JSON pointers
/// like `/complexity/avg_cyclomatic` work consistently across baselines and receipts.
///
/// Optional fields are omitted from serialized output when `None`
/// (`skip_serializing_if`), matching the receipt side.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BaselineComplexitySection {
    /// Total number of functions analyzed.
    pub total_functions: usize,
    /// Average function length in lines.
    pub avg_function_length: f64,
    /// Maximum function length found.
    pub max_function_length: usize,
    /// Average cyclomatic complexity across all files.
    pub avg_cyclomatic: f64,
    /// Maximum cyclomatic complexity found in any file.
    pub max_cyclomatic: usize,
    /// Average cognitive complexity across all files.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avg_cognitive: Option<f64>,
    /// Maximum cognitive complexity found.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_cognitive: Option<usize>,
    /// Average nesting depth across all files.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avg_nesting_depth: Option<f64>,
    /// Maximum nesting depth found.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_nesting_depth: Option<usize>,
    /// Number of high-risk files.
    pub high_risk_files: usize,
}
1178
/// Aggregate baseline metrics for the entire codebase.
///
/// All-zero defaults come from the hand-written `Default` impl below.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BaselineMetrics {
    /// Total lines of code across all files.
    pub total_code_lines: u64,
    /// Total number of source files.
    pub total_files: u64,
    /// Average cyclomatic complexity across all functions.
    ///
    /// NOTE(review): `ComplexityBaseline::from_analysis` copies this from the
    /// report's `avg_cyclomatic`, which is documented elsewhere as a per-file
    /// average — confirm whether the "per function" wording is accurate.
    pub avg_cyclomatic: f64,
    /// Maximum cyclomatic complexity found in any function.
    pub max_cyclomatic: u32,
    /// Average cognitive complexity across all functions.
    pub avg_cognitive: f64,
    /// Maximum cognitive complexity found in any function.
    pub max_cognitive: u32,
    /// Average nesting depth across all functions.
    pub avg_nesting_depth: f64,
    /// Maximum nesting depth found in any function.
    pub max_nesting_depth: u32,
    /// Total number of functions analyzed.
    pub function_count: u64,
    /// Average function length in lines.
    pub avg_function_length: f64,
}
1203
1204impl Default for BaselineMetrics {
1205    fn default() -> Self {
1206        Self {
1207            total_code_lines: 0,
1208            total_files: 0,
1209            avg_cyclomatic: 0.0,
1210            max_cyclomatic: 0,
1211            avg_cognitive: 0.0,
1212            max_cognitive: 0,
1213            avg_nesting_depth: 0.0,
1214            max_nesting_depth: 0,
1215            function_count: 0,
1216            avg_function_length: 0.0,
1217        }
1218    }
1219}
1220
/// Per-file baseline entry for granular complexity tracking.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileBaselineEntry {
    /// Normalized file path (forward slashes).
    pub path: String,
    /// Lines of code in this file.
    ///
    /// Note: entries built by `ComplexityBaseline::from_analysis` set this to
    /// 0 because per-file line counts are not available there.
    pub code_lines: u64,
    /// Cyclomatic complexity for this file.
    pub cyclomatic: u32,
    /// Cognitive complexity for this file (0 when not computed).
    pub cognitive: u32,
    /// Maximum nesting depth in this file (0 when not computed).
    pub max_nesting: u32,
    /// Number of functions in this file.
    pub function_count: u32,
    /// BLAKE3 hash of file content for change detection; `None` when hashing
    /// was skipped (e.g. `from_analysis` never populates it).
    pub content_hash: Option<String>,
}
1239
/// Build determinism baseline for reproducibility verification.
///
/// Tracks hashes of build artifacts and source inputs to detect
/// non-deterministic builds.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeterminismBaseline {
    /// Schema version for forward compatibility.
    /// NOTE(review): presumably `BASELINE_VERSION` — confirm in the producer.
    pub baseline_version: u32,
    /// ISO 8601 timestamp when this baseline was generated.
    pub generated_at: String,
    /// Hash of the final build artifact.
    /// NOTE(review): the hash algorithm is not specified here — confirm in
    /// the producing code.
    pub build_hash: String,
    /// Hash of all source files combined.
    pub source_hash: String,
    /// Hash of Cargo.lock if present (Rust projects).
    pub cargo_lock_hash: Option<String>,
}
1257
/// Helper to convert milliseconds timestamp to RFC 3339 / ISO 8601 string.
///
/// Produces `YYYY-MM-DDTHH:MM:SS.sssZ` (UTC, millisecond precision) without
/// pulling in a date-time crate. The civil-date conversion follows Howard
/// Hinnant's `civil_from_days` algorithm.
fn chrono_timestamp_iso8601(ms: u128) -> String {
    const SECS_PER_MIN: i64 = 60;
    const SECS_PER_HOUR: i64 = 3600;
    const SECS_PER_DAY: i64 = 86400;

    // Split into whole seconds plus the sub-second millisecond part.
    let millis = (ms % 1000) as u32;
    let total_secs = (ms / 1000) as i64;

    // Floor-divide into whole days and a non-negative seconds-of-day
    // remainder; `div_euclid`/`rem_euclid` handle pre-epoch (negative)
    // second counts directly.
    let days = total_secs.div_euclid(SECS_PER_DAY);
    let day_secs = total_secs.rem_euclid(SECS_PER_DAY);

    // Time of day.
    let hour = day_secs / SECS_PER_HOUR;
    let minute = (day_secs % SECS_PER_HOUR) / SECS_PER_MIN;
    let second = day_secs % SECS_PER_MIN;

    // civil_from_days: days since 1970-01-01 -> (year, month, day).
    let z = days + 719468; // rebase so day 0 is 0000-03-01
    let era = if z >= 0 { z } else { z - 146096 } / 146097;
    let doe = (z - era * 146097) as u32; // day of era [0, 146096]
    let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365; // year of era
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // day of March-based year
    let mp = (5 * doy + 2) / 153; // March-based month index
    let day = doy - (153 * mp + 2) / 5 + 1;
    let month = if mp < 10 { mp + 3 } else { mp - 9 };
    let mut year = yoe as i64 + era * 400;
    if month <= 2 {
        year += 1; // Jan/Feb belong to the next civil year in this scheme
    }

    // RFC 3339: YYYY-MM-DDTHH:MM:SS.sssZ
    format!(
        "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}.{:03}Z",
        year, month, day, hour, minute, second, millis
    )
}
1304
1305// -------------------
1306// API Surface metrics
1307// -------------------
1308
/// Public API surface analysis report.
///
/// Computes public export ratios per language and module by scanning
/// source files for exported symbols (pub fn, export function, etc.).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiSurfaceReport {
    /// Total items discovered across all languages.
    pub total_items: usize,
    /// Items with public visibility.
    pub public_items: usize,
    /// Items with internal/private visibility.
    pub internal_items: usize,
    /// Ratio of public to total items (0.0-1.0).
    pub public_ratio: f64,
    /// Ratio of documented public items (0.0-1.0).
    pub documented_ratio: f64,
    /// Per-language breakdown (BTreeMap keeps serialized output
    /// deterministically ordered by key).
    pub by_language: BTreeMap<String, LangApiSurface>,
    /// Per-module breakdown.
    pub by_module: Vec<ModuleApiRow>,
    /// Top exporters (files with most public items).
    /// NOTE(review): presumably sorted descending by `public_items` — confirm
    /// in the producing analysis code.
    pub top_exporters: Vec<ApiExportItem>,
}
1332
/// Per-language API surface breakdown.
///
/// Value type of [`ApiSurfaceReport`]'s `by_language` map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LangApiSurface {
    /// Total items in this language.
    pub total_items: usize,
    /// Public items in this language.
    pub public_items: usize,
    /// Internal items in this language.
    pub internal_items: usize,
    /// Public ratio for this language (0.0-1.0).
    pub public_ratio: f64,
}
1345
/// Per-module API surface row.
///
/// Row type of [`ApiSurfaceReport`]'s `by_module` list.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModuleApiRow {
    /// Module path.
    pub module: String,
    /// Total items in this module.
    pub total_items: usize,
    /// Public items in this module.
    pub public_items: usize,
    /// Public ratio for this module (0.0-1.0).
    pub public_ratio: f64,
}
1358
/// A file that exports many public items.
///
/// Element type of [`ApiSurfaceReport`]'s `top_exporters` list.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiExportItem {
    /// File path.
    pub path: String,
    /// Language of the file.
    pub lang: String,
    /// Number of public items exported.
    pub public_items: usize,
    /// Total items in the file.
    pub total_items: usize,
}
1371
1372// ---------
1373// Fun stuff
1374// ---------
1375
/// Container for lighthearted, non-essential analyses ("fun stuff").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunReport {
    /// Eco-label result, when that analysis ran.
    pub eco_label: Option<EcoLabel>,
}
1380
/// Playful "eco label" grade for a codebase.
///
/// NOTE(review): the scoring semantics (range and direction of `score`, what
/// `bytes` measures) are defined by the analysis crate — confirm there.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EcoLabel {
    /// Numeric score backing the label.
    pub score: f64,
    /// Human-readable grade/label text.
    pub label: String,
    /// Byte count the score was derived from — presumably total scanned
    /// bytes; verify against the producer.
    pub bytes: u64,
    /// Free-form explanatory notes.
    pub notes: String,
}
1388
1389// =========================
1390// Ecosystem Envelope (v1) — re-exported from tokmd-envelope
1391// =========================
1392
1393/// Schema identifier for ecosystem envelope format.
1394/// v1: Initial envelope specification for multi-sensor integration.
1395pub const ENVELOPE_SCHEMA: &str = tokmd_envelope::SENSOR_REPORT_SCHEMA;
1396
1397// Re-export all envelope types with backwards-compatible aliases
1398pub use tokmd_envelope::Artifact;
1399pub use tokmd_envelope::Finding;
1400pub use tokmd_envelope::FindingLocation;
1401pub use tokmd_envelope::FindingSeverity;
1402pub use tokmd_envelope::GateItem;
1403pub use tokmd_envelope::GateResults as GatesEnvelope;
1404pub use tokmd_envelope::SensorReport as Envelope;
1405pub use tokmd_envelope::ToolMeta as EnvelopeTool;
1406pub use tokmd_envelope::Verdict;
1407
1408// Also re-export the canonical names for new code
1409pub use tokmd_envelope::GateResults;
1410pub use tokmd_envelope::SensorReport;
1411pub use tokmd_envelope::ToolMeta;