cuenv_ci/compiler/
mod.rs

1//! Compiler from cuenv task definitions to IR v1.4
2//!
3//! Transforms a cuenv `Project` with tasks into an intermediate representation
4//! suitable for emitting orchestrator-native CI configurations.
5//!
6//! ## Stage Contributors
7//!
8//! The compiler applies stage contributors (Nix, 1Password, Cachix) during
9//! compilation to inject setup/teardown tasks into the IR stages.
10
11pub mod digest;
12
13use crate::flake::{FlakeLockAnalyzer, FlakeLockError, PurityAnalysis};
14use crate::ir::{
15    CachePolicy, IntermediateRepresentation, IrValidator, ManualTriggerConfig, OutputDeclaration,
16    OutputType, PurityMode, Runtime, SecretConfig, Task as IrTask, TriggerCondition,
17    WorkflowDispatchInputDef,
18};
19use crate::stages;
20use cuenv_core::ci::{CI, ManualTrigger, Pipeline};
21use cuenv_core::manifest::Project;
22use cuenv_core::tasks::{Task, TaskDefinition, TaskGroup};
23use digest::DigestBuilder;
24use std::collections::{HashMap, HashSet};
25use std::path::{Path, PathBuf};
26use thiserror::Error;
27use uuid::Uuid;
28
/// Compiler errors
///
/// Each variant corresponds to a distinct failure mode of the compiler.
#[derive(Debug, Error)]
pub enum CompilerError {
    /// The compiled IR failed validation; the message aggregates all
    /// validator errors, comma-separated.
    #[error("Task graph validation failed: {0}")]
    ValidationFailed(String),

    /// A task's shell script could not be expressed as a command array.
    /// NOTE(review): not constructed in this module — confirm it is raised
    /// elsewhere or reserved for future use.
    #[error("Task '{0}' uses shell script but IR requires command array")]
    ShellScriptNotSupported(String),

    /// The task definition is structurally invalid (e.g. it has neither a
    /// `command` nor a `script`).
    #[error("Invalid task structure: {0}")]
    InvalidTaskStructure(String),

    /// Error from flake.lock parsing or purity analysis.
    #[error("Flake lock error: {0}")]
    FlakeLock(#[from] FlakeLockError),
}
44
/// Compiler for transforming cuenv tasks to IR
///
/// Construct with [`Compiler::new`] (default options) or
/// [`Compiler::with_options`], then call [`Compiler::compile`].
pub struct Compiler {
    /// Project being compiled
    project: Project,

    /// Compiler options
    options: CompilerOptions,
}
53
/// Compiler configuration options
#[derive(Debug, Clone, Default)]
pub struct CompilerOptions {
    /// Default purity mode for runtimes
    pub purity_mode: PurityMode,

    /// Whether to validate inputs exist at compile time
    // NOTE(review): not read anywhere in this module — confirm it is
    // consumed by other parts of the crate.
    pub validate_inputs: bool,

    /// Default cache policy for tasks
    ///
    /// Tasks labelled "deployment" override this with `CachePolicy::Disabled`.
    pub default_cache_policy: CachePolicy,

    /// Path to flake.lock file (optional, auto-detected if not set)
    pub flake_lock_path: Option<PathBuf>,

    /// Project root directory (for locating flake.lock)
    ///
    /// Only consulted when `flake_lock_path` is unset; falls back to the
    /// current directory otherwise.
    pub project_root: Option<PathBuf>,

    /// Manual overrides for input digests (for Override mode)
    /// Maps input name to override digest value
    pub input_overrides: HashMap<String, String>,

    /// Pipeline being compiled (for environment-aware compilation)
    ///
    /// When set, the compiler will set `ir.pipeline.environment` from
    /// the pipeline's environment, enabling contributors to self-detect
    /// their requirements.
    pub pipeline: Option<Pipeline>,
}
83
84impl Compiler {
85    /// Create a new compiler for the given project
86    #[must_use]
87    pub fn new(project: Project) -> Self {
88        Self {
89            project,
90            options: CompilerOptions::default(),
91        }
92    }
93
94    /// Create a compiler with custom options
95    #[must_use]
96    pub fn with_options(project: Project, options: CompilerOptions) -> Self {
97        Self { project, options }
98    }
99
100    /// Analyze flake.lock for purity and compute runtime digest
101    ///
102    /// If a flake.lock file is found, analyzes it for unlocked inputs
103    /// and computes a deterministic digest based on the locked content.
104    ///
105    /// # Returns
106    /// - `Some(Ok((digest, purity)))` if analysis succeeded
107    /// - `Some(Err(e))` if analysis failed
108    /// - `None` if no flake.lock was found (not a flake-based project)
109    #[must_use]
110    pub fn analyze_flake_purity(&self) -> Option<Result<(String, PurityMode), CompilerError>> {
111        let lock_path = self.resolve_flake_lock_path();
112
113        if !lock_path.exists() {
114            return None;
115        }
116
117        Some(self.perform_flake_analysis(&lock_path))
118    }
119
120    /// Resolve the path to flake.lock
121    fn resolve_flake_lock_path(&self) -> PathBuf {
122        // Use explicit path if provided
123        if let Some(path) = &self.options.flake_lock_path {
124            return path.clone();
125        }
126
127        // Otherwise, look in project root
128        if let Some(root) = &self.options.project_root {
129            return root.join("flake.lock");
130        }
131
132        // Default: current directory
133        PathBuf::from("flake.lock")
134    }
135
136    /// Perform flake purity analysis and apply purity mode
137    fn perform_flake_analysis(
138        &self,
139        lock_path: &Path,
140    ) -> Result<(String, PurityMode), CompilerError> {
141        let analyzer = FlakeLockAnalyzer::from_path(lock_path)?;
142        let analysis = analyzer.analyze();
143
144        self.apply_purity_mode(&analysis)
145    }
146
147    /// Apply purity mode enforcement based on analysis results
148    ///
149    /// - **Strict**: Reject unlocked flakes with an error
150    /// - **Warning**: Log warnings and inject UUID into digest (non-deterministic)
151    /// - **Override**: Apply manual input overrides for deterministic builds
152    fn apply_purity_mode(
153        &self,
154        analysis: &PurityAnalysis,
155    ) -> Result<(String, PurityMode), CompilerError> {
156        match self.options.purity_mode {
157            PurityMode::Strict => {
158                if !analysis.is_pure {
159                    let inputs: Vec<String> = analysis
160                        .unlocked_inputs
161                        .iter()
162                        .map(|u| format!("{}: {}", u.name, u.reason))
163                        .collect();
164                    return Err(CompilerError::FlakeLock(FlakeLockError::strict_violation(
165                        inputs,
166                    )));
167                }
168                Ok((analysis.locked_digest.clone(), PurityMode::Strict))
169            }
170
171            PurityMode::Warning => {
172                if analysis.is_pure {
173                    Ok((analysis.locked_digest.clone(), PurityMode::Warning))
174                } else {
175                    // Log warnings for each unlocked input
176                    for input in &analysis.unlocked_inputs {
177                        tracing::warn!(
178                            input = %input.name,
179                            reason = %input.reason,
180                            "Unlocked flake input detected - cache key will be non-deterministic"
181                        );
182                    }
183
184                    // Inject UUID v4 into digest to force cache miss
185                    let uuid = Uuid::new_v4().to_string();
186                    let mut digest_builder = DigestBuilder::new();
187                    digest_builder.add_inputs(std::slice::from_ref(&analysis.locked_digest));
188                    digest_builder.add_impurity_uuid(&uuid);
189
190                    Ok((digest_builder.finalize(), PurityMode::Warning))
191                }
192            }
193
194            PurityMode::Override => {
195                // In override mode, apply manual input overrides
196                let mut effective_digest = analysis.locked_digest.clone();
197
198                if !self.options.input_overrides.is_empty() {
199                    let mut digest_builder = DigestBuilder::new();
200                    digest_builder.add_inputs(&[effective_digest]);
201
202                    // Add overrides to digest in deterministic order
203                    let mut sorted_overrides: Vec<_> =
204                        self.options.input_overrides.iter().collect();
205                    sorted_overrides.sort_by_key(|(k, _)| *k);
206
207                    for (key, value) in sorted_overrides {
208                        digest_builder.add_inputs(&[format!("override:{key}={value}")]);
209                    }
210
211                    effective_digest = digest_builder.finalize();
212                }
213
214                Ok((effective_digest, PurityMode::Override))
215            }
216        }
217    }
218
219    /// Compute a runtime configuration from the flake analysis
220    ///
221    /// This method creates a `Runtime` IR type with the computed digest
222    /// based on flake purity analysis.
223    ///
224    /// # Errors
225    ///
226    /// Returns `CompilerError` if flake purity analysis fails.
227    pub fn compute_runtime(
228        &self,
229        id: impl Into<String>,
230        flake_ref: impl Into<String>,
231        output: impl Into<String>,
232        system: impl Into<String>,
233    ) -> Result<Runtime, CompilerError> {
234        let (digest, purity) = match self.analyze_flake_purity() {
235            Some(result) => result?,
236            None => {
237                // No flake.lock found - use placeholder digest
238                // This handles non-flake projects gracefully
239                ("sha256:no-flake-lock".to_string(), self.options.purity_mode)
240            }
241        };
242
243        Ok(Runtime {
244            id: id.into(),
245            flake: flake_ref.into(),
246            output: output.into(),
247            system: system.into(),
248            digest,
249            purity,
250        })
251    }
252
253    /// Compile project tasks to IR
254    ///
255    /// # Errors
256    ///
257    /// Returns `CompilerError` if task compilation fails.
258    pub fn compile(&self) -> Result<IntermediateRepresentation, CompilerError> {
259        let mut ir = IntermediateRepresentation::new(&self.project.name);
260
261        // Set pipeline context from options (enables environment-aware contributors)
262        if let Some(ref pipeline) = self.options.pipeline {
263            ir.pipeline.environment.clone_from(&pipeline.environment);
264        }
265
266        // Set up trigger conditions from CI configuration
267        if let Some(ci_config) = &self.project.ci
268            && let Some(first_pipeline) = ci_config.pipelines.first()
269        {
270            ir.pipeline.trigger = Some(self.build_trigger_condition(first_pipeline, ci_config));
271        }
272
273        // Compile tasks
274        self.compile_tasks(&self.project.tasks, &mut ir)?;
275
276        // Apply stage contributors with fixed-point iteration
277        // Contributors self-detect their requirements and report modifications.
278        // Loop continues until no contributor reports changes (stable state).
279        let contributors = stages::default_contributors();
280        loop {
281            let mut any_modified = false;
282            for contributor in &contributors {
283                if contributor.is_active(&ir, &self.project) {
284                    let (contributions, modified) = contributor.contribute(&ir, &self.project);
285                    for (stage, task) in contributions {
286                        ir.stages.add(stage, task);
287                    }
288                    any_modified |= modified;
289                }
290            }
291            ir.stages.sort_by_priority();
292            if !any_modified {
293                break;
294            }
295        }
296
297        // Validate the IR
298        let validator = IrValidator::new(&ir);
299        validator.validate().map_err(|errors| {
300            let error_messages: Vec<String> = errors
301                .iter()
302                .map(std::string::ToString::to_string)
303                .collect();
304            CompilerError::ValidationFailed(error_messages.join(", "))
305        })?;
306
307        Ok(ir)
308    }
309
310    /// Build trigger condition for a pipeline from its configuration
311    fn build_trigger_condition(&self, pipeline: &Pipeline, ci_config: &CI) -> TriggerCondition {
312        let when = pipeline.when.as_ref();
313
314        // Extract branch patterns
315        let branches = when
316            .and_then(|w| w.branch.as_ref())
317            .map(cuenv_core::ci::StringOrVec::to_vec)
318            .unwrap_or_default();
319
320        // Extract pull_request setting
321        let pull_request = when.and_then(|w| w.pull_request);
322
323        // Extract scheduled cron expressions
324        let scheduled = when
325            .and_then(|w| w.scheduled.as_ref())
326            .map(cuenv_core::ci::StringOrVec::to_vec)
327            .unwrap_or_default();
328
329        // Extract release types
330        let release = when.and_then(|w| w.release.clone()).unwrap_or_default();
331
332        // Build manual trigger config
333        let manual = when.and_then(|w| w.manual.as_ref()).map(|m| match m {
334            ManualTrigger::Enabled(enabled) => ManualTriggerConfig {
335                enabled: *enabled,
336                inputs: HashMap::new(),
337            },
338            ManualTrigger::WithInputs(inputs) => ManualTriggerConfig {
339                enabled: true,
340                inputs: inputs
341                    .iter()
342                    .map(|(k, v)| {
343                        (
344                            k.clone(),
345                            WorkflowDispatchInputDef {
346                                description: v.description.clone(),
347                                required: v.required.unwrap_or(false),
348                                default: v.default.clone(),
349                                input_type: v.input_type.clone(),
350                                options: v.options.clone().unwrap_or_default(),
351                            },
352                        )
353                    })
354                    .collect(),
355            },
356        });
357
358        // Determine whether to derive paths from task inputs
359        let should_derive_paths = pipeline.derive_paths.unwrap_or_else(|| {
360            // Default: derive paths if we have branch/PR triggers (not scheduled-only)
361            !branches.is_empty() || pull_request.is_some()
362        });
363
364        // Derive paths from task inputs
365        let paths = if should_derive_paths {
366            self.derive_trigger_paths(pipeline)
367        } else {
368            Vec::new()
369        };
370
371        // Get paths_ignore from provider config
372        let paths_ignore = ci_config
373            .github_config_for_pipeline(&pipeline.name)
374            .paths_ignore
375            .unwrap_or_default();
376
377        TriggerCondition {
378            branches,
379            pull_request,
380            scheduled,
381            release,
382            manual,
383            paths,
384            paths_ignore,
385        }
386    }
387
388    /// Derive trigger paths from task inputs
389    fn derive_trigger_paths(&self, pipeline: &Pipeline) -> Vec<String> {
390        let mut paths = HashSet::new();
391
392        // Collect inputs from all pipeline tasks (including transitive deps)
393        for task_name in &pipeline.tasks {
394            self.collect_task_inputs(task_name, &mut paths);
395        }
396
397        // Add implicit CUE inputs (changes here should always trigger)
398        paths.insert("env.cue".to_string());
399        paths.insert("schema/**".to_string());
400        paths.insert("cue.mod/**".to_string());
401
402        // Sort for deterministic output
403        let mut result: Vec<_> = paths.into_iter().collect();
404        result.sort();
405        result
406    }
407
408    /// Recursively collect task inputs including dependencies
409    fn collect_task_inputs(&self, task_name: &str, paths: &mut HashSet<String>) {
410        if let Some(task) = self.find_task(task_name) {
411            // Add direct inputs
412            for input in task.iter_path_inputs() {
413                paths.insert(input.clone());
414            }
415            // Recurse into dependencies
416            for dep in &task.depends_on {
417                self.collect_task_inputs(dep, paths);
418            }
419        }
420    }
421
422    /// Find a task by name (handles dotted paths for nested tasks)
423    fn find_task(&self, name: &str) -> Option<&Task> {
424        let parts: Vec<&str> = name.split('.').collect();
425        let mut current_tasks = &self.project.tasks;
426
427        for (i, part) in parts.iter().enumerate() {
428            match current_tasks.get(*part) {
429                Some(TaskDefinition::Single(task)) if i == parts.len() - 1 => {
430                    return Some(task);
431                }
432                Some(TaskDefinition::Group(TaskGroup::Parallel(parallel))) => {
433                    current_tasks = &parallel.tasks;
434                }
435                _ => return None,
436            }
437        }
438        None
439    }
440
441    /// Compile task definitions into IR tasks
442    fn compile_tasks(
443        &self,
444        tasks: &HashMap<String, TaskDefinition>,
445        ir: &mut IntermediateRepresentation,
446    ) -> Result<(), CompilerError> {
447        // Sort keys for deterministic output
448        let mut sorted_keys: Vec<_> = tasks.keys().collect();
449        sorted_keys.sort();
450        for name in sorted_keys {
451            let task_def = &tasks[name];
452            self.compile_task_definition(name, task_def, ir)?;
453        }
454        Ok(())
455    }
456
457    /// Compile a single task definition (handles groups and single tasks)
458    fn compile_task_definition(
459        &self,
460        name: &str,
461        task_def: &TaskDefinition,
462        ir: &mut IntermediateRepresentation,
463    ) -> Result<(), CompilerError> {
464        match task_def {
465            TaskDefinition::Single(task) => {
466                let ir_task = self.compile_single_task(name, task)?;
467                ir.tasks.push(ir_task);
468            }
469            TaskDefinition::Group(group) => {
470                self.compile_task_group(name, group, ir)?;
471            }
472        }
473        Ok(())
474    }
475
476    /// Compile a task group (sequential or parallel)
477    fn compile_task_group(
478        &self,
479        prefix: &str,
480        group: &TaskGroup,
481        ir: &mut IntermediateRepresentation,
482    ) -> Result<(), CompilerError> {
483        match group {
484            TaskGroup::Sequential(tasks) => {
485                for (idx, task_def) in tasks.iter().enumerate() {
486                    let task_name = format!("{prefix}.{idx}");
487                    self.compile_task_definition(&task_name, task_def, ir)?;
488                }
489            }
490            TaskGroup::Parallel(parallel) => {
491                // Sort keys for deterministic output
492                let mut sorted_keys: Vec<_> = parallel.tasks.keys().collect();
493                sorted_keys.sort();
494                for name in sorted_keys {
495                    let task_def = &parallel.tasks[name];
496                    let task_name = format!("{prefix}.{name}");
497                    self.compile_task_definition(&task_name, task_def, ir)?;
498                }
499            }
500        }
501        Ok(())
502    }
503
504    /// Compile a single task to IR format
505    fn compile_single_task(&self, id: &str, task: &Task) -> Result<IrTask, CompilerError> {
506        // Convert command and args to array format
507        let command = if !task.command.is_empty() {
508            let mut cmd = vec![task.command.clone()];
509            cmd.extend(task.args.clone());
510            cmd
511        } else if let Some(script) = &task.script {
512            // For scripts, we need to use shell mode
513            // Note: This is a simplified approach; full implementation would
514            // need to handle shebang parsing for polyglot scripts
515            vec!["/bin/sh".to_string(), "-c".to_string(), script.clone()]
516        } else {
517            return Err(CompilerError::InvalidTaskStructure(format!(
518                "Task '{id}' has neither command nor script"
519            )));
520        };
521
522        // Determine shell mode
523        let shell = task.shell.is_some() || task.script.is_some();
524
525        // Convert environment variables (filter out complex JSON values)
526        let env: HashMap<String, String> = task
527            .env
528            .iter()
529            .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
530            .collect();
531
532        // Extract secrets (simplified - would integrate with secret resolver)
533        let secrets: HashMap<String, SecretConfig> = HashMap::new();
534
535        // Convert inputs (path globs only for now)
536        let inputs: Vec<String> = task.iter_path_inputs().cloned().collect();
537
538        // Convert outputs
539        let outputs: Vec<OutputDeclaration> = task
540            .outputs
541            .iter()
542            .map(|path| OutputDeclaration {
543                path: path.clone(),
544                output_type: OutputType::Cas, // Default to CAS
545            })
546            .collect();
547
548        // Determine cache policy
549        let cache_policy = if task.labels.contains(&"deployment".to_string()) {
550            CachePolicy::Disabled
551        } else {
552            self.options.default_cache_policy
553        };
554
555        // Determine if this is a deployment task
556        let deployment = task.labels.contains(&"deployment".to_string());
557
558        Ok(IrTask {
559            id: id.to_string(),
560            runtime: None, // Would be set based on Nix flake configuration
561            command,
562            shell,
563            env,
564            secrets,
565            resources: None, // Would extract from task metadata if available
566            concurrency_group: None,
567            inputs,
568            outputs,
569            depends_on: task.depends_on.clone(),
570            cache_policy,
571            deployment,
572            manual_approval: false, // Would come from task metadata
573        })
574    }
575}
576
577#[cfg(test)]
578mod tests {
579    use super::*;
580    use cuenv_core::tasks::Task;
581
    // Happy path: a command+args task becomes exactly one IR task carrying
    // the command array, input globs, and IR/pipeline metadata.
    #[test]
    fn test_compile_simple_task() {
        let mut project = Project::new("test-project");
        project.tasks.insert(
            "build".to_string(),
            TaskDefinition::Single(Box::new(Task {
                command: "cargo".to_string(),
                args: vec!["build".to_string()],
                inputs: vec![cuenv_core::tasks::Input::Path("src/**/*.rs".to_string())],
                outputs: vec!["target/debug/binary".to_string()],
                ..Default::default()
            })),
        );

        let compiler = Compiler::new(project);
        let ir = compiler.compile().unwrap();

        assert_eq!(ir.version, "1.4");
        assert_eq!(ir.pipeline.name, "test-project");
        assert_eq!(ir.tasks.len(), 1);
        assert_eq!(ir.tasks[0].id, "build");
        assert_eq!(ir.tasks[0].command, vec!["cargo", "build"]);
        assert_eq!(ir.tasks[0].inputs, vec!["src/**/*.rs"]);
    }
606
    // `depends_on` names are carried through to the compiled IR tasks
    // unchanged.
    #[test]
    fn test_compile_task_with_dependencies() {
        let mut project = Project::new("test-project");

        project.tasks.insert(
            "test".to_string(),
            TaskDefinition::Single(Box::new(Task {
                command: "cargo".to_string(),
                args: vec!["test".to_string()],
                depends_on: vec!["build".to_string()],
                ..Default::default()
            })),
        );

        project.tasks.insert(
            "build".to_string(),
            TaskDefinition::Single(Box::new(Task {
                command: "cargo".to_string(),
                args: vec!["build".to_string()],
                ..Default::default()
            })),
        );

        let compiler = Compiler::new(project);
        let ir = compiler.compile().unwrap();

        assert_eq!(ir.tasks.len(), 2);

        let test_task = ir.tasks.iter().find(|t| t.id == "test").unwrap();
        assert_eq!(test_task.depends_on, vec!["build"]);
    }
638
    // The "deployment" label marks the IR task as a deployment and forces
    // CachePolicy::Disabled regardless of the default policy.
    #[test]
    fn test_compile_deployment_task() {
        let mut project = Project::new("test-project");

        project.tasks.insert(
            "deploy".to_string(),
            TaskDefinition::Single(Box::new(Task {
                command: "kubectl".to_string(),
                args: vec!["apply".to_string()],
                labels: vec!["deployment".to_string()],
                ..Default::default()
            })),
        );

        let compiler = Compiler::new(project);
        let ir = compiler.compile().unwrap();

        assert_eq!(ir.tasks.len(), 1);
        assert!(ir.tasks[0].deployment);
        assert_eq!(ir.tasks[0].cache_policy, CachePolicy::Disabled);
    }
660
    // Script-only tasks compile to `/bin/sh -c <script>` with shell mode set.
    #[test]
    fn test_compile_script_task() {
        let mut project = Project::new("test-project");

        project.tasks.insert(
            "script-task".to_string(),
            TaskDefinition::Single(Box::new(Task {
                script: Some("echo 'Running script'\nls -la".to_string()),
                ..Default::default()
            })),
        );

        let compiler = Compiler::new(project);
        let ir = compiler.compile().unwrap();

        assert_eq!(ir.tasks.len(), 1);
        assert!(ir.tasks[0].shell);
        assert_eq!(ir.tasks[0].command[0], "/bin/sh");
        assert_eq!(ir.tasks[0].command[1], "-c");
    }
681
    // A fully-locked flake.lock passes Strict mode and yields a sha256 digest.
    #[test]
    fn test_purity_analysis_pure_flake() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        // Fixture: every non-root node has a `locked` section (pure).
        let json = r#"{
            "nodes": {
                "nixpkgs": {
                    "locked": {
                        "type": "github",
                        "owner": "NixOS",
                        "repo": "nixpkgs",
                        "rev": "abc123",
                        "narHash": "sha256-xxxxxxxxxxxxx"
                    }
                },
                "root": { "inputs": { "nixpkgs": "nixpkgs" } }
            },
            "root": "root",
            "version": 7
        }"#;

        let mut temp_file = NamedTempFile::new().unwrap();
        temp_file.write_all(json.as_bytes()).unwrap();

        let project = Project::new("test-project");
        let options = CompilerOptions {
            purity_mode: PurityMode::Strict,
            flake_lock_path: Some(temp_file.path().to_path_buf()),
            ..Default::default()
        };

        let compiler = Compiler::with_options(project, options);
        let result = compiler.analyze_flake_purity();

        assert!(result.is_some());
        let (digest, purity) = result.unwrap().unwrap();
        assert!(digest.starts_with("sha256:"));
        assert_eq!(purity, PurityMode::Strict);
    }
722
    // Strict mode errors out when an input has only `original` (no `locked`).
    #[test]
    fn test_purity_strict_mode_rejects_unlocked() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        // Fixture: nixpkgs lacks a `locked` section, so the flake is impure.
        let json = r#"{
            "nodes": {
                "nixpkgs": {
                    "original": { "type": "github", "owner": "NixOS", "repo": "nixpkgs" }
                },
                "root": { "inputs": { "nixpkgs": "nixpkgs" } }
            },
            "root": "root",
            "version": 7
        }"#;

        let mut temp_file = NamedTempFile::new().unwrap();
        temp_file.write_all(json.as_bytes()).unwrap();

        let project = Project::new("test-project");
        let options = CompilerOptions {
            purity_mode: PurityMode::Strict,
            flake_lock_path: Some(temp_file.path().to_path_buf()),
            ..Default::default()
        };

        let compiler = Compiler::with_options(project, options);
        let result = compiler.analyze_flake_purity();

        assert!(result.is_some());
        assert!(result.unwrap().is_err());
    }
755
    // Warning mode injects a fresh UUID per analysis of an impure flake, so
    // two runs over the same lock file produce different digests
    // (guaranteed cache miss).
    #[test]
    fn test_purity_warning_mode_injects_uuid() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        // Fixture: unlocked input (no `locked` section) → impure.
        let json = r#"{
            "nodes": {
                "nixpkgs": {
                    "original": { "type": "github", "owner": "NixOS", "repo": "nixpkgs" }
                },
                "root": { "inputs": { "nixpkgs": "nixpkgs" } }
            },
            "root": "root",
            "version": 7
        }"#;

        let mut temp_file = NamedTempFile::new().unwrap();
        temp_file.write_all(json.as_bytes()).unwrap();

        let project = Project::new("test-project");
        let options = CompilerOptions {
            purity_mode: PurityMode::Warning,
            flake_lock_path: Some(temp_file.path().to_path_buf()),
            ..Default::default()
        };

        let compiler = Compiler::with_options(project.clone(), options.clone());
        let result1 = compiler.analyze_flake_purity().unwrap().unwrap();

        let compiler2 = Compiler::with_options(project, options);
        let result2 = compiler2.analyze_flake_purity().unwrap().unwrap();

        // Each compile should produce different digests due to UUID injection
        assert_ne!(result1.0, result2.0);
        assert_eq!(result1.1, PurityMode::Warning);
    }
792
    // Override mode folds the configured overrides into the digest in a
    // deterministic (sorted) order, so repeated analyses agree.
    #[test]
    fn test_purity_override_mode_uses_overrides() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        let json = r#"{
            "nodes": {
                "nixpkgs": {
                    "locked": {
                        "type": "github",
                        "narHash": "sha256-base"
                    }
                },
                "root": { "inputs": { "nixpkgs": "nixpkgs" } }
            },
            "root": "root",
            "version": 7
        }"#;

        let mut temp_file = NamedTempFile::new().unwrap();
        temp_file.write_all(json.as_bytes()).unwrap();

        let mut input_overrides = HashMap::new();
        input_overrides.insert("nixpkgs".to_string(), "sha256-custom".to_string());

        let project = Project::new("test-project");
        let options = CompilerOptions {
            purity_mode: PurityMode::Override,
            flake_lock_path: Some(temp_file.path().to_path_buf()),
            input_overrides,
            ..Default::default()
        };

        let compiler = Compiler::with_options(project.clone(), options.clone());
        let result1 = compiler.analyze_flake_purity().unwrap().unwrap();

        // Same compiler, same overrides = deterministic digest
        let compiler2 = Compiler::with_options(project, options);
        let result2 = compiler2.analyze_flake_purity().unwrap().unwrap();

        assert_eq!(result1.0, result2.0);
        assert_eq!(result1.1, PurityMode::Override);
    }
836
    // compute_runtime wires the analyzed digest and purity mode into the
    // Runtime alongside the caller-provided identifiers.
    #[test]
    fn test_compute_runtime() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        let json = r#"{
            "nodes": {
                "nixpkgs": {
                    "locked": {
                        "type": "github",
                        "narHash": "sha256-test"
                    }
                },
                "root": { "inputs": { "nixpkgs": "nixpkgs" } }
            },
            "root": "root",
            "version": 7
        }"#;

        let mut temp_file = NamedTempFile::new().unwrap();
        temp_file.write_all(json.as_bytes()).unwrap();

        let project = Project::new("test-project");
        let options = CompilerOptions {
            purity_mode: PurityMode::Strict,
            flake_lock_path: Some(temp_file.path().to_path_buf()),
            ..Default::default()
        };

        let compiler = Compiler::with_options(project, options);
        let runtime = compiler
            .compute_runtime(
                "nix-x86_64-linux",
                "github:NixOS/nixpkgs",
                "devShells.x86_64-linux.default",
                "x86_64-linux",
            )
            .unwrap();

        assert_eq!(runtime.id, "nix-x86_64-linux");
        assert_eq!(runtime.flake, "github:NixOS/nixpkgs");
        assert!(runtime.digest.starts_with("sha256:"));
        assert_eq!(runtime.purity, PurityMode::Strict);
    }
881}