1#![allow(clippy::too_many_lines)]
14
15pub mod digest;
16
17use crate::flake::{FlakeLockAnalyzer, FlakeLockError, PurityAnalysis};
18use crate::ir::{
19 ArtifactDownload, BuildStage, CachePolicy, IntermediateRepresentation, IrValidator,
20 ManualTriggerConfig, OutputDeclaration, OutputType, PurityMode, Runtime, SecretConfig,
21 Task as IrTask, TaskCondition, TriggerCondition, WorkflowDispatchInputDef,
22};
23use cuenv_core::ci::{
24 CI, Contributor, ContributorTask, ManualTrigger, Pipeline, PipelineTask, SecretRef,
25 TaskCondition as CueTaskCondition,
26};
27use cuenv_core::manifest::Project;
28use cuenv_core::tasks::{Task, TaskGroup, TaskNode};
29use digest::DigestBuilder;
30use std::collections::{BTreeMap, HashMap, HashSet};
31use std::path::{Path, PathBuf};
32use thiserror::Error;
33use uuid::Uuid;
34
/// Errors that can occur while compiling a [`Project`] into IR.
#[derive(Debug, Error)]
pub enum CompilerError {
    /// The produced IR failed [`IrValidator`] checks; the message joins all
    /// individual validation errors with ", ".
    #[error("Task graph validation failed: {0}")]
    ValidationFailed(String),

    /// A task referenced by name could not be located in the project's task tree.
    #[error("Task '{0}' not found")]
    TaskNotFound(String),

    /// A task's shell form cannot be represented as an IR command array.
    /// NOTE(review): not constructed anywhere in this file — confirm it is
    /// still raised by a caller before removing.
    #[error("Task '{0}' uses shell script but IR requires command array")]
    ShellScriptNotSupported(String),

    /// The task definition is structurally invalid (e.g. declares neither a
    /// command nor a script).
    #[error("Invalid task structure: {0}")]
    InvalidTaskStructure(String),

    /// Propagated failure from `flake.lock` parsing or purity analysis.
    #[error("Flake lock error: {0}")]
    FlakeLock(#[from] FlakeLockError),
}
53
/// Compiles a [`Project`]'s task tree into an [`IntermediateRepresentation`].
pub struct Compiler {
    /// The project manifest whose tasks are being compiled.
    project: Project,

    /// Knobs controlling purity handling, caching, pipeline selection, etc.
    options: CompilerOptions,
}
62
/// Tuning knobs for [`Compiler`].
#[derive(Clone, Default)]
pub struct CompilerOptions {
    /// How unlocked flake inputs affect compilation (strict / warning / override).
    pub purity_mode: PurityMode,

    /// Whether task inputs should be validated.
    /// NOTE(review): not read anywhere in this file — confirm its consumer.
    pub validate_inputs: bool,

    /// Cache policy applied to tasks that are not labelled `deployment`.
    pub default_cache_policy: CachePolicy,

    /// Explicit location of `flake.lock`; takes precedence over `project_root`.
    pub flake_lock_path: Option<PathBuf>,

    /// Project root used to locate `flake.lock` when no explicit path is set.
    pub project_root: Option<PathBuf>,

    /// Flake input overrides folded into the digest in `Override` purity mode.
    pub input_overrides: HashMap<String, String>,

    /// Name of the pipeline being compiled, if any.
    pub pipeline_name: Option<String>,

    /// Pipeline definition driving task selection and trigger derivation.
    pub pipeline: Option<Pipeline>,

    /// When true, task outputs flow through the CI orchestrator instead of CAS.
    pub ci_mode: bool,

    /// Root of the CUE module; used for workspace discovery.
    pub module_root: Option<PathBuf>,

    /// Path of this project relative to the module root (e.g. "." or "apps/x").
    pub project_path: Option<String>,
}
116
// Manual `Debug` mirroring every field of `CompilerOptions`.
// NOTE(review): looks equivalent to `#[derive(Debug)]` — confirm whether a
// field type blocks deriving before replacing. Remember to add new fields
// here when the struct grows.
impl std::fmt::Debug for CompilerOptions {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CompilerOptions")
            .field("purity_mode", &self.purity_mode)
            .field("validate_inputs", &self.validate_inputs)
            .field("default_cache_policy", &self.default_cache_policy)
            .field("flake_lock_path", &self.flake_lock_path)
            .field("project_root", &self.project_root)
            .field("input_overrides", &self.input_overrides)
            .field("pipeline_name", &self.pipeline_name)
            .field("pipeline", &self.pipeline)
            .field("ci_mode", &self.ci_mode)
            .field("module_root", &self.module_root)
            .field("project_path", &self.project_path)
            .finish()
    }
}
134
135impl Compiler {
    /// Creates a compiler for `project` using default [`CompilerOptions`].
    #[must_use]
    pub fn new(project: Project) -> Self {
        Self {
            project,
            options: CompilerOptions::default(),
        }
    }
144
    /// Creates a compiler for `project` with explicit `options`.
    #[must_use]
    pub const fn with_options(project: Project, options: CompilerOptions) -> Self {
        Self { project, options }
    }
150
151 #[must_use]
161 pub fn analyze_flake_purity(&self) -> Option<Result<(String, PurityMode), CompilerError>> {
162 let lock_path = self.resolve_flake_lock_path();
163
164 if !lock_path.exists() {
165 return None;
166 }
167
168 Some(self.perform_flake_analysis(&lock_path))
169 }
170
171 fn resolve_flake_lock_path(&self) -> PathBuf {
173 if let Some(path) = &self.options.flake_lock_path {
175 return path.clone();
176 }
177
178 if let Some(root) = &self.options.project_root {
180 return root.join("flake.lock");
181 }
182
183 PathBuf::from("flake.lock")
185 }
186
    /// Loads and analyzes the lock file at `lock_path`, then applies the
    /// configured purity mode to produce the effective digest.
    ///
    /// # Errors
    ///
    /// Fails when the lock file cannot be read or parsed, or when strict
    /// purity is violated.
    fn perform_flake_analysis(
        &self,
        lock_path: &Path,
    ) -> Result<(String, PurityMode), CompilerError> {
        let analyzer = FlakeLockAnalyzer::from_path(lock_path)?;
        let analysis = analyzer.analyze();

        self.apply_purity_mode(&analysis)
    }
197
    /// Converts a [`PurityAnalysis`] into an effective cache digest according
    /// to the configured [`PurityMode`].
    ///
    /// # Errors
    ///
    /// In `Strict` mode, returns [`CompilerError::FlakeLock`] when any flake
    /// input is unlocked.
    fn apply_purity_mode(
        &self,
        analysis: &PurityAnalysis,
    ) -> Result<(String, PurityMode), CompilerError> {
        match self.options.purity_mode {
            PurityMode::Strict => {
                // Strict: unlocked inputs are a hard error, listed one per input.
                if !analysis.is_pure {
                    let inputs: Vec<String> = analysis
                        .unlocked_inputs
                        .iter()
                        .map(|u| format!("{}: {}", u.name, u.reason))
                        .collect();
                    return Err(CompilerError::FlakeLock(FlakeLockError::strict_violation(
                        inputs,
                    )));
                }
                Ok((analysis.locked_digest.clone(), PurityMode::Strict))
            }

            PurityMode::Warning => {
                if analysis.is_pure {
                    Ok((analysis.locked_digest.clone(), PurityMode::Warning))
                } else {
                    // Warn once per unlocked input, then salt the digest with
                    // a fresh UUID so the impure build can never hit the cache.
                    for input in &analysis.unlocked_inputs {
                        tracing::warn!(
                            input = %input.name,
                            reason = %input.reason,
                            "Unlocked flake input detected - cache key will be non-deterministic"
                        );
                    }

                    let uuid = Uuid::new_v4().to_string();
                    let mut digest_builder = DigestBuilder::new();
                    digest_builder.add_inputs(std::slice::from_ref(&analysis.locked_digest));
                    digest_builder.add_impurity_uuid(&uuid);

                    Ok((digest_builder.finalize(), PurityMode::Warning))
                }
            }

            PurityMode::Override => {
                let mut effective_digest = analysis.locked_digest.clone();

                // Fold any input overrides into the digest in sorted key order
                // so equal override sets always hash identically.
                if !self.options.input_overrides.is_empty() {
                    let mut digest_builder = DigestBuilder::new();
                    digest_builder.add_inputs(&[effective_digest]);

                    let mut sorted_overrides: Vec<_> =
                        self.options.input_overrides.iter().collect();
                    sorted_overrides.sort_by_key(|(k, _)| *k);

                    for (key, value) in sorted_overrides {
                        digest_builder.add_inputs(&[format!("override:{key}={value}")]);
                    }

                    effective_digest = digest_builder.finalize();
                }

                Ok((effective_digest, PurityMode::Override))
            }
        }
    }
269
    /// Builds a [`Runtime`] description, deriving the digest and purity from
    /// flake analysis when a lock file is present.
    ///
    /// When no `flake.lock` exists, a stable sentinel digest is used together
    /// with the configured purity mode.
    ///
    /// # Errors
    ///
    /// Propagates any [`CompilerError`] raised by flake analysis.
    pub fn compute_runtime(
        &self,
        id: impl Into<String>,
        flake_ref: impl Into<String>,
        output: impl Into<String>,
        system: impl Into<String>,
    ) -> Result<Runtime, CompilerError> {
        let (digest, purity) = match self.analyze_flake_purity() {
            Some(result) => result?,
            None => {
                // No lock file: fall back to a fixed placeholder digest.
                ("sha256:no-flake-lock".to_string(), self.options.purity_mode)
            }
        };

        Ok(Runtime {
            id: id.into(),
            flake: flake_ref.into(),
            output: output.into(),
            system: system.into(),
            digest,
            purity,
        })
    }
303
    /// Compiles the whole project into a validated
    /// [`IntermediateRepresentation`].
    ///
    /// # Errors
    ///
    /// Returns [`CompilerError::ValidationFailed`] when the resulting IR does
    /// not pass [`IrValidator`] checks, or any error raised while compiling
    /// individual tasks.
    pub fn compile(&self) -> Result<IntermediateRepresentation, CompilerError> {
        let mut ir = IntermediateRepresentation::new(&self.project.name);

        // Copy pipeline selection (environment + task list) into the IR.
        if let Some(ref pipeline) = self.options.pipeline {
            ir.pipeline.environment.clone_from(&pipeline.environment);
            ir.pipeline.pipeline_tasks = pipeline
                .tasks
                .iter()
                .map(PipelineTask::task_name)
                .map(String::from)
                .collect();
            ir.pipeline.pipeline_task_defs.clone_from(&pipeline.tasks);
        }

        // Trigger conditions require both a pipeline and a CI config.
        if let Some(ref pipeline) = self.options.pipeline
            && let Some(ci_config) = &self.project.ci
        {
            ir.pipeline.trigger = Some(self.build_trigger_condition(pipeline, ci_config));
        }

        self.compile_tasks(&self.project.tasks, &mut ir)?;

        // Rewrite artifact download paths now that all tasks are known.
        Self::fix_artifact_download_paths(&mut ir);

        self.apply_cue_contributors(&mut ir);

        let validator = IrValidator::new(&ir);
        validator.validate().map_err(|errors| {
            let error_messages: Vec<String> = errors
                .iter()
                .map(std::string::ToString::to_string)
                .collect();
            CompilerError::ValidationFailed(error_messages.join(", "))
        })?;

        Ok(ir)
    }
355
    /// Compiles only the named task (dotted paths allowed) into a fresh IR.
    ///
    /// Unlike [`Compiler::compile`], this performs no validation, artifact
    /// path fix-up, or contributor handling.
    ///
    /// # Errors
    ///
    /// Returns [`CompilerError::TaskNotFound`] when `task_name` does not
    /// resolve in the project's task tree.
    pub fn compile_task(
        &self,
        task_name: &str,
    ) -> Result<IntermediateRepresentation, CompilerError> {
        let mut ir = IntermediateRepresentation::new(&self.project.name);

        let Some(task_node) = self.find_task_node(task_name) else {
            return Err(CompilerError::TaskNotFound(task_name.to_string()));
        };

        self.compile_task_node(task_name, task_node, &mut ir)?;

        Ok(ir)
    }
380
    /// Derives the IR [`TriggerCondition`] from the pipeline's `when` clause.
    fn build_trigger_condition(&self, pipeline: &Pipeline, _ci_config: &CI) -> TriggerCondition {
        let when = pipeline.when.as_ref();

        let branches = when
            .and_then(|w| w.branch.as_ref())
            .map(cuenv_core::ci::StringOrVec::to_vec)
            .unwrap_or_default();

        let pull_request = when.and_then(|w| w.pull_request);

        let scheduled = when
            .and_then(|w| w.scheduled.as_ref())
            .map(cuenv_core::ci::StringOrVec::to_vec)
            .unwrap_or_default();

        let release = when.and_then(|w| w.release.clone()).unwrap_or_default();

        // Normalize both manual-trigger shapes into a ManualTriggerConfig.
        let manual = when.and_then(|w| w.manual.as_ref()).map(|m| match m {
            ManualTrigger::Enabled(enabled) => ManualTriggerConfig {
                enabled: *enabled,
                inputs: BTreeMap::new(),
            },
            ManualTrigger::WithInputs(inputs) => ManualTriggerConfig {
                enabled: true,
                inputs: inputs
                    .iter()
                    .map(|(k, v)| {
                        (
                            k.clone(),
                            WorkflowDispatchInputDef {
                                description: v.description.clone(),
                                required: v.required.unwrap_or(false),
                                default: v.default.clone(),
                                input_type: v.input_type.clone(),
                                options: v.options.clone().unwrap_or_default(),
                            },
                        )
                    })
                    .collect(),
            },
        });

        // Default: derive path filters only for branch/PR-style triggers.
        let should_derive_paths = pipeline.derive_paths.unwrap_or_else(|| {
            !branches.is_empty() || pull_request.is_some()
        });

        let paths = if should_derive_paths {
            self.derive_trigger_paths(pipeline)
        } else {
            Vec::new()
        };

        // No ignore patterns are derived today.
        let paths_ignore = Vec::new();

        TriggerCondition {
            branches,
            pull_request,
            scheduled,
            release,
            manual,
            paths,
            paths_ignore,
        }
    }
456
    /// Computes change-detection path globs for a pipeline: every task input
    /// (transitively through dependencies), the project config files, the
    /// shared CUE module, and workspace dependency paths — each prefixed with
    /// the project path and returned sorted for determinism.
    fn derive_trigger_paths(&self, pipeline: &Pipeline) -> Vec<String> {
        let mut task_inputs = HashSet::new();

        for task in &pipeline.tasks {
            self.collect_task_inputs(task.task_name(), &mut task_inputs);
        }

        let mut paths = HashSet::new();

        // Prefix a glob with the project path unless we're at the module root.
        let prefix_path = |path: &str| -> String {
            match &self.options.project_path {
                Some(pp) if pp == "." => path.to_string(),
                Some(pp) => format!("{pp}/{path}"),
                None => path.to_string(),
            }
        };

        for input in &task_inputs {
            paths.insert(prefix_path(input));
        }

        // No declared inputs: fall back to watching the whole project tree.
        if task_inputs.is_empty() {
            match &self.options.project_path {
                Some(pp) if pp == "." => paths.insert("**".to_string()),
                Some(pp) => paths.insert(format!("{pp}/**")),
                None => paths.insert("**".to_string()),
            };
        }

        // Always watch the project config and the shared CUE module.
        paths.insert(prefix_path("env.cue"));
        paths.insert(prefix_path("schema/**"));
        paths.insert("cue.mod/**".to_string());

        self.add_workspace_dependency_paths(&mut paths);

        let mut result: Vec<_> = paths.into_iter().collect();
        result.sort();
        result
    }
508
    /// Adds `<dep>/**` globs for workspace members the current project depends
    /// on, so changes to those members also trigger this pipeline.
    ///
    /// No-op when there is no project path, the project is the module root
    /// itself, no workspace is discovered, or the project is not a member of
    /// the discovered workspace.
    fn add_workspace_dependency_paths(&self, paths: &mut HashSet<String>) {
        use cuenv_workspaces::{
            CargoTomlDiscovery, PackageJsonDiscovery, PnpmWorkspaceDiscovery, Workspace,
            WorkspaceDiscovery,
        };

        let Some(ref project_path) = self.options.project_path else {
            return;
        };

        if project_path == "." {
            return;
        }

        let module_root = self
            .options
            .module_root
            .clone()
            .or_else(|| self.options.project_root.clone())
            .unwrap_or_else(|| PathBuf::from("."));

        // Try each workspace flavor in priority order; first success wins.
        let workspace: Option<Workspace> = PackageJsonDiscovery
            .discover(&module_root)
            .ok()
            .or_else(|| PnpmWorkspaceDiscovery.discover(&module_root).ok())
            .or_else(|| CargoTomlDiscovery.discover(&module_root).ok());

        let Some(workspace) = workspace else {
            return;
        };

        let project_path_buf = Path::new(project_path);
        let Some(current_member) = workspace.find_member_by_path(project_path_buf) else {
            return;
        };

        let dep_paths = workspace.resolve_workspace_dependency_paths(&current_member.name);

        for dep_path in dep_paths {
            let mut pattern = dep_path.clone();
            pattern.push("**");
            paths.insert(pattern.to_string_lossy().into_owned());
        }
    }
572
573 fn collect_task_inputs(&self, task_name: &str, paths: &mut HashSet<String>) {
575 if let Some(node) = self.find_task_node(task_name) {
576 self.collect_inputs_from_node(node, paths);
577 }
578 }
579
580 fn collect_inputs_from_node(&self, node: &TaskNode, paths: &mut HashSet<String>) {
582 match node {
583 TaskNode::Task(task) => {
584 for input in task.iter_path_inputs() {
586 paths.insert(input.clone());
587 }
588 for dep in &task.depends_on {
590 self.collect_task_inputs(dep.task_name(), paths);
591 }
592 }
593 TaskNode::Group(group) => {
594 for child_node in group.children.values() {
595 self.collect_inputs_from_node(child_node, paths);
596 }
597 }
598 TaskNode::Sequence(steps) => {
599 for child_node in steps {
600 self.collect_inputs_from_node(child_node, paths);
601 }
602 }
603 }
604 }
605
606 fn find_task_node(&self, name: &str) -> Option<&TaskNode> {
609 let parts: Vec<&str> = name.split('.').collect();
610 let mut current_tasks = &self.project.tasks;
611
612 for (i, part) in parts.iter().enumerate() {
613 match current_tasks.get(*part) {
614 Some(node) if i == parts.len() - 1 => {
615 return Some(node);
616 }
617 Some(TaskNode::Group(group)) => {
618 current_tasks = &group.children;
619 }
620 _ => return None,
621 }
622 }
623 None
624 }
625
626 fn find_task(&self, name: &str) -> Option<&Task> {
628 match self.find_task_node(name) {
629 Some(TaskNode::Task(task)) => Some(task),
630 _ => None,
631 }
632 }
633
634 fn expand_dependency_to_leaf_tasks(
646 &self,
647 dep_name: &str,
648 current_task_id: &str,
649 ) -> Vec<String> {
650 if let Some(node) = self.find_task_node(dep_name) {
652 let mut result = Vec::new();
653 Self::collect_leaf_task_names(dep_name, node, &mut result);
654 result.sort();
655 return result;
656 }
657
658 if let Some(parent_path) = current_task_id.rsplit_once('.').map(|(parent, _)| parent) {
661 let sibling_path = format!("{parent_path}.{dep_name}");
662 if let Some(node) = self.find_task_node(&sibling_path) {
663 let mut result = Vec::new();
664 Self::collect_leaf_task_names(&sibling_path, node, &mut result);
665 result.sort();
666 return result;
667 }
668 }
669
670 vec![dep_name.to_string()]
672 }
673
674 fn collect_leaf_task_names(prefix: &str, node: &TaskNode, result: &mut Vec<String>) {
677 match node {
678 TaskNode::Task(_) => {
679 result.push(prefix.to_string());
680 }
681 TaskNode::Group(group) => {
682 for (child_name, child_node) in &group.children {
683 Self::collect_leaf_task_names(
684 &format!("{prefix}.{child_name}"),
685 child_node,
686 result,
687 );
688 }
689 }
690 TaskNode::Sequence(steps) => {
691 for (idx, child_node) in steps.iter().enumerate() {
692 Self::collect_leaf_task_names(&format!("{prefix}.{idx}"), child_node, result);
693 }
694 }
695 }
696 }
697
698 fn compile_tasks(
700 &self,
701 tasks: &HashMap<String, TaskNode>,
702 ir: &mut IntermediateRepresentation,
703 ) -> Result<(), CompilerError> {
704 let mut sorted_keys: Vec<_> = tasks.keys().collect();
706 sorted_keys.sort();
707 for name in sorted_keys {
708 let task_node = &tasks[name];
709 self.compile_task_node(name, task_node, ir)?;
710 }
711 Ok(())
712 }
713
    /// Dispatches compilation for one task-tree node: a leaf task becomes an
    /// IR task, while groups and sequences recurse with dotted name prefixes.
    fn compile_task_node(
        &self,
        name: &str,
        node: &TaskNode,
        ir: &mut IntermediateRepresentation,
    ) -> Result<(), CompilerError> {
        match node {
            TaskNode::Task(task) => {
                let ir_task = self.compile_single_task(name, task)?;
                ir.tasks.push(ir_task);
            }
            TaskNode::Group(group) => {
                self.compile_task_group(name, group, ir)?;
            }
            TaskNode::Sequence(steps) => {
                self.compile_task_sequence(name, steps, ir)?;
            }
        }
        Ok(())
    }
735
736 fn compile_task_group(
738 &self,
739 prefix: &str,
740 group: &TaskGroup,
741 ir: &mut IntermediateRepresentation,
742 ) -> Result<(), CompilerError> {
743 let mut sorted_keys: Vec<_> = group.children.keys().collect();
745 sorted_keys.sort();
746 for name in sorted_keys {
747 let child_node = &group.children[name];
748 let task_name = format!("{prefix}.{name}");
749 self.compile_task_node(&task_name, child_node, ir)?;
750 }
751 Ok(())
752 }
753
754 fn compile_task_sequence(
756 &self,
757 prefix: &str,
758 steps: &[TaskNode],
759 ir: &mut IntermediateRepresentation,
760 ) -> Result<(), CompilerError> {
761 for (idx, child_node) in steps.iter().enumerate() {
762 let task_name = format!("{prefix}.{idx}");
763 self.compile_task_node(&task_name, child_node, ir)?;
764 }
765 Ok(())
766 }
767
768 fn compile_single_task(&self, id: &str, task: &Task) -> Result<IrTask, CompilerError> {
770 let command = if !task.command.is_empty() {
772 let mut cmd = vec![task.command.clone()];
773 cmd.extend(task.args.clone());
774 cmd
775 } else if let Some(script) = &task.script {
776 vec!["/bin/sh".to_string(), "-c".to_string(), script.clone()]
780 } else {
781 return Err(CompilerError::InvalidTaskStructure(format!(
782 "Task '{id}' has neither command nor script"
783 )));
784 };
785
786 let shell = task.shell.is_some() || task.script.is_some();
788
789 let env: BTreeMap<String, String> = task
791 .env
792 .iter()
793 .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
794 .collect();
795
796 let secrets: BTreeMap<String, SecretConfig> = BTreeMap::new();
798
799 let inputs: Vec<String> = task.iter_path_inputs().cloned().collect();
801
802 let output_type = if self.options.ci_mode {
804 OutputType::Orchestrator
805 } else {
806 OutputType::Cas
807 };
808 let outputs: Vec<OutputDeclaration> = task
809 .outputs
810 .iter()
811 .map(|path| OutputDeclaration {
812 path: path.clone(),
813 output_type,
814 })
815 .collect();
816
817 let artifact_downloads: Vec<ArtifactDownload> = if self.options.ci_mode {
819 task.iter_task_outputs()
820 .map(|task_ref| {
821 ArtifactDownload {
824 name: format!("{}-artifacts", task_ref.task.replace('.', "-")),
825 path: task_ref.task.replace('.', "/"),
826 filter: String::new(),
827 }
828 })
829 .collect()
830 } else {
831 vec![]
832 };
833
834 let cache_policy = if task.labels.contains(&"deployment".to_string()) {
836 CachePolicy::Disabled
837 } else {
838 self.options.default_cache_policy
839 };
840
841 let deployment = task.labels.contains(&"deployment".to_string());
843
844 Ok(IrTask {
845 id: id.to_string(),
846 runtime: None, command,
848 shell,
849 env,
850 secrets,
851 resources: None, concurrency_group: None,
853 inputs,
854 outputs,
855 depends_on: task
856 .depends_on
857 .iter()
858 .flat_map(|d| self.expand_dependency_to_leaf_tasks(d.task_name(), id))
859 .collect(),
860 cache_policy,
861 deployment,
862 manual_approval: false, matrix: None,
864 artifact_downloads,
865 params: BTreeMap::new(),
866 phase: None,
868 label: None,
869 priority: None,
870 contributor: None,
871 condition: None,
872 provider_hints: None,
873 })
874 }
875
876 fn fix_artifact_download_paths(ir: &mut IntermediateRepresentation) {
883 let task_outputs: HashMap<String, String> = ir
886 .tasks
887 .iter()
888 .filter_map(|task| {
889 task.outputs
890 .first()
891 .map(|output| (task.id.clone(), output.path.clone()))
892 })
893 .collect();
894
895 for task in &mut ir.tasks {
897 for download in &mut task.artifact_downloads {
898 let upstream_task_id = download
900 .name
901 .strip_suffix("-artifacts")
902 .map(|s| s.replace('-', "."))
903 .unwrap_or_default();
904
905 if let Some(output_path) = task_outputs.get(&upstream_task_id) {
907 download.path.clone_from(output_path);
908 }
909 }
910 }
911 }
912
913 fn apply_cue_contributors(&self, ir: &mut IntermediateRepresentation) {
918 let Some(ref ci_config) = self.project.ci else {
919 return;
920 };
921
922 for contributor in &ci_config.contributors {
923 if !self.cue_contributor_is_active(contributor, ir) {
925 continue;
926 }
927
928 let contributed_ids: HashSet<String> = ir
930 .tasks
931 .iter()
932 .filter(|t| t.phase.is_some())
933 .map(|t| t.id.clone())
934 .collect();
935
936 for contributor_task in &contributor.tasks {
938 let full_task_id = format!("cuenv:contributor:{}", contributor_task.id);
940
941 if contributed_ids.contains(&full_task_id) {
943 continue;
944 }
945
946 let task = Self::contributor_task_to_ir(contributor_task, &contributor.id);
947 ir.tasks.push(task);
948 }
949 }
950 }
951
    /// Evaluates a contributor's `when` condition against the project,
    /// compiler options, and the IR compiled so far.
    ///
    /// A contributor with no condition is always active; otherwise every
    /// specified field must match (logical AND), except that `always`
    /// short-circuits all other checks.
    fn cue_contributor_is_active(
        &self,
        contributor: &Contributor,
        ir: &IntermediateRepresentation,
    ) -> bool {
        let Some(ref condition) = contributor.when else {
            return true;
        };

        // `always` overrides everything else, in either direction.
        if let Some(always_val) = condition.always {
            return always_val;
        }

        // Runtime type (nix/devenv/container/...) must match the project's;
        // a project without a runtime fails any runtime_type condition.
        if !condition.runtime_type.is_empty() {
            if let Some(ref runtime) = self.project.runtime {
                let runtime_type = Self::get_runtime_type(runtime);
                if !condition.runtime_type.iter().any(|t| t == runtime_type) {
                    return false;
                }
            } else {
                return false;
            }
        }

        // cuenv install source; defaults to "release" when unconfigured.
        if !condition.cuenv_source.is_empty() {
            let source = self
                .project
                .config
                .as_ref()
                .and_then(|c| c.ci.as_ref())
                .and_then(|ci| ci.cuenv.as_ref())
                .map_or("release", |c| c.source.as_str());
            if !condition.cuenv_source.iter().any(|s| s == source) {
                return false;
            }
        }

        if !condition.secrets_provider.is_empty()
            && !self.has_secrets_provider(&condition.secrets_provider, ir)
        {
            return false;
        }

        if !condition.provider_config.is_empty()
            && !self.has_provider_config(&condition.provider_config)
        {
            return false;
        }

        if !condition.task_command.is_empty()
            && !Self::has_task_command(&condition.task_command, ir)
        {
            return false;
        }

        if !condition.task_labels.is_empty() && !self.has_task_labels(&condition.task_labels) {
            return false;
        }

        // Pipeline environment name must be one of the listed environments.
        if !condition.environment.is_empty() {
            let Some(ref pipeline) = self.options.pipeline else {
                return false;
            };
            let Some(ref env_name) = pipeline.environment else {
                return false;
            };
            if !condition.environment.iter().any(|e| e == env_name) {
                return false;
            }
        }

        // Workspace manager (npm/pnpm/cargo/...) detected at the module root
        // must match one of the requested members (case-insensitive).
        if !condition.workspace_member.is_empty() {
            let module_root = self
                .options
                .module_root
                .clone()
                .or_else(|| self.options.project_root.clone())
                .unwrap_or_else(|| std::path::PathBuf::from("."));

            let detected = self.detect_workspace_managers(&module_root);

            if !condition
                .workspace_member
                .iter()
                .any(|t| detected.contains(&t.to_lowercase()))
            {
                return false;
            }
        }

        true
    }
1057
    /// Lists lowercase package-manager identifiers detected at `module_root`.
    ///
    /// A package.json workspace takes precedence when it either contains the
    /// current project path or has a lockfile (or when no project path is
    /// configured at all); otherwise falls back to generic package-manager
    /// detection.
    fn detect_workspace_managers(&self, module_root: &std::path::Path) -> Vec<String> {
        use cuenv_workspaces::{PackageJsonDiscovery, WorkspaceDiscovery};

        if let Ok(workspace) = PackageJsonDiscovery.discover(module_root) {
            if let Some(ref project_path) = self.options.project_path {
                let path = std::path::Path::new(project_path);
                if workspace.contains_path(path) || workspace.lockfile.is_some() {
                    return vec![workspace.manager.to_string().to_lowercase()];
                }
            } else {
                return vec![workspace.manager.to_string().to_lowercase()];
            }
        }

        // Fallback: scan for any known package-manager markers.
        cuenv_workspaces::detection::detect_package_managers(module_root)
            .unwrap_or_default()
            .into_iter()
            .map(|m| m.to_string().to_lowercase())
            .collect()
    }
1087
    /// Maps a manifest runtime variant to its canonical string identifier, as
    /// matched by contributor `runtime_type` conditions.
    fn get_runtime_type(runtime: &cuenv_core::manifest::Runtime) -> &'static str {
        match runtime {
            cuenv_core::manifest::Runtime::Nix(_) => "nix",
            cuenv_core::manifest::Runtime::Devenv(_) => "devenv",
            cuenv_core::manifest::Runtime::Container(_) => "container",
            cuenv_core::manifest::Runtime::Dagger(_) => "dagger",
            cuenv_core::manifest::Runtime::Oci(_) => "oci",
            cuenv_core::manifest::Runtime::Tools(_) => "tools",
        }
    }
1099
1100 fn has_secrets_provider(&self, providers: &[String], ir: &IntermediateRepresentation) -> bool {
1102 let Some(ref env_name) = ir.pipeline.environment else {
1103 return false;
1104 };
1105 let Some(ref env) = self.project.env else {
1106 return false;
1107 };
1108
1109 let env_vars = env.for_environment(env_name);
1111 for value in env_vars.values() {
1112 if Self::value_has_provider(value, providers) {
1113 return true;
1114 }
1115 }
1116 false
1117 }
1118
    /// True when an env value ultimately resolves through one of `providers`.
    ///
    /// 1Password is special-cased: plain string values beginning with
    /// `op://` count when "onepassword" is among the requested providers.
    fn value_has_provider(value: &cuenv_core::environment::EnvValue, providers: &[String]) -> bool {
        use cuenv_core::environment::{EnvValue, EnvValueSimple};

        match value {
            EnvValue::String(s)
                if providers.iter().any(|p| p == "onepassword") && s.starts_with("op://") =>
            {
                true
            }
            EnvValue::Secret(secret) => providers.iter().any(|p| p == &secret.resolver),
            EnvValue::Interpolated(parts) => Self::parts_have_provider(parts, providers),
            // Policy-wrapped values: apply the same three checks to the inner value.
            EnvValue::WithPolicies(wp) => match &wp.value {
                EnvValueSimple::Secret(secret) => providers.iter().any(|p| p == &secret.resolver),
                EnvValueSimple::String(s)
                    if providers.iter().any(|p| p == "onepassword") && s.starts_with("op://") =>
                {
                    true
                }
                EnvValueSimple::Interpolated(parts) => Self::parts_have_provider(parts, providers),
                _ => false,
            },
            _ => false,
        }
    }
1145
1146 fn parts_have_provider(
1148 parts: &[cuenv_core::environment::EnvPart],
1149 providers: &[String],
1150 ) -> bool {
1151 use cuenv_core::environment::EnvPart;
1152
1153 parts.iter().any(|part| match part {
1154 EnvPart::Secret(secret) => providers.iter().any(|p| p == &secret.resolver),
1155 EnvPart::Literal(s) => {
1156 providers.iter().any(|p| p == "onepassword") && s.contains("op://")
1157 }
1158 })
1159 }
1160
1161 fn has_provider_config(&self, paths: &[String]) -> bool {
1163 let Some(ref ci) = self.project.ci else {
1164 return false;
1165 };
1166 let Some(ref provider) = ci.provider else {
1167 return false;
1168 };
1169
1170 for path in paths {
1171 let parts: Vec<&str> = path.split('.').collect();
1172 if parts.is_empty() {
1173 continue;
1174 }
1175
1176 let Some(config) = provider.get(parts[0]) else {
1178 continue;
1179 };
1180
1181 let mut current = config;
1183 let mut found = true;
1184 for part in &parts[1..] {
1185 match current.get(*part) {
1186 Some(value) if !value.is_null() => {
1187 current = value;
1188 }
1189 _ => {
1190 found = false;
1191 break;
1192 }
1193 }
1194 }
1195
1196 if found {
1197 return true;
1198 }
1199 }
1200
1201 false
1202 }
1203
    /// True when any task matches the given command words.
    ///
    /// When the IR pipeline declares a task list, only those tasks are
    /// considered. A task matches if its argv starts with `commands` as a
    /// prefix, or — for single-element shell tasks — if the space-joined
    /// command string appears anywhere in the script.
    fn has_task_command(commands: &[String], ir: &IntermediateRepresentation) -> bool {
        for task in &ir.tasks {
            // Respect the pipeline's task selection when one is defined.
            if !ir.pipeline.pipeline_tasks.is_empty()
                && !ir.pipeline.pipeline_tasks.contains(&task.id)
            {
                continue;
            }

            // Exact argv-prefix match.
            if task.command.len() >= commands.len() {
                let matches = commands
                    .iter()
                    .zip(task.command.iter())
                    .all(|(a, b)| a == b);
                if matches {
                    return true;
                }
            }

            // Shell scripts: substring match against the joined command.
            if task.shell && task.command.len() == 1 {
                let cmd_str = commands.join(" ");
                if task.command[0].contains(&cmd_str) {
                    return true;
                }
            }
        }

        false
    }
1237
1238 fn has_task_labels(&self, labels: &[String]) -> bool {
1240 let Some(ref pipeline) = self.options.pipeline else {
1241 return false;
1242 };
1243
1244 for pipeline_task in &pipeline.tasks {
1245 let task_name = pipeline_task.task_name();
1246 if let Some(task) = self.find_task(task_name) {
1247 let has_all = labels.iter().all(|l| task.labels.contains(l));
1248 if has_all {
1249 return true;
1250 }
1251 }
1252 }
1253
1254 false
1255 }
1256
1257 fn derive_stage_from_priority(
1266 priority: i32,
1267 condition: Option<CueTaskCondition>,
1268 ) -> BuildStage {
1269 if matches!(condition, Some(CueTaskCondition::OnFailure)) {
1271 return BuildStage::Failure;
1272 }
1273
1274 match priority {
1275 0..=9 => BuildStage::Bootstrap,
1276 10..=49 => BuildStage::Setup,
1277 _ => BuildStage::Success,
1278 }
1279 }
1280
    /// Translates a CUE-level task condition into its IR equivalent (1:1).
    fn cue_task_condition_to_ir(condition: CueTaskCondition) -> TaskCondition {
        match condition {
            CueTaskCondition::OnSuccess => TaskCondition::OnSuccess,
            CueTaskCondition::OnFailure => TaskCondition::OnFailure,
            CueTaskCondition::Always => TaskCondition::Always,
        }
    }
1289
    /// Converts a contributor task declaration into an [`IrTask`] with id
    /// `cuenv:contributor:<task id>`.
    ///
    /// Commands that are not `cuenv` itself, not GitHub-action steps, and do
    /// not run before cuenv is installed are wrapped in `cuenv exec --` so
    /// they execute inside the project environment.
    fn contributor_task_to_ir(contributor_task: &ContributorTask, contributor_id: &str) -> IrTask {
        let (command, shell) = if let Some(ref cmd) = contributor_task.command {
            let mut cmd_vec = vec![cmd.clone()];
            cmd_vec.extend(contributor_task.args.clone());

            // GitHub-action tasks are emitted via provider hints, not argv.
            let has_github_action = contributor_task
                .provider
                .as_ref()
                .is_some_and(|p| p.github.is_some());

            // cuenv's own tasks and bootstrap-phase tasks (priority < 10) run
            // before cuenv is available, so they cannot be wrapped.
            let is_cuenv_contributor = contributor_id == "cuenv";
            let is_bootstrap = contributor_task.priority < 10;
            let runs_before_cuenv = is_cuenv_contributor || is_bootstrap;

            let needs_wrapping = !has_github_action && cmd != "cuenv" && !runs_before_cuenv;

            if needs_wrapping {
                let mut wrapped = vec!["cuenv".to_string(), "exec".to_string(), "--".to_string()];
                wrapped.extend(cmd_vec);
                (wrapped, contributor_task.shell)
            } else {
                (cmd_vec, contributor_task.shell)
            }
        } else if let Some(ref script) = contributor_task.script {
            // Script-only tasks are always shell-mode.
            (vec![script.clone()], true)
        } else {
            (vec![], false)
        };

        // Normalize both secret-reference shapes into SecretConfig.
        let secrets: BTreeMap<String, SecretConfig> = contributor_task
            .secrets
            .iter()
            .map(|(k, v)| {
                let config = match v {
                    SecretRef::Simple(s) => SecretConfig {
                        source: s.clone(),
                        cache_key: false,
                    },
                    SecretRef::Detailed(d) => SecretConfig {
                        source: d.source.clone(),
                        cache_key: d.cache_key,
                    },
                };
                (k.clone(), config)
            })
            .collect();

        // Encode a GitHub action reference as
        // { "github_action": { "uses": ..., "inputs": {...}? } }.
        let provider_hints = contributor_task.provider.as_ref().and_then(|p| {
            p.github.as_ref().map(|gh| {
                let mut github_action = serde_json::Map::new();
                github_action.insert(
                    "uses".to_string(),
                    serde_json::Value::String(gh.uses.clone()),
                );
                if !gh.inputs.is_empty() {
                    github_action.insert(
                        "inputs".to_string(),
                        serde_json::Value::Object(
                            gh.inputs
                                .iter()
                                .map(|(k, v)| (k.clone(), v.clone()))
                                .collect(),
                        ),
                    );
                }

                let mut hints = serde_json::Map::new();
                hints.insert(
                    "github_action".to_string(),
                    serde_json::Value::Object(github_action),
                );
                serde_json::Value::Object(hints)
            })
        });

        let condition = contributor_task
            .condition
            .map(Self::cue_task_condition_to_ir);

        let stage =
            Self::derive_stage_from_priority(contributor_task.priority, contributor_task.condition);

        // Dependencies are namespaced unless already fully qualified.
        let depends_on: Vec<String> = contributor_task
            .depends_on
            .iter()
            .map(|dep| {
                if dep.starts_with("cuenv:contributor:") {
                    dep.clone()
                } else {
                    format!("cuenv:contributor:{dep}")
                }
            })
            .collect();

        IrTask {
            id: format!("cuenv:contributor:{}", contributor_task.id),
            runtime: None,
            command,
            shell,
            env: contributor_task
                .env
                .iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect(),
            secrets,
            resources: None,
            concurrency_group: None,
            inputs: contributor_task.inputs.clone(),
            outputs: vec![],
            depends_on,
            // Contributor tasks are side-effectful setup steps: never cached.
            cache_policy: CachePolicy::Disabled,
            deployment: false,
            manual_approval: false,
            matrix: None,
            artifact_downloads: vec![],
            params: BTreeMap::new(),
            phase: Some(stage),
            label: contributor_task.label.clone(),
            priority: Some(contributor_task.priority),
            contributor: Some(contributor_id.to_string()),
            condition,
            provider_hints,
        }
    }
1433}
1434
1435#[cfg(test)]
1436mod tests {
1437 use super::*;
1438 use cuenv_core::ci::PipelineMode;
1439 use cuenv_core::tasks::{Task, TaskDependency, TaskNode};
1440
    #[test]
    fn test_compile_simple_task() {
        // A single leaf task compiles into one IR task that preserves the
        // command + args, path inputs, and the project name / IR version.
        let mut project = Project::new("test-project");
        project.tasks.insert(
            "build".to_string(),
            TaskNode::Task(Box::new(Task {
                command: "cargo".to_string(),
                args: vec!["build".to_string()],
                inputs: vec![cuenv_core::tasks::Input::Path("src/**/*.rs".to_string())],
                outputs: vec!["target/debug/binary".to_string()],
                ..Default::default()
            })),
        );

        let compiler = Compiler::new(project);
        let ir = compiler.compile().unwrap();

        assert_eq!(ir.version, "1.5");
        assert_eq!(ir.pipeline.name, "test-project");
        assert_eq!(ir.tasks.len(), 1);
        assert_eq!(ir.tasks[0].id, "build");
        assert_eq!(ir.tasks[0].command, vec!["cargo", "build"]);
        assert_eq!(ir.tasks[0].inputs, vec!["src/**/*.rs"]);
    }
1465
1466 #[test]
1467 fn test_compile_task_with_dependencies() {
1468 let mut project = Project::new("test-project");
1469
1470 project.tasks.insert(
1471 "test".to_string(),
1472 TaskNode::Task(Box::new(Task {
1473 command: "cargo".to_string(),
1474 args: vec!["test".to_string()],
1475 depends_on: vec![TaskDependency::from_name("build")],
1476 ..Default::default()
1477 })),
1478 );
1479
1480 project.tasks.insert(
1481 "build".to_string(),
1482 TaskNode::Task(Box::new(Task {
1483 command: "cargo".to_string(),
1484 args: vec!["build".to_string()],
1485 ..Default::default()
1486 })),
1487 );
1488
1489 let compiler = Compiler::new(project);
1490 let ir = compiler.compile().unwrap();
1491
1492 assert_eq!(ir.tasks.len(), 2);
1493
1494 let test_task = ir.tasks.iter().find(|t| t.id == "test").unwrap();
1495 assert_eq!(test_task.depends_on, vec!["build"]);
1496 }
1497
1498 #[test]
1499 fn test_compile_deployment_task() {
1500 let mut project = Project::new("test-project");
1501
1502 project.tasks.insert(
1503 "deploy".to_string(),
1504 TaskNode::Task(Box::new(Task {
1505 command: "kubectl".to_string(),
1506 args: vec!["apply".to_string()],
1507 labels: vec!["deployment".to_string()],
1508 ..Default::default()
1509 })),
1510 );
1511
1512 let compiler = Compiler::new(project);
1513 let ir = compiler.compile().unwrap();
1514
1515 assert_eq!(ir.tasks.len(), 1);
1516 assert!(ir.tasks[0].deployment);
1517 assert_eq!(ir.tasks[0].cache_policy, CachePolicy::Disabled);
1518 }
1519
1520 #[test]
1521 fn test_compile_script_task() {
1522 let mut project = Project::new("test-project");
1523
1524 project.tasks.insert(
1525 "script-task".to_string(),
1526 TaskNode::Task(Box::new(Task {
1527 script: Some("echo 'Running script'\nls -la".to_string()),
1528 ..Default::default()
1529 })),
1530 );
1531
1532 let compiler = Compiler::new(project);
1533 let ir = compiler.compile().unwrap();
1534
1535 assert_eq!(ir.tasks.len(), 1);
1536 assert!(ir.tasks[0].shell);
1537 assert_eq!(ir.tasks[0].command[0], "/bin/sh");
1538 assert_eq!(ir.tasks[0].command[1], "-c");
1539 }
1540
1541 #[test]
1542 fn test_purity_analysis_pure_flake() {
1543 use std::io::Write;
1544 use tempfile::NamedTempFile;
1545
1546 let json = r#"{
1547 "nodes": {
1548 "nixpkgs": {
1549 "locked": {
1550 "type": "github",
1551 "owner": "NixOS",
1552 "repo": "nixpkgs",
1553 "rev": "abc123",
1554 "narHash": "sha256-xxxxxxxxxxxxx"
1555 }
1556 },
1557 "root": { "inputs": { "nixpkgs": "nixpkgs" } }
1558 },
1559 "root": "root",
1560 "version": 7
1561 }"#;
1562
1563 let mut temp_file = NamedTempFile::new().unwrap();
1564 temp_file.write_all(json.as_bytes()).unwrap();
1565
1566 let project = Project::new("test-project");
1567 let options = CompilerOptions {
1568 purity_mode: PurityMode::Strict,
1569 flake_lock_path: Some(temp_file.path().to_path_buf()),
1570 ..Default::default()
1571 };
1572
1573 let compiler = Compiler::with_options(project, options);
1574 let result = compiler.analyze_flake_purity();
1575
1576 assert!(result.is_some());
1577 let (digest, purity) = result.unwrap().unwrap();
1578 assert!(digest.starts_with("sha256:"));
1579 assert_eq!(purity, PurityMode::Strict);
1580 }
1581
1582 #[test]
1583 fn test_purity_strict_mode_rejects_unlocked() {
1584 use std::io::Write;
1585 use tempfile::NamedTempFile;
1586
1587 let json = r#"{
1588 "nodes": {
1589 "nixpkgs": {
1590 "original": { "type": "github", "owner": "NixOS", "repo": "nixpkgs" }
1591 },
1592 "root": { "inputs": { "nixpkgs": "nixpkgs" } }
1593 },
1594 "root": "root",
1595 "version": 7
1596 }"#;
1597
1598 let mut temp_file = NamedTempFile::new().unwrap();
1599 temp_file.write_all(json.as_bytes()).unwrap();
1600
1601 let project = Project::new("test-project");
1602 let options = CompilerOptions {
1603 purity_mode: PurityMode::Strict,
1604 flake_lock_path: Some(temp_file.path().to_path_buf()),
1605 ..Default::default()
1606 };
1607
1608 let compiler = Compiler::with_options(project, options);
1609 let result = compiler.analyze_flake_purity();
1610
1611 assert!(result.is_some());
1612 assert!(result.unwrap().is_err());
1613 }
1614
1615 #[test]
1616 fn test_purity_warning_mode_injects_uuid() {
1617 use std::io::Write;
1618 use tempfile::NamedTempFile;
1619
1620 let json = r#"{
1621 "nodes": {
1622 "nixpkgs": {
1623 "original": { "type": "github", "owner": "NixOS", "repo": "nixpkgs" }
1624 },
1625 "root": { "inputs": { "nixpkgs": "nixpkgs" } }
1626 },
1627 "root": "root",
1628 "version": 7
1629 }"#;
1630
1631 let mut temp_file = NamedTempFile::new().unwrap();
1632 temp_file.write_all(json.as_bytes()).unwrap();
1633
1634 let project = Project::new("test-project");
1635 let options = CompilerOptions {
1636 purity_mode: PurityMode::Warning,
1637 flake_lock_path: Some(temp_file.path().to_path_buf()),
1638 ..Default::default()
1639 };
1640
1641 let compiler = Compiler::with_options(project.clone(), options.clone());
1642 let result1 = compiler.analyze_flake_purity().unwrap().unwrap();
1643
1644 let compiler2 = Compiler::with_options(project, options);
1645 let result2 = compiler2.analyze_flake_purity().unwrap().unwrap();
1646
1647 assert_ne!(result1.0, result2.0);
1649 assert_eq!(result1.1, PurityMode::Warning);
1650 }
1651
1652 #[test]
1653 fn test_purity_override_mode_uses_overrides() {
1654 use std::io::Write;
1655 use tempfile::NamedTempFile;
1656
1657 let json = r#"{
1658 "nodes": {
1659 "nixpkgs": {
1660 "locked": {
1661 "type": "github",
1662 "narHash": "sha256-base"
1663 }
1664 },
1665 "root": { "inputs": { "nixpkgs": "nixpkgs" } }
1666 },
1667 "root": "root",
1668 "version": 7
1669 }"#;
1670
1671 let mut temp_file = NamedTempFile::new().unwrap();
1672 temp_file.write_all(json.as_bytes()).unwrap();
1673
1674 let mut input_overrides = HashMap::new();
1675 input_overrides.insert("nixpkgs".to_string(), "sha256-custom".to_string());
1676
1677 let project = Project::new("test-project");
1678 let options = CompilerOptions {
1679 purity_mode: PurityMode::Override,
1680 flake_lock_path: Some(temp_file.path().to_path_buf()),
1681 input_overrides,
1682 ..Default::default()
1683 };
1684
1685 let compiler = Compiler::with_options(project.clone(), options.clone());
1686 let result1 = compiler.analyze_flake_purity().unwrap().unwrap();
1687
1688 let compiler2 = Compiler::with_options(project, options);
1690 let result2 = compiler2.analyze_flake_purity().unwrap().unwrap();
1691
1692 assert_eq!(result1.0, result2.0);
1693 assert_eq!(result1.1, PurityMode::Override);
1694 }
1695
1696 #[test]
1697 fn test_compute_runtime() {
1698 use std::io::Write;
1699 use tempfile::NamedTempFile;
1700
1701 let json = r#"{
1702 "nodes": {
1703 "nixpkgs": {
1704 "locked": {
1705 "type": "github",
1706 "narHash": "sha256-test"
1707 }
1708 },
1709 "root": { "inputs": { "nixpkgs": "nixpkgs" } }
1710 },
1711 "root": "root",
1712 "version": 7
1713 }"#;
1714
1715 let mut temp_file = NamedTempFile::new().unwrap();
1716 temp_file.write_all(json.as_bytes()).unwrap();
1717
1718 let project = Project::new("test-project");
1719 let options = CompilerOptions {
1720 purity_mode: PurityMode::Strict,
1721 flake_lock_path: Some(temp_file.path().to_path_buf()),
1722 ..Default::default()
1723 };
1724
1725 let compiler = Compiler::with_options(project, options);
1726 let runtime = compiler
1727 .compute_runtime(
1728 "nix-x86_64-linux",
1729 "github:NixOS/nixpkgs",
1730 "devShells.x86_64-linux.default",
1731 "x86_64-linux",
1732 )
1733 .unwrap();
1734
1735 assert_eq!(runtime.id, "nix-x86_64-linux");
1736 assert_eq!(runtime.flake, "github:NixOS/nixpkgs");
1737 assert!(runtime.digest.starts_with("sha256:"));
1738 assert_eq!(runtime.purity, PurityMode::Strict);
1739 }
1740
1741 #[test]
1742 fn test_derive_trigger_paths_with_project_path() {
1743 use cuenv_core::ci::{CI, Pipeline, PipelineCondition, PipelineTask, StringOrVec, TaskRef};
1744 use std::collections::BTreeMap;
1745
1746 let mut project = Project::new("test-project");
1747 project.tasks.insert(
1748 "build".to_string(),
1749 TaskNode::Task(Box::new(Task {
1750 command: "cargo".to_string(),
1751 args: vec!["build".to_string()],
1752 inputs: vec![
1753 cuenv_core::tasks::Input::Path("src/**/*.rs".to_string()),
1754 cuenv_core::tasks::Input::Path("Cargo.toml".to_string()),
1755 ],
1756 ..Default::default()
1757 })),
1758 );
1759
1760 let pipeline = Pipeline {
1761 tasks: vec![PipelineTask::Simple(TaskRef::from_name("build"))],
1762 when: Some(PipelineCondition {
1763 branch: Some(StringOrVec::String("main".to_string())),
1764 pull_request: None,
1765 tag: None,
1766 default_branch: None,
1767 scheduled: None,
1768 manual: None,
1769 release: None,
1770 }),
1771 ..Default::default()
1772 };
1773
1774 project.ci = Some(CI {
1776 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
1777 ..Default::default()
1778 });
1779
1780 let options = CompilerOptions {
1781 pipeline_name: Some("default".to_string()),
1782 pipeline: Some(pipeline),
1783 project_path: Some("projects/api".to_string()),
1784 ..Default::default()
1785 };
1786
1787 let compiler = Compiler::with_options(project, options);
1788 let ir = compiler.compile().unwrap();
1789
1790 let trigger = ir.pipeline.trigger.expect("should have trigger");
1791
1792 assert!(
1794 trigger
1795 .paths
1796 .contains(&"projects/api/src/**/*.rs".to_string())
1797 );
1798 assert!(
1799 trigger
1800 .paths
1801 .contains(&"projects/api/Cargo.toml".to_string())
1802 );
1803
1804 assert!(trigger.paths.contains(&"projects/api/env.cue".to_string()));
1806 assert!(
1807 trigger
1808 .paths
1809 .contains(&"projects/api/schema/**".to_string())
1810 );
1811
1812 assert!(trigger.paths.contains(&"cue.mod/**".to_string()));
1814 }
1815
1816 #[test]
1817 fn test_derive_trigger_paths_fallback_to_project_dir() {
1818 use cuenv_core::ci::{CI, Pipeline, PipelineCondition, PipelineTask, StringOrVec, TaskRef};
1819 use std::collections::BTreeMap;
1820
1821 let mut project = Project::new("test-project");
1822 project.tasks.insert(
1824 "deploy".to_string(),
1825 TaskNode::Task(Box::new(Task {
1826 command: "kubectl".to_string(),
1827 args: vec!["apply".to_string()],
1828 ..Default::default()
1829 })),
1830 );
1831
1832 let pipeline = Pipeline {
1833 tasks: vec![PipelineTask::Simple(TaskRef::from_name("deploy"))],
1834 when: Some(PipelineCondition {
1835 branch: Some(StringOrVec::String("main".to_string())),
1836 pull_request: None,
1837 tag: None,
1838 default_branch: None,
1839 scheduled: None,
1840 manual: None,
1841 release: None,
1842 }),
1843 ..Default::default()
1844 };
1845
1846 project.ci = Some(CI {
1847 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
1848 ..Default::default()
1849 });
1850
1851 let options = CompilerOptions {
1852 pipeline_name: Some("default".to_string()),
1853 pipeline: Some(pipeline),
1854 project_path: Some("projects/rawkode.academy/api".to_string()),
1855 ..Default::default()
1856 };
1857
1858 let compiler = Compiler::with_options(project, options);
1859 let ir = compiler.compile().unwrap();
1860
1861 let trigger = ir.pipeline.trigger.expect("should have trigger");
1862
1863 assert!(
1865 trigger
1866 .paths
1867 .contains(&"projects/rawkode.academy/api/**".to_string()),
1868 "Should contain fallback path. Paths: {:?}",
1869 trigger.paths
1870 );
1871 }
1872
1873 #[test]
1874 fn test_derive_trigger_paths_root_project() {
1875 use cuenv_core::ci::{CI, Pipeline, PipelineCondition, PipelineTask, StringOrVec, TaskRef};
1876 use std::collections::BTreeMap;
1877
1878 let mut project = Project::new("test-project");
1879 project.tasks.insert(
1880 "build".to_string(),
1881 TaskNode::Task(Box::new(Task {
1882 command: "cargo".to_string(),
1883 args: vec!["build".to_string()],
1884 inputs: vec![cuenv_core::tasks::Input::Path("src/**".to_string())],
1885 ..Default::default()
1886 })),
1887 );
1888
1889 let pipeline = Pipeline {
1890 tasks: vec![PipelineTask::Simple(TaskRef::from_name("build"))],
1891 when: Some(PipelineCondition {
1892 branch: Some(StringOrVec::String("main".to_string())),
1893 pull_request: None,
1894 tag: None,
1895 default_branch: None,
1896 scheduled: None,
1897 manual: None,
1898 release: None,
1899 }),
1900 ..Default::default()
1901 };
1902
1903 project.ci = Some(CI {
1904 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
1905 ..Default::default()
1906 });
1907
1908 let options = CompilerOptions {
1910 pipeline_name: Some("default".to_string()),
1911 pipeline: Some(pipeline),
1912 project_path: None,
1913 ..Default::default()
1914 };
1915
1916 let compiler = Compiler::with_options(project, options);
1917 let ir = compiler.compile().unwrap();
1918
1919 let trigger = ir.pipeline.trigger.expect("should have trigger");
1920
1921 assert!(trigger.paths.contains(&"src/**".to_string()));
1923 assert!(trigger.paths.contains(&"env.cue".to_string()));
1924 assert!(trigger.paths.contains(&"schema/**".to_string()));
1925 }
1926
1927 #[test]
1928 fn test_derive_trigger_paths_root_project_no_inputs_fallback() {
1929 use cuenv_core::ci::{CI, Pipeline, PipelineCondition, PipelineTask, StringOrVec, TaskRef};
1930 use std::collections::BTreeMap;
1931
1932 let mut project = Project::new("test-project");
1933 project.tasks.insert(
1935 "deploy".to_string(),
1936 TaskNode::Task(Box::new(Task {
1937 command: "kubectl".to_string(),
1938 args: vec!["apply".to_string()],
1939 ..Default::default()
1940 })),
1941 );
1942
1943 let pipeline = Pipeline {
1944 tasks: vec![PipelineTask::Simple(TaskRef::from_name("deploy"))],
1945 when: Some(PipelineCondition {
1946 branch: Some(StringOrVec::String("main".to_string())),
1947 pull_request: None,
1948 tag: None,
1949 default_branch: None,
1950 scheduled: None,
1951 manual: None,
1952 release: None,
1953 }),
1954 ..Default::default()
1955 };
1956
1957 project.ci = Some(CI {
1958 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
1959 ..Default::default()
1960 });
1961
1962 let options = CompilerOptions {
1964 pipeline_name: Some("default".to_string()),
1965 pipeline: Some(pipeline),
1966 project_path: None,
1967 ..Default::default()
1968 };
1969
1970 let compiler = Compiler::with_options(project, options);
1971 let ir = compiler.compile().unwrap();
1972
1973 let trigger = ir.pipeline.trigger.expect("should have trigger");
1974
1975 assert!(
1977 trigger.paths.contains(&"**".to_string()),
1978 "Root project with no inputs should fallback to **. Paths: {:?}",
1979 trigger.paths
1980 );
1981 }
1982
1983 use cuenv_core::ci::ActivationCondition;
1988 use std::collections::HashMap;
1989
1990 fn test_contributor(id: &str, when: Option<ActivationCondition>) -> Contributor {
1992 Contributor {
1993 id: id.to_string(),
1994 when,
1995 tasks: vec![],
1996 auto_associate: None,
1997 }
1998 }
1999
2000 fn test_ir() -> IntermediateRepresentation {
2002 IntermediateRepresentation {
2003 version: "1.5".to_string(),
2004 pipeline: crate::ir::PipelineMetadata {
2005 name: "test".to_string(),
2006 mode: PipelineMode::default(),
2007 environment: None,
2008 requires_onepassword: false,
2009 project_name: None,
2010 trigger: None,
2011 pipeline_tasks: vec![],
2012 pipeline_task_defs: vec![],
2013 },
2014 runtimes: vec![],
2015 tasks: vec![],
2016 }
2017 }
2018
2019 #[test]
2020 fn test_contributor_no_condition_always_active() {
2021 let project = Project::new("test");
2022 let compiler = Compiler::new(project);
2023 let ir = test_ir();
2024
2025 let contributor = test_contributor("test", None);
2027 assert!(compiler.cue_contributor_is_active(&contributor, &ir));
2028 }
2029
2030 #[test]
2031 fn test_contributor_always_true_active() {
2032 let project = Project::new("test");
2033 let compiler = Compiler::new(project);
2034 let ir = test_ir();
2035
2036 let contributor = test_contributor(
2037 "test",
2038 Some(ActivationCondition {
2039 always: Some(true),
2040 ..Default::default()
2041 }),
2042 );
2043 assert!(compiler.cue_contributor_is_active(&contributor, &ir));
2044 }
2045
2046 #[test]
2047 fn test_contributor_always_false_inactive() {
2048 let project = Project::new("test");
2049 let compiler = Compiler::new(project);
2050 let ir = test_ir();
2051
2052 let contributor = test_contributor(
2054 "test",
2055 Some(ActivationCondition {
2056 always: Some(false),
2057 ..Default::default()
2058 }),
2059 );
2060 assert!(!compiler.cue_contributor_is_active(&contributor, &ir));
2061 }
2062
2063 #[test]
2064 fn test_contributor_runtime_type_matches_nix() {
2065 use cuenv_core::manifest::{NixRuntime, Runtime};
2066
2067 let mut project = Project::new("test");
2068 project.runtime = Some(Runtime::Nix(NixRuntime::default()));
2069
2070 let compiler = Compiler::new(project);
2071 let ir = test_ir();
2072
2073 let contributor = test_contributor(
2074 "nix",
2075 Some(ActivationCondition {
2076 runtime_type: vec!["nix".to_string()],
2077 ..Default::default()
2078 }),
2079 );
2080 assert!(compiler.cue_contributor_is_active(&contributor, &ir));
2081 }
2082
2083 #[test]
2084 fn test_contributor_runtime_type_no_match() {
2085 use cuenv_core::manifest::{NixRuntime, Runtime};
2086
2087 let mut project = Project::new("test");
2088 project.runtime = Some(Runtime::Nix(NixRuntime::default()));
2089
2090 let compiler = Compiler::new(project);
2091 let ir = test_ir();
2092
2093 let contributor = test_contributor(
2095 "devenv-only",
2096 Some(ActivationCondition {
2097 runtime_type: vec!["devenv".to_string()],
2098 ..Default::default()
2099 }),
2100 );
2101 assert!(!compiler.cue_contributor_is_active(&contributor, &ir));
2102 }
2103
2104 #[test]
2105 fn test_contributor_runtime_type_no_runtime_set() {
2106 let project = Project::new("test");
2107 let compiler = Compiler::new(project);
2108 let ir = test_ir();
2109
2110 let contributor = test_contributor(
2112 "needs-nix",
2113 Some(ActivationCondition {
2114 runtime_type: vec!["nix".to_string()],
2115 ..Default::default()
2116 }),
2117 );
2118 assert!(!compiler.cue_contributor_is_active(&contributor, &ir));
2119 }
2120
2121 #[test]
2122 fn test_contributor_cuenv_source_matches() {
2123 use cuenv_core::ci::CI;
2124 use cuenv_core::config::{CIConfig, CuenvConfig, CuenvSource};
2125 use std::collections::BTreeMap;
2126
2127 let mut project = Project::new("test");
2128 project.config = Some(cuenv_core::config::Config::default());
2129 project.ci = Some(CI {
2130 pipelines: BTreeMap::new(),
2131 ..Default::default()
2132 });
2133 if let Some(ref mut config) = project.config {
2135 config.ci = Some(CIConfig {
2136 cuenv: Some(CuenvConfig {
2137 source: CuenvSource::Git,
2138 ..Default::default()
2139 }),
2140 });
2141 }
2142
2143 let compiler = Compiler::new(project);
2144 let ir = test_ir();
2145
2146 let contributor = test_contributor(
2147 "cuenv-git",
2148 Some(ActivationCondition {
2149 cuenv_source: vec!["git".to_string()],
2150 ..Default::default()
2151 }),
2152 );
2153 assert!(compiler.cue_contributor_is_active(&contributor, &ir));
2154 }
2155
2156 #[test]
2157 fn test_contributor_multiple_conditions_and_logic() {
2158 use cuenv_core::manifest::{NixRuntime, Runtime};
2159
2160 let mut project = Project::new("test");
2161 project.runtime = Some(Runtime::Nix(NixRuntime::default()));
2162
2163 let compiler = Compiler::new(project);
2164 let ir = test_ir();
2165
2166 let contributor = test_contributor(
2168 "multi-condition",
2169 Some(ActivationCondition {
2170 runtime_type: vec!["nix".to_string()],
2171 cuenv_source: vec!["nix".to_string()], ..Default::default()
2173 }),
2174 );
2175 assert!(!compiler.cue_contributor_is_active(&contributor, &ir));
2177 }
2178
2179 #[test]
2184 fn test_contributor_task_to_ir_command() {
2185 let contributor_task = ContributorTask {
2186 id: "test-task".to_string(),
2187 label: Some("Test Task".to_string()),
2188 description: None,
2189 command: Some("echo".to_string()),
2190 args: vec!["hello".to_string()],
2191 script: None,
2192 shell: false,
2193 env: HashMap::default(),
2194 secrets: HashMap::default(),
2195 inputs: vec![],
2196 outputs: vec![],
2197 hermetic: false,
2198 depends_on: vec![],
2199 priority: 10,
2200 condition: None,
2201 provider: None,
2202 };
2203
2204 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "github");
2205
2206 assert_eq!(ir_task.id, "cuenv:contributor:test-task");
2207 assert_eq!(
2209 ir_task.command,
2210 vec!["cuenv", "exec", "--", "echo", "hello"]
2211 );
2212 assert!(!ir_task.shell);
2213 assert_eq!(ir_task.priority, Some(10));
2214 assert_eq!(ir_task.phase, Some(BuildStage::Setup)); }
2216
2217 #[test]
2218 fn test_contributor_task_to_ir_script() {
2219 let contributor_task = ContributorTask {
2220 id: "script-task".to_string(),
2221 label: None,
2222 description: None,
2223 command: None,
2224 args: vec![],
2225 script: Some("echo line1\necho line2".to_string()),
2226 shell: true,
2227 env: HashMap::default(),
2228 secrets: HashMap::default(),
2229 inputs: vec![],
2230 outputs: vec![],
2231 hermetic: false,
2232 depends_on: vec!["other".to_string()],
2233 priority: 5,
2234 condition: None,
2235 provider: None,
2236 };
2237
2238 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "github");
2239
2240 assert_eq!(ir_task.id, "cuenv:contributor:script-task");
2241 assert_eq!(ir_task.command, vec!["echo line1\necho line2"]);
2242 assert!(ir_task.shell);
2243 assert_eq!(ir_task.depends_on, vec!["cuenv:contributor:other"]);
2244 assert_eq!(ir_task.priority, Some(5));
2245 assert_eq!(ir_task.phase, Some(BuildStage::Bootstrap)); }
2247
2248 #[test]
2249 fn test_contributor_task_to_ir_github_action() {
2250 use cuenv_core::ci::{GitHubActionConfig, TaskProviderConfig};
2251
2252 let mut inputs = std::collections::BTreeMap::new();
2253 inputs.insert(
2254 "extra-conf".to_string(),
2255 serde_json::Value::String("accept-flake-config = true".to_string()),
2256 );
2257
2258 let contributor_task = ContributorTask {
2259 id: "nix.install".to_string(),
2260 label: Some("Install Nix".to_string()),
2261 description: None,
2262 command: None,
2263 args: vec![],
2264 script: None,
2265 shell: false,
2266 env: HashMap::default(),
2267 secrets: HashMap::default(),
2268 inputs: vec![],
2269 outputs: vec![],
2270 hermetic: false,
2271 depends_on: vec![],
2272 priority: 0,
2273 condition: None,
2274 provider: Some(TaskProviderConfig {
2275 github: Some(GitHubActionConfig {
2276 uses: "DeterminateSystems/nix-installer-action@v16".to_string(),
2277 inputs,
2278 }),
2279 }),
2280 };
2281
2282 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "nix");
2283
2284 assert_eq!(ir_task.id, "cuenv:contributor:nix.install");
2285 assert!(ir_task.command.is_empty()); assert!(ir_task.provider_hints.is_some());
2287 assert_eq!(ir_task.phase, Some(BuildStage::Bootstrap)); let hints = ir_task.provider_hints.as_ref().unwrap();
2291 let github_action = hints.get("github_action").unwrap();
2292 assert_eq!(
2293 github_action.get("uses").and_then(|v| v.as_str()),
2294 Some("DeterminateSystems/nix-installer-action@v16")
2295 );
2296 }
2297
2298 #[test]
2299 fn test_contributor_task_to_ir_secrets() {
2300 use cuenv_core::ci::SecretRefConfig;
2301
2302 let mut secrets = std::collections::HashMap::new();
2303 secrets.insert(
2304 "SIMPLE_SECRET".to_string(),
2305 SecretRef::Simple("SECRET_NAME".to_string()),
2306 );
2307 secrets.insert(
2308 "DETAILED_SECRET".to_string(),
2309 SecretRef::Detailed(SecretRefConfig {
2310 source: "DETAILED_SOURCE".to_string(),
2311 cache_key: true,
2312 }),
2313 );
2314
2315 let contributor_task = ContributorTask {
2316 id: "secrets-task".to_string(),
2317 label: None,
2318 description: None,
2319 command: Some("echo".to_string()),
2320 args: vec!["test".to_string()],
2321 script: None,
2322 shell: false,
2323 env: HashMap::default(),
2324 secrets,
2325 inputs: vec![],
2326 outputs: vec![],
2327 hermetic: false,
2328 depends_on: vec![],
2329 priority: 10,
2330 condition: None,
2331 provider: None,
2332 };
2333
2334 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "github");
2335
2336 assert_eq!(ir_task.secrets.len(), 2);
2337 assert_eq!(ir_task.phase, Some(BuildStage::Setup));
2338
2339 let simple = ir_task.secrets.get("SIMPLE_SECRET").unwrap();
2341 assert_eq!(simple.source, "SECRET_NAME");
2342 assert!(!simple.cache_key);
2343
2344 let detailed = ir_task.secrets.get("DETAILED_SECRET").unwrap();
2346 assert_eq!(detailed.source, "DETAILED_SOURCE");
2347 assert!(detailed.cache_key);
2348 }
2349
2350 #[test]
2351 fn test_contributor_task_to_ir_env_vars() {
2352 let mut env = std::collections::HashMap::new();
2353 env.insert("VAR1".to_string(), "value1".to_string());
2354 env.insert("VAR2".to_string(), "value2".to_string());
2355
2356 let contributor_task = ContributorTask {
2357 id: "env-task".to_string(),
2358 label: None,
2359 description: None,
2360 command: Some("printenv".to_string()),
2361 args: vec![],
2362 script: None,
2363 shell: false,
2364 env,
2365 secrets: HashMap::default(),
2366 inputs: vec![],
2367 outputs: vec![],
2368 hermetic: false,
2369 depends_on: vec![],
2370 priority: 10,
2371 condition: None,
2372 provider: None,
2373 };
2374
2375 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "github");
2376
2377 assert_eq!(ir_task.env.len(), 2);
2378 assert_eq!(ir_task.env.get("VAR1"), Some(&"value1".to_string()));
2379 assert_eq!(ir_task.env.get("VAR2"), Some(&"value2".to_string()));
2380 assert_eq!(ir_task.phase, Some(BuildStage::Setup));
2381 }
2382
2383 #[test]
2384 fn test_contributor_task_to_ir_command_with_args() {
2385 let contributor_task = ContributorTask {
2386 id: "bun.workspace.install".to_string(),
2387 label: Some("Install Bun Dependencies".to_string()),
2388 description: None,
2389 command: Some("bun".to_string()),
2390 args: vec!["install".to_string(), "--frozen-lockfile".to_string()],
2391 script: None,
2392 shell: false,
2393 env: HashMap::default(),
2394 secrets: HashMap::default(),
2395 inputs: vec!["package.json".to_string(), "bun.lock".to_string()],
2396 outputs: vec![],
2397 hermetic: false,
2398 depends_on: vec![],
2399 priority: 10,
2400 condition: None,
2401 provider: None,
2402 };
2403
2404 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "bun.workspace");
2405
2406 assert_eq!(ir_task.id, "cuenv:contributor:bun.workspace.install");
2407 assert_eq!(
2409 ir_task.command,
2410 vec!["cuenv", "exec", "--", "bun", "install", "--frozen-lockfile"]
2411 );
2412 assert!(!ir_task.shell);
2413 assert_eq!(ir_task.phase, Some(BuildStage::Setup));
2414 assert_eq!(ir_task.inputs, vec!["package.json", "bun.lock"]);
2415 }
2416
2417 #[test]
2418 fn test_contributor_task_to_ir_cuenv_contributor_not_wrapped() {
2419 let contributor_task = ContributorTask {
2422 id: "cuenv.setup".to_string(),
2423 label: Some("Setup cuenv".to_string()),
2424 description: None,
2425 command: Some("brew".to_string()),
2426 args: vec!["install".to_string(), "cuenv/cuenv/cuenv".to_string()],
2427 script: None,
2428 shell: false,
2429 env: HashMap::default(),
2430 secrets: HashMap::default(),
2431 inputs: vec![],
2432 outputs: vec![],
2433 hermetic: false,
2434 depends_on: vec![],
2435 priority: 10,
2436 condition: None,
2437 provider: None,
2438 };
2439
2440 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "cuenv");
2441
2442 assert_eq!(ir_task.id, "cuenv:contributor:cuenv.setup");
2443 assert_eq!(
2445 ir_task.command,
2446 vec!["brew", "install", "cuenv/cuenv/cuenv"]
2447 );
2448 }
2449
2450 #[test]
2451 fn test_contributor_task_to_ir_bootstrap_not_wrapped() {
2452 let contributor_task = ContributorTask {
2455 id: "setup.rust".to_string(),
2456 label: Some("Setup Rust".to_string()),
2457 description: None,
2458 command: Some("rustup".to_string()),
2459 args: vec!["default".to_string(), "stable".to_string()],
2460 script: None,
2461 shell: false,
2462 env: HashMap::default(),
2463 secrets: HashMap::default(),
2464 inputs: vec![],
2465 outputs: vec![],
2466 hermetic: false,
2467 depends_on: vec![],
2468 priority: 6, condition: None,
2470 provider: None,
2471 };
2472
2473 let ir_task = Compiler::contributor_task_to_ir(&contributor_task, "rust");
2474
2475 assert_eq!(ir_task.id, "cuenv:contributor:setup.rust");
2476 assert_eq!(ir_task.command, vec!["rustup", "default", "stable"]);
2478 assert_eq!(ir_task.phase, Some(BuildStage::Bootstrap));
2479 }
2480
2481 #[test]
2482 fn test_derive_stage_from_priority_bootstrap() {
2483 assert_eq!(
2485 Compiler::derive_stage_from_priority(0, None),
2486 BuildStage::Bootstrap
2487 );
2488 assert_eq!(
2489 Compiler::derive_stage_from_priority(5, None),
2490 BuildStage::Bootstrap
2491 );
2492 assert_eq!(
2493 Compiler::derive_stage_from_priority(9, None),
2494 BuildStage::Bootstrap
2495 );
2496 }
2497
2498 #[test]
2499 fn test_derive_stage_from_priority_setup() {
2500 assert_eq!(
2502 Compiler::derive_stage_from_priority(10, None),
2503 BuildStage::Setup
2504 );
2505 assert_eq!(
2506 Compiler::derive_stage_from_priority(25, None),
2507 BuildStage::Setup
2508 );
2509 assert_eq!(
2510 Compiler::derive_stage_from_priority(49, None),
2511 BuildStage::Setup
2512 );
2513 }
2514
2515 #[test]
2516 fn test_derive_stage_from_priority_success() {
2517 assert_eq!(
2519 Compiler::derive_stage_from_priority(50, None),
2520 BuildStage::Success
2521 );
2522 assert_eq!(
2523 Compiler::derive_stage_from_priority(100, None),
2524 BuildStage::Success
2525 );
2526 }
2527
2528 #[test]
2529 fn test_derive_stage_from_priority_failure_condition() {
2530 assert_eq!(
2532 Compiler::derive_stage_from_priority(0, Some(CueTaskCondition::OnFailure)),
2533 BuildStage::Failure
2534 );
2535 assert_eq!(
2536 Compiler::derive_stage_from_priority(50, Some(CueTaskCondition::OnFailure)),
2537 BuildStage::Failure
2538 );
2539 }
2540
2541 #[test]
2543 fn test_cue_task_condition_to_ir_on_success() {
2544 let result = Compiler::cue_task_condition_to_ir(CueTaskCondition::OnSuccess);
2545 assert_eq!(result, TaskCondition::OnSuccess);
2546 }
2547
2548 #[test]
2549 fn test_cue_task_condition_to_ir_on_failure() {
2550 let result = Compiler::cue_task_condition_to_ir(CueTaskCondition::OnFailure);
2551 assert_eq!(result, TaskCondition::OnFailure);
2552 }
2553
2554 #[test]
2555 fn test_cue_task_condition_to_ir_always() {
2556 let result = Compiler::cue_task_condition_to_ir(CueTaskCondition::Always);
2557 assert_eq!(result, TaskCondition::Always);
2558 }
2559
2560 use cuenv_core::ci::{PipelineCondition, PipelineTask, StringOrVec, TaskRef};
2565 use cuenv_core::tasks::Input;
2566
2567 #[test]
2568 fn test_derive_paths_from_task_group() {
2569 let mut project = Project::new("test-project");
2571
2572 let mut group_tasks = HashMap::new();
2573 group_tasks.insert(
2574 "lint".to_string(),
2575 TaskNode::Task(Box::new(Task {
2576 command: "cargo".to_string(),
2577 args: vec!["clippy".to_string()],
2578 inputs: vec![
2579 Input::Path("Cargo.toml".to_string()),
2580 Input::Path("crates/**".to_string()),
2581 ],
2582 ..Default::default()
2583 })),
2584 );
2585 group_tasks.insert(
2586 "test".to_string(),
2587 TaskNode::Task(Box::new(Task {
2588 command: "cargo".to_string(),
2589 args: vec!["test".to_string()],
2590 inputs: vec![
2591 Input::Path("Cargo.toml".to_string()),
2592 Input::Path("crates/**".to_string()),
2593 Input::Path("tests/**".to_string()),
2594 ],
2595 ..Default::default()
2596 })),
2597 );
2598
2599 project.tasks.insert(
2600 "check".to_string(),
2601 TaskNode::Group(TaskGroup {
2602 type_: "group".to_string(),
2603 children: group_tasks,
2604 depends_on: vec![],
2605 description: None,
2606 max_concurrency: None,
2607 }),
2608 );
2609
2610 let pipeline = Pipeline {
2611 tasks: vec![PipelineTask::Simple(TaskRef::from_name("check"))],
2612 when: Some(PipelineCondition {
2613 branch: Some(StringOrVec::String("main".to_string())),
2614 pull_request: None,
2615 tag: None,
2616 default_branch: None,
2617 scheduled: None,
2618 manual: None,
2619 release: None,
2620 }),
2621 ..Default::default()
2622 };
2623
2624 project.ci = Some(CI {
2625 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
2626 ..Default::default()
2627 });
2628
2629 let options = CompilerOptions {
2631 pipeline_name: Some("default".to_string()),
2632 pipeline: Some(pipeline),
2633 project_path: None,
2634 ..Default::default()
2635 };
2636
2637 let compiler = Compiler::with_options(project, options);
2638 let ir = compiler.compile().unwrap();
2639
2640 let trigger = ir.pipeline.trigger.expect("should have trigger");
2641
2642 assert!(
2644 trigger.paths.contains(&"Cargo.toml".to_string()),
2645 "Should contain Cargo.toml from group tasks. Paths: {:?}",
2646 trigger.paths
2647 );
2648 assert!(
2649 trigger.paths.contains(&"crates/**".to_string()),
2650 "Should contain crates/** from group tasks. Paths: {:?}",
2651 trigger.paths
2652 );
2653 assert!(
2654 trigger.paths.contains(&"tests/**".to_string()),
2655 "Should contain tests/** from group tasks. Paths: {:?}",
2656 trigger.paths
2657 );
2658 assert!(
2660 !trigger.paths.contains(&"**".to_string()),
2661 "Should not fallback to ** when task group has inputs. Paths: {:?}",
2662 trigger.paths
2663 );
2664 }
2665
2666 #[test]
2667 fn test_derive_paths_root_project_no_dot_prefix() {
2668 let mut project = Project::new("test-project");
2670
2671 project.tasks.insert(
2672 "build".to_string(),
2673 TaskNode::Task(Box::new(Task {
2674 command: "cargo".to_string(),
2675 args: vec!["build".to_string()],
2676 inputs: vec![Input::Path("src/**".to_string())],
2677 ..Default::default()
2678 })),
2679 );
2680
2681 let pipeline = Pipeline {
2682 tasks: vec![PipelineTask::Simple(TaskRef::from_name("build"))],
2683 when: Some(PipelineCondition {
2684 branch: Some(StringOrVec::String("main".to_string())),
2685 pull_request: None,
2686 tag: None,
2687 default_branch: None,
2688 scheduled: None,
2689 manual: None,
2690 release: None,
2691 }),
2692 ..Default::default()
2693 };
2694
2695 project.ci = Some(CI {
2696 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
2697 ..Default::default()
2698 });
2699
2700 let options = CompilerOptions {
2702 pipeline_name: Some("default".to_string()),
2703 pipeline: Some(pipeline),
2704 project_path: Some(".".to_string()),
2705 ..Default::default()
2706 };
2707
2708 let compiler = Compiler::with_options(project, options);
2709 let ir = compiler.compile().unwrap();
2710
2711 let trigger = ir.pipeline.trigger.expect("should have trigger");
2712
2713 assert!(
2715 trigger.paths.contains(&"src/**".to_string()),
2716 "Should contain src/** without ./ prefix. Paths: {:?}",
2717 trigger.paths
2718 );
2719 assert!(
2720 !trigger.paths.iter().any(|p| p.starts_with("./")),
2721 "No path should have ./ prefix. Paths: {:?}",
2722 trigger.paths
2723 );
2724 assert!(
2725 trigger.paths.contains(&"env.cue".to_string()),
2726 "Should contain env.cue without ./ prefix. Paths: {:?}",
2727 trigger.paths
2728 );
2729 }
2730
2731 #[test]
2732 fn test_derive_paths_subproject_has_prefix() {
2733 let mut project = Project::new("test-project");
2735
2736 project.tasks.insert(
2737 "build".to_string(),
2738 TaskNode::Task(Box::new(Task {
2739 command: "cargo".to_string(),
2740 args: vec!["build".to_string()],
2741 inputs: vec![Input::Path("src/**".to_string())],
2742 ..Default::default()
2743 })),
2744 );
2745
2746 let pipeline = Pipeline {
2747 tasks: vec![PipelineTask::Simple(TaskRef::from_name("build"))],
2748 when: Some(PipelineCondition {
2749 branch: Some(StringOrVec::String("main".to_string())),
2750 pull_request: None,
2751 tag: None,
2752 default_branch: None,
2753 scheduled: None,
2754 manual: None,
2755 release: None,
2756 }),
2757 ..Default::default()
2758 };
2759
2760 project.ci = Some(CI {
2761 pipelines: BTreeMap::from([("default".to_string(), pipeline.clone())]),
2762 ..Default::default()
2763 });
2764
2765 let options = CompilerOptions {
2767 pipeline_name: Some("default".to_string()),
2768 pipeline: Some(pipeline),
2769 project_path: Some("projects/api".to_string()),
2770 ..Default::default()
2771 };
2772
2773 let compiler = Compiler::with_options(project, options);
2774 let ir = compiler.compile().unwrap();
2775
2776 let trigger = ir.pipeline.trigger.expect("should have trigger");
2777
2778 assert!(
2780 trigger.paths.contains(&"projects/api/src/**".to_string()),
2781 "Should contain prefixed path. Paths: {:?}",
2782 trigger.paths
2783 );
2784 assert!(
2785 trigger.paths.contains(&"projects/api/env.cue".to_string()),
2786 "Should contain prefixed env.cue. Paths: {:?}",
2787 trigger.paths
2788 );
2789 }
2790
2791 #[test]
2792 fn test_expand_dependency_to_task_group() {
2793 let mut project = Project::new("test-project");
2795
2796 let mut test_children = HashMap::new();
2798 test_children.insert(
2799 "unit".to_string(),
2800 TaskNode::Task(Box::new(Task {
2801 command: "cargo".to_string(),
2802 args: vec!["test".to_string(), "--lib".to_string()],
2803 ..Default::default()
2804 })),
2805 );
2806 test_children.insert(
2807 "doc".to_string(),
2808 TaskNode::Task(Box::new(Task {
2809 command: "cargo".to_string(),
2810 args: vec!["test".to_string(), "--doc".to_string()],
2811 ..Default::default()
2812 })),
2813 );
2814
2815 project.tasks.insert(
2816 "tests".to_string(),
2817 TaskNode::Group(TaskGroup {
2818 type_: "group".to_string(),
2819 children: test_children,
2820 depends_on: vec![],
2821 description: None,
2822 max_concurrency: None,
2823 }),
2824 );
2825
2826 project.tasks.insert(
2828 "check".to_string(),
2829 TaskNode::Task(Box::new(Task {
2830 command: "echo".to_string(),
2831 args: vec!["done".to_string()],
2832 depends_on: vec![TaskDependency::from_name("tests")],
2833 ..Default::default()
2834 })),
2835 );
2836
2837 let compiler = Compiler::new(project);
2838 let ir = compiler.compile().unwrap();
2839
2840 let check_task = ir.tasks.iter().find(|t| t.id == "check").unwrap();
2842
2843 assert_eq!(
2845 check_task.depends_on,
2846 vec!["tests.doc", "tests.unit"],
2847 "Group dependency should expand to leaf tasks"
2848 );
2849 }
2850
2851 #[test]
2852 fn test_expand_dependency_leaf_task_unchanged() {
2853 let mut project = Project::new("test-project");
2855
2856 project.tasks.insert(
2857 "build".to_string(),
2858 TaskNode::Task(Box::new(Task {
2859 command: "cargo".to_string(),
2860 args: vec!["build".to_string()],
2861 ..Default::default()
2862 })),
2863 );
2864
2865 project.tasks.insert(
2866 "test".to_string(),
2867 TaskNode::Task(Box::new(Task {
2868 command: "cargo".to_string(),
2869 args: vec!["test".to_string()],
2870 depends_on: vec![TaskDependency::from_name("build")],
2871 ..Default::default()
2872 })),
2873 );
2874
2875 let compiler = Compiler::new(project);
2876 let ir = compiler.compile().unwrap();
2877
2878 let test_task = ir.tasks.iter().find(|t| t.id == "test").unwrap();
2879 assert_eq!(
2880 test_task.depends_on,
2881 vec!["build"],
2882 "Leaf task dependency should remain unchanged"
2883 );
2884 }
2885
2886 #[test]
2887 fn test_expand_dependency_nested_groups() {
2888 let mut project = Project::new("test-project");
2890
2891 let mut inner_children = HashMap::new();
2893 inner_children.insert(
2894 "a".to_string(),
2895 TaskNode::Task(Box::new(Task {
2896 command: "echo".to_string(),
2897 args: vec!["a".to_string()],
2898 ..Default::default()
2899 })),
2900 );
2901 inner_children.insert(
2902 "b".to_string(),
2903 TaskNode::Task(Box::new(Task {
2904 command: "echo".to_string(),
2905 args: vec!["b".to_string()],
2906 ..Default::default()
2907 })),
2908 );
2909
2910 let mut outer_children = HashMap::new();
2912 outer_children.insert(
2913 "inner".to_string(),
2914 TaskNode::Group(TaskGroup {
2915 type_: "group".to_string(),
2916 children: inner_children,
2917 depends_on: vec![],
2918 description: None,
2919 max_concurrency: None,
2920 }),
2921 );
2922 outer_children.insert(
2923 "leaf".to_string(),
2924 TaskNode::Task(Box::new(Task {
2925 command: "echo".to_string(),
2926 args: vec!["leaf".to_string()],
2927 ..Default::default()
2928 })),
2929 );
2930
2931 project.tasks.insert(
2932 "outer".to_string(),
2933 TaskNode::Group(TaskGroup {
2934 type_: "group".to_string(),
2935 children: outer_children,
2936 depends_on: vec![],
2937 description: None,
2938 max_concurrency: None,
2939 }),
2940 );
2941
2942 project.tasks.insert(
2943 "final".to_string(),
2944 TaskNode::Task(Box::new(Task {
2945 command: "echo".to_string(),
2946 args: vec!["final".to_string()],
2947 depends_on: vec![TaskDependency::from_name("outer")],
2948 ..Default::default()
2949 })),
2950 );
2951
2952 let compiler = Compiler::new(project);
2953 let ir = compiler.compile().unwrap();
2954
2955 let final_task = ir.tasks.iter().find(|t| t.id == "final").unwrap();
2956 assert_eq!(
2957 final_task.depends_on,
2958 vec!["outer.inner.a", "outer.inner.b", "outer.leaf"],
2959 "Nested group should be recursively expanded"
2960 );
2961 }
2962
2963 #[test]
2964 fn test_expand_dependency_sibling_resolution() {
2965 let mut project = Project::new("test-project");
2968
2969 let mut docs_children = HashMap::new();
2972 docs_children.insert(
2973 "build".to_string(),
2974 TaskNode::Task(Box::new(Task {
2975 command: "npm".to_string(),
2976 args: vec!["run".to_string(), "build".to_string()],
2977 ..Default::default()
2978 })),
2979 );
2980 docs_children.insert(
2981 "deploy".to_string(),
2982 TaskNode::Task(Box::new(Task {
2983 command: "npm".to_string(),
2984 args: vec!["run".to_string(), "deploy".to_string()],
2985 depends_on: vec![TaskDependency::from_name("build")],
2987 ..Default::default()
2988 })),
2989 );
2990
2991 project.tasks.insert(
2992 "docs".to_string(),
2993 TaskNode::Group(TaskGroup {
2994 type_: "group".to_string(),
2995 children: docs_children,
2996 depends_on: vec![],
2997 description: None,
2998 max_concurrency: None,
2999 }),
3000 );
3001
3002 let compiler = Compiler::new(project);
3003 let ir = compiler.compile().unwrap();
3004
3005 let deploy_task = ir.tasks.iter().find(|t| t.id == "docs.deploy").unwrap();
3007
3008 assert_eq!(
3010 deploy_task.depends_on,
3011 vec!["docs.build"],
3012 "Sibling reference 'build' should resolve to 'docs.build'"
3013 );
3014 }
3015
3016 #[test]
3021 fn test_value_has_provider_interpolated_with_exec_secret() {
3022 use cuenv_core::environment::{EnvPart, EnvValue};
3023 use cuenv_core::secrets::Secret;
3024
3025 let secret = Secret::new("echo".to_string(), vec!["test".to_string()]);
3026 let parts = vec![
3027 EnvPart::Literal("prefix-".to_string()),
3028 EnvPart::Secret(secret),
3029 ];
3030 let value = EnvValue::Interpolated(parts);
3031
3032 assert!(!Compiler::value_has_provider(
3034 &value,
3035 &["onepassword".to_string()]
3036 ));
3037 }
3038
3039 #[test]
3040 fn test_value_has_provider_interpolated_with_onepassword_secret() {
3041 use cuenv_core::environment::{EnvPart, EnvValue};
3042 use cuenv_core::secrets::Secret;
3043
3044 let secret = Secret::onepassword("op://vault/item/field");
3045 let parts = vec![
3046 EnvPart::Literal("prefix-".to_string()),
3047 EnvPart::Secret(secret),
3048 ];
3049 let value = EnvValue::Interpolated(parts);
3050
3051 assert!(Compiler::value_has_provider(
3053 &value,
3054 &["onepassword".to_string()]
3055 ));
3056 }
3057
3058 #[test]
3059 fn test_value_has_provider_interpolated_only_literals() {
3060 use cuenv_core::environment::{EnvPart, EnvValue};
3061
3062 let parts = vec![
3063 EnvPart::Literal("hello".to_string()),
3064 EnvPart::Literal("world".to_string()),
3065 ];
3066 let value = EnvValue::Interpolated(parts);
3067
3068 assert!(!Compiler::value_has_provider(
3070 &value,
3071 &["onepassword".to_string()]
3072 ));
3073 }
3074
3075 #[test]
3076 fn test_value_has_provider_interpolated_with_op_uri_in_literal() {
3077 use cuenv_core::environment::{EnvPart, EnvValue};
3078
3079 let parts = vec![
3081 EnvPart::Literal("op://vault/item/field".to_string()),
3082 EnvPart::Literal("-suffix".to_string()),
3083 ];
3084 let value = EnvValue::Interpolated(parts);
3085
3086 assert!(Compiler::value_has_provider(
3087 &value,
3088 &["onepassword".to_string()]
3089 ));
3090 }
3091
3092 #[test]
3093 fn test_value_has_provider_with_policies_interpolated() {
3094 use cuenv_core::environment::{EnvPart, EnvValue, EnvValueSimple, EnvVarWithPolicies};
3095 use cuenv_core::secrets::Secret;
3096
3097 let secret = Secret::onepassword("op://vault/item/field");
3098 let parts = vec![
3099 EnvPart::Literal("prefix-".to_string()),
3100 EnvPart::Secret(secret),
3101 ];
3102
3103 let value = EnvValue::WithPolicies(EnvVarWithPolicies {
3104 value: EnvValueSimple::Interpolated(parts),
3105 policies: None,
3106 });
3107
3108 assert!(Compiler::value_has_provider(
3109 &value,
3110 &["onepassword".to_string()]
3111 ));
3112 }
3113
3114 #[test]
3115 fn test_parts_have_provider_op_uri_in_literal() {
3116 use cuenv_core::environment::EnvPart;
3117
3118 let parts = vec![
3119 EnvPart::Literal("prefix-".to_string()),
3120 EnvPart::Literal("op://vault/item/password".to_string()),
3121 ];
3122
3123 assert!(Compiler::parts_have_provider(
3125 &parts,
3126 &["onepassword".to_string()]
3127 ));
3128 }
3129
3130 #[test]
3131 fn test_parts_have_provider_op_uri_not_matching_other_providers() {
3132 use cuenv_core::environment::EnvPart;
3133
3134 let parts = vec![EnvPart::Literal("op://vault/item/password".to_string())];
3135
3136 assert!(!Compiler::parts_have_provider(&parts, &["aws".to_string()]));
3138 assert!(!Compiler::parts_have_provider(
3139 &parts,
3140 &["vault".to_string()]
3141 ));
3142 }
3143}