1use crate::workflow::schema::{
6 Concurrency, Environment, Job, Matrix, PermissionLevel, Permissions, PullRequestTrigger,
7 PushTrigger, ReleaseTrigger, RunsOn, ScheduleTrigger, Step, Strategy, Workflow,
8 WorkflowDispatchTrigger, WorkflowInput, WorkflowTriggers,
9};
10use crate::workflow::stage_renderer::{GitHubStageRenderer, transform_secret_ref};
11use cuenv_ci::emitter::{Emitter, EmitterError, EmitterResult};
12use cuenv_ci::ir::{BuildStage, IntermediateRepresentation, OutputType, Task, TriggerCondition};
13use indexmap::IndexMap;
14use std::collections::HashMap;
15
#[derive(Debug, Clone)]
/// Configuration for emitting GitHub Actions workflows from a cuenv pipeline IR.
pub struct GitHubActionsEmitter {
    /// Runner label applied to generated jobs (e.g. "ubuntu-latest").
    pub runner: String,
    /// Whether workflows assume Nix tooling.
    /// NOTE(review): not read anywhere in this file — confirm usage upstream.
    pub use_nix: bool,
    /// Whether a Cachix binary cache is configured (set by `from_config`).
    pub use_cachix: bool,
    /// Cachix cache name; only meaningful when `use_cachix` is true.
    pub cachix_name: Option<String>,
    /// Name of the GitHub secret holding the Cachix auth token.
    pub cachix_auth_token_secret: String,
    /// Default `paths-ignore` filters used for push/PR triggers when the
    /// trigger condition does not supply its own.
    pub default_paths_ignore: Vec<String>,
    /// Whether to build cuenv itself as part of the workflow.
    /// NOTE(review): not read anywhere in this file — confirm usage upstream.
    pub build_cuenv: bool,
    /// GitHub environment used to gate jobs that require manual approval.
    pub approval_environment: String,
    /// Raw permission overrides (key -> "read"/"write"/"none") layered on top
    /// of the computed defaults; see `apply_configured_permissions`.
    pub configured_permissions: HashMap<String, String>,
}
55
56impl Default for GitHubActionsEmitter {
57 fn default() -> Self {
58 Self {
59 runner: "ubuntu-latest".to_string(),
60 use_nix: true,
61 use_cachix: false,
62 cachix_name: None,
63 cachix_auth_token_secret: "CACHIX_AUTH_TOKEN".to_string(),
64 default_paths_ignore: vec![
65 "docs/**".to_string(),
66 "examples/**".to_string(),
67 "*.md".to_string(),
68 "LICENSE".to_string(),
69 ".vscode/**".to_string(),
70 ],
71 build_cuenv: true,
72 approval_environment: "production".to_string(),
73 configured_permissions: HashMap::new(),
74 }
75 }
76}
77
78impl GitHubActionsEmitter {
    /// Create an emitter with the default configuration (see [`Default`]).
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }
84
    /// Build an emitter from the project's GitHub config, falling back to the
    /// defaults for any field the config leaves unset.
    #[must_use]
    pub fn from_config(config: &crate::config::GitHubConfig) -> Self {
        let mut emitter = Self::default();

        // NOTE(review): `as_single()` presumably yields a single runner label;
        // multi-runner configs fall back to "ubuntu-latest" (same as default).
        if let Some(runner) = &config.runner {
            emitter.runner = runner.as_single().unwrap_or("ubuntu-latest").to_string();
        }

        // Presence of a cachix section implies Cachix usage.
        if let Some(cachix) = &config.cachix {
            emitter.use_cachix = true;
            emitter.cachix_name = Some(cachix.name.clone());
            if let Some(auth_token) = &cachix.auth_token {
                emitter.cachix_auth_token_secret.clone_from(auth_token);
            }
        }

        // clone_from reuses existing allocations where possible.
        if let Some(paths_ignore) = &config.paths_ignore {
            emitter.default_paths_ignore.clone_from(paths_ignore);
        }

        if let Some(permissions) = &config.permissions {
            emitter.configured_permissions.clone_from(permissions);
        }

        emitter
    }
118
    /// The configured runner wrapped as a `runs-on` label value.
    #[must_use]
    pub fn runner_as_runs_on(&self) -> RunsOn {
        RunsOn::Label(self.runner.clone())
    }
124
125 #[must_use]
127 pub fn apply_configured_permissions(&self, mut permissions: Permissions) -> Permissions {
128 let parse_level = |s: &str| -> Option<PermissionLevel> {
130 match s.to_lowercase().as_str() {
131 "write" => Some(PermissionLevel::Write),
132 "read" => Some(PermissionLevel::Read),
133 "none" => Some(PermissionLevel::None),
134 _ => None,
135 }
136 };
137
138 for (key, value) in &self.configured_permissions {
140 if let Some(level) = parse_level(value) {
141 match key.as_str() {
142 "contents" => permissions.contents = Some(level),
143 "checks" => permissions.checks = Some(level),
144 "pull-requests" => permissions.pull_requests = Some(level),
145 "issues" => permissions.issues = Some(level),
146 "packages" => permissions.packages = Some(level),
147 "id-token" => permissions.id_token = Some(level),
148 "actions" => permissions.actions = Some(level),
149 _ => {}
150 }
151 }
152 }
153
154 permissions
155 }
156
    /// Set the runner label used for generated jobs.
    #[must_use]
    pub fn with_runner(mut self, runner: impl Into<String>) -> Self {
        self.runner = runner.into();
        self
    }
163
    /// Enable Nix tooling in generated workflows.
    #[must_use]
    pub const fn with_nix(mut self) -> Self {
        self.use_nix = true;
        self
    }
170
    /// Disable Nix tooling in generated workflows.
    #[must_use]
    pub const fn without_nix(mut self) -> Self {
        self.use_nix = false;
        self
    }
177
    /// Enable Cachix with the given cache name.
    #[must_use]
    pub fn with_cachix(mut self, name: impl Into<String>) -> Self {
        self.use_cachix = true;
        self.cachix_name = Some(name.into());
        self
    }
185
    /// Override the GitHub secret name that holds the Cachix auth token.
    #[must_use]
    pub fn with_cachix_auth_token_secret(mut self, secret: impl Into<String>) -> Self {
        self.cachix_auth_token_secret = secret.into();
        self
    }
192
    /// Replace the default `paths-ignore` filters.
    #[must_use]
    pub fn with_paths_ignore(mut self, paths: Vec<String>) -> Self {
        self.default_paths_ignore = paths;
        self
    }
199
    /// Skip building cuenv itself in generated workflows.
    #[must_use]
    pub const fn without_cuenv_build(mut self) -> Self {
        self.build_cuenv = false;
        self
    }
206
    /// Set the GitHub environment used to gate manual-approval jobs.
    #[must_use]
    pub fn with_approval_environment(mut self, env: impl Into<String>) -> Self {
        self.approval_environment = env.into();
        self
    }
213
214 pub fn emit_workflows(
223 &self,
224 ir: &IntermediateRepresentation,
225 ) -> EmitterResult<HashMap<String, String>> {
226 let mut workflows = HashMap::new();
227
228 let workflow_name = Self::build_workflow_name(ir);
230
231 let workflow = self.build_workflow(ir, &workflow_name);
233 let filename = format!("{}.yml", sanitize_filename(&workflow_name));
234 let yaml = Self::serialize_workflow(&workflow)?;
235 workflows.insert(filename, yaml);
236
237 Ok(workflows)
238 }
239
240 fn build_workflow_name(ir: &IntermediateRepresentation) -> String {
242 ir.pipeline.project_name.as_ref().map_or_else(
243 || ir.pipeline.name.clone(),
244 |project| format!("{}-{}", project, ir.pipeline.name),
245 )
246 }
247
    /// Build the full expanded workflow: triggers, permissions, a shared
    /// concurrency group, and one job per regular task.
    fn build_workflow(&self, ir: &IntermediateRepresentation, workflow_name: &str) -> Workflow {
        // The filename is threaded into trigger paths so that editing the
        // generated workflow file itself re-triggers the pipeline.
        let workflow_filename = format!("{}.yml", sanitize_filename(workflow_name));
        let triggers = self.build_triggers(ir, &workflow_filename);
        let permissions = self.build_permissions(ir);
        let jobs = self.build_jobs(ir);

        Workflow {
            name: workflow_name.to_string(),
            on: triggers,
            // Cancel superseded runs per branch/PR.
            concurrency: Some(Concurrency {
                group: "${{ github.workflow }}-${{ github.head_ref || github.ref }}".to_string(),
                cancel_in_progress: Some(true),
            }),
            permissions: Some(permissions),
            env: IndexMap::new(),
            jobs,
        }
    }
267
    /// Translate the IR's trigger condition into the workflow's `on:` block.
    ///
    /// `workflow_filename` lets the push/PR path filters include the generated
    /// workflow file itself.
    fn build_triggers(
        &self,
        ir: &IntermediateRepresentation,
        workflow_filename: &str,
    ) -> WorkflowTriggers {
        let trigger = ir.pipeline.trigger.as_ref();

        WorkflowTriggers {
            push: self.build_push_trigger(trigger, workflow_filename),
            pull_request: self.build_pr_trigger(trigger, workflow_filename),
            release: Self::build_release_trigger(trigger),
            workflow_dispatch: Self::build_manual_trigger(trigger),
            schedule: Self::build_schedule_trigger(trigger),
        }
    }
284
285 fn build_push_trigger(
287 &self,
288 trigger: Option<&TriggerCondition>,
289 workflow_filename: &str,
290 ) -> Option<PushTrigger> {
291 let trigger = trigger?;
292
293 if trigger.branches.is_empty() {
295 return None;
296 }
297
298 let paths = Self::build_trigger_paths(&trigger.paths, workflow_filename);
299
300 Some(PushTrigger {
301 branches: trigger.branches.clone(),
302 paths,
303 paths_ignore: if trigger.paths_ignore.is_empty() {
304 self.default_paths_ignore.clone()
305 } else {
306 trigger.paths_ignore.clone()
307 },
308 ..Default::default()
309 })
310 }
311
312 fn build_pr_trigger(
314 &self,
315 trigger: Option<&TriggerCondition>,
316 workflow_filename: &str,
317 ) -> Option<PullRequestTrigger> {
318 let trigger = trigger?;
319
320 if trigger.pull_request == Some(true) {
322 let paths = Self::build_trigger_paths(&trigger.paths, workflow_filename);
323
324 Some(PullRequestTrigger {
325 branches: trigger.branches.clone(),
326 paths,
327 paths_ignore: if trigger.paths_ignore.is_empty() {
328 self.default_paths_ignore.clone()
329 } else {
330 trigger.paths_ignore.clone()
331 },
332 ..Default::default()
333 })
334 } else {
335 None
336 }
337 }
338
339 fn build_release_trigger(trigger: Option<&TriggerCondition>) -> Option<ReleaseTrigger> {
341 let trigger = trigger?;
342
343 if trigger.release.is_empty() {
344 return None;
345 }
346
347 Some(ReleaseTrigger {
348 types: trigger.release.clone(),
349 })
350 }
351
352 fn build_schedule_trigger(trigger: Option<&TriggerCondition>) -> Option<Vec<ScheduleTrigger>> {
354 let trigger = trigger?;
355
356 if trigger.scheduled.is_empty() {
357 return None;
358 }
359
360 Some(
361 trigger
362 .scheduled
363 .iter()
364 .map(|cron| ScheduleTrigger { cron: cron.clone() })
365 .collect(),
366 )
367 }
368
369 fn build_trigger_paths(paths: &[String], workflow_filename: &str) -> Vec<String> {
374 if paths.is_empty() {
375 return Vec::new();
376 }
377
378 let workflow_path = format!(".github/workflows/{workflow_filename}");
379
380 if paths.contains(&workflow_path) {
381 return paths.to_vec();
382 }
383
384 let mut result = paths.to_vec();
385 result.push(workflow_path);
386 result.sort();
387 result
388 }
389
    /// Build the `workflow_dispatch` trigger from the IR's manual-trigger
    /// config, mapping each declared input to a GitHub workflow input.
    fn build_manual_trigger(trigger: Option<&TriggerCondition>) -> Option<WorkflowDispatchTrigger> {
        let trigger = trigger?;
        let manual = trigger.manual.as_ref()?;

        // Emit the trigger when it is enabled OR declares any inputs.
        if !manual.enabled && manual.inputs.is_empty() {
            return None;
        }

        Some(WorkflowDispatchTrigger {
            inputs: manual
                .inputs
                .iter()
                .map(|(k, v)| {
                    (
                        k.clone(),
                        WorkflowInput {
                            description: v.description.clone(),
                            required: Some(v.required),
                            default: v.default.clone(),
                            input_type: v.input_type.clone(),
                            // An empty options list collapses to None so it is
                            // presumably omitted from the serialized YAML.
                            options: if v.options.is_empty() {
                                None
                            } else {
                                Some(v.options.clone())
                            },
                        },
                    )
                })
                .collect(),
        })
    }
422
423 fn build_permissions(&self, ir: &IntermediateRepresentation) -> Permissions {
425 let has_deployments = ir.tasks.iter().any(|t| t.deployment);
426 let has_outputs = ir.tasks.iter().any(|t| {
427 t.outputs
428 .iter()
429 .any(|o| o.output_type == OutputType::Orchestrator)
430 });
431
432 let parse_level = |s: &str| -> Option<PermissionLevel> {
434 match s.to_lowercase().as_str() {
435 "write" => Some(PermissionLevel::Write),
436 "read" => Some(PermissionLevel::Read),
437 "none" => Some(PermissionLevel::None),
438 _ => None,
439 }
440 };
441
442 let mut permissions = Permissions {
444 contents: Some(if has_deployments {
445 PermissionLevel::Write
446 } else {
447 PermissionLevel::Read
448 }),
449 checks: Some(PermissionLevel::Write),
450 pull_requests: Some(PermissionLevel::Write),
451 packages: if has_outputs {
452 Some(PermissionLevel::Write)
453 } else {
454 None
455 },
456 ..Default::default()
457 };
458
459 for (key, value) in &self.configured_permissions {
461 if let Some(level) = parse_level(value) {
462 match key.as_str() {
463 "contents" => permissions.contents = Some(level),
464 "checks" => permissions.checks = Some(level),
465 "pull-requests" => permissions.pull_requests = Some(level),
466 "issues" => permissions.issues = Some(level),
467 "packages" => permissions.packages = Some(level),
468 "id-token" => permissions.id_token = Some(level),
469 "actions" => permissions.actions = Some(level),
470 _ => {}
471 }
472 }
473 }
474
475 permissions
476 }
477
478 fn build_jobs(&self, ir: &IntermediateRepresentation) -> IndexMap<String, Job> {
483 let mut jobs = IndexMap::new();
484
485 for task in ir.regular_tasks() {
486 let mut job = self.build_simple_job(
488 task,
489 ir,
490 ir.pipeline.environment.as_ref(),
491 None, );
493
494 if let Some(resources) = &task.resources
498 && let Some(tag) = resources.tags.first()
499 {
500 job.runs_on = RunsOn::Label(tag.clone());
501 }
502
503 job.needs = task.depends_on.iter().map(|d| sanitize_job_id(d)).collect();
505
506 if task.manual_approval {
508 job.environment = Some(Environment::Name(self.approval_environment.clone()));
509 }
510
511 if let Some(group) = &task.concurrency_group {
513 job.concurrency = Some(Concurrency {
514 group: group.clone(),
515 cancel_in_progress: Some(false),
516 });
517 }
518
519 jobs.insert(sanitize_job_id(&task.id), job);
520 }
521
522 jobs
523 }
524
525 fn serialize_workflow(workflow: &Workflow) -> EmitterResult<String> {
527 let yaml = serde_yaml::to_string(workflow)
528 .map_err(|e| EmitterError::Serialization(e.to_string()))?;
529
530 let header = "# Generated by cuenv - do not edit manually\n# Regenerate with: cuenv ci --format github\n\n";
532
533 Ok(format!("{header}{yaml}"))
534 }
535
536 #[must_use]
549 pub fn render_phase_steps(
550 ir: &IntermediateRepresentation,
551 ) -> (Vec<Step>, IndexMap<String, String>) {
552 let renderer = GitHubStageRenderer::new();
553 let mut steps = Vec::new();
554 let mut secret_env_vars = IndexMap::new();
555
556 let bootstrap_steps = renderer.render_tasks(&ir.sorted_phase_tasks(BuildStage::Bootstrap));
558 steps.extend(bootstrap_steps);
559
560 for task in ir.sorted_phase_tasks(BuildStage::Setup) {
563 let step = renderer.render_task(task);
564 steps.push(step);
565
566 for (key, value) in &task.env {
569 secret_env_vars.insert(key.clone(), value.clone());
570 }
571 }
572
573 (steps, secret_env_vars)
574 }
575
    /// Assemble a complete job for a single task: checkout, bootstrap/setup
    /// phase steps, artifact downloads, the `cuenv task` invocation, and an
    /// upload step for orchestrator outputs.
    ///
    /// `environment` selects the cuenv environment (`-e` flag); `project_path`
    /// sets the step working directory for tasks in sub-projects.
    #[must_use]
    pub fn build_simple_job(
        &self,
        task: &Task,
        ir: &IntermediateRepresentation,
        environment: Option<&String>,
        project_path: Option<&str>,
    ) -> Job {
        let mut steps = Vec::new();

        // Shallow checkout (depth 2) for regular task builds.
        steps.push(
            Step::uses("actions/checkout@v4")
                .with_name("Checkout")
                .with_input("fetch-depth", serde_yaml::Value::Number(2.into())),
        );

        let (phase_steps, secret_env_vars) = Self::render_phase_steps(ir);
        steps.extend(phase_steps);

        // One download step per declared artifact dependency.
        for artifact in &task.artifact_downloads {
            let download_step = Step::uses("actions/download-artifact@v4")
                .with_name(format!("Download {}", artifact.name))
                .with_input("name", serde_yaml::Value::String(artifact.name.clone()))
                .with_input("path", serde_yaml::Value::String(artifact.path.clone()));
            steps.push(download_step);
        }

        // Dependencies are modeled as separate jobs, so the task itself runs
        // with --skip-dependencies.
        let task_command = environment.map_or_else(
            || format!("cuenv task {} --skip-dependencies", task.id),
            |env| format!("cuenv task {} -e {} --skip-dependencies", task.id, env),
        );
        let mut task_step = Step::run(task_command)
            .with_name(task.id.clone())
            .with_env("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}");

        if let Some(path) = project_path {
            task_step = task_step.with_working_directory(path);
        }

        // Setup-phase env first, then the task's own env (task entries win on
        // key collision); secret references are rewritten to GitHub syntax.
        for (key, value) in secret_env_vars {
            task_step.env.insert(key, transform_secret_ref(&value));
        }

        for (key, value) in &task.env {
            task_step
                .env
                .insert(key.clone(), transform_secret_ref(value));
        }

        steps.push(task_step);

        // Only orchestrator-typed outputs are shuttled between jobs as
        // artifacts; other output kinds stay local to the job.
        let orchestrator_outputs: Vec<_> = task
            .outputs
            .iter()
            .filter(|o| o.output_type == OutputType::Orchestrator)
            .collect();

        if !orchestrator_outputs.is_empty() {
            let paths: Vec<String> = orchestrator_outputs
                .iter()
                .map(|o| o.path.clone())
                .collect();
            let mut upload_step = Step::uses("actions/upload-artifact@v4")
                .with_name("Upload artifacts")
                .with_input(
                    "name",
                    serde_yaml::Value::String(format!("{}-artifacts", task.id.replace('.', "-"))),
                )
                .with_input("path", serde_yaml::Value::String(paths.join("\n")));
            upload_step.with_inputs.insert(
                "if-no-files-found".to_string(),
                serde_yaml::Value::String("ignore".to_string()),
            );
            upload_step.with_inputs.insert(
                "include-hidden-files".to_string(),
                serde_yaml::Value::Bool(true),
            );
            steps.push(upload_step);
        }

        Job {
            name: Some(task.id.clone()),
            runs_on: RunsOn::Label(self.runner.clone()),
            // needs/environment/concurrency are filled in later by build_jobs.
            needs: Vec::new(), if_condition: None,
            strategy: None,
            environment: None,
            env: IndexMap::new(),
            concurrency: None,
            continue_on_error: None,
            timeout_minutes: None,
            steps,
        }
    }
695
    /// Build a job that fans in artifacts produced by `previous_jobs` and then
    /// runs the aggregation task.
    ///
    /// Matching of previous jobs to artifact downloads is heuristic: a job
    /// matches when it starts with the artifact name (dots mapped to dashes)
    /// or contains the raw artifact name.
    #[must_use]
    pub fn build_artifact_aggregation_job(
        &self,
        task: &Task,
        ir: &IntermediateRepresentation,
        environment: Option<&String>,
        previous_jobs: &[String],
        project_path: Option<&str>,
    ) -> Job {
        let mut steps = Vec::new();

        // Full history (depth 0) — NOTE(review): presumably needed for
        // release/versioning tooling; confirm.
        steps.push(
            Step::uses("actions/checkout@v4")
                .with_name("Checkout")
                .with_input("fetch-depth", serde_yaml::Value::Number(0.into())),
        );

        let (phase_steps, secret_env_vars) = Self::render_phase_steps(ir);
        steps.extend(phase_steps);

        for artifact in &task.artifact_downloads {
            for prev_job in previous_jobs {
                // Job IDs have dots replaced with dashes, so compare against a
                // dash-normalized artifact name.
                let source_prefix = artifact.name.replace('.', "-");
                if prev_job.starts_with(&source_prefix) || prev_job.contains(&artifact.name) {
                    // The leftover after the prefix (e.g. a matrix arch) picks
                    // a sub-directory under the artifact path.
                    let suffix = prev_job
                        .strip_prefix(&source_prefix)
                        .unwrap_or("")
                        .trim_start_matches('-');

                    let download_path = if suffix.is_empty() {
                        artifact.path.clone()
                    } else {
                        format!("{}/{}", artifact.path, suffix)
                    };

                    steps.push(
                        Step::uses("actions/download-artifact@v4")
                            .with_name(format!("Download {prev_job}"))
                            .with_input("name", serde_yaml::Value::String(prev_job.clone()))
                            .with_input("path", serde_yaml::Value::String(download_path)),
                    );
                }
            }
        }

        let task_command = environment.map_or_else(
            || format!("cuenv task {} --skip-dependencies", task.id),
            |env| format!("cuenv task {} -e {} --skip-dependencies", task.id, env),
        );

        let mut task_step = Step::run(&task_command)
            .with_name(task.id.clone())
            .with_env("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}");

        if let Some(path) = project_path {
            task_step = task_step.with_working_directory(path);
        }

        // Task params are exposed as UPPERCASE env vars.
        // NOTE(review): unlike build_simple_job, task.env is not applied here
        // — confirm whether that is intentional.
        for (key, value) in &task.params {
            task_step.env.insert(key.to_uppercase(), value.clone());
        }

        for (key, value) in secret_env_vars {
            task_step.env.insert(key, transform_secret_ref(&value));
        }

        steps.push(task_step);

        Job {
            name: Some(task.id.clone()),
            runs_on: RunsOn::Label(self.runner.clone()),
            needs: previous_jobs.to_vec(),
            if_condition: None,
            strategy: None,
            environment: None,
            env: IndexMap::new(),
            concurrency: None,
            continue_on_error: None,
            timeout_minutes: Some(30),
            steps,
        }
    }
805
    /// Expand a matrix task into one concrete job per `arch` dimension value.
    ///
    /// Only the "arch" dimension is expanded; tasks with other (or no)
    /// dimensions yield an empty map. `arch_runners` maps an arch value to a
    /// dedicated runner label, falling back to the emitter's default runner.
    #[must_use]
    pub fn build_matrix_jobs(
        &self,
        task: &Task,
        ir: &IntermediateRepresentation,
        environment: Option<&String>,
        arch_runners: Option<&HashMap<String, String>>,
        previous_jobs: &[String],
        project_path: Option<&str>,
    ) -> IndexMap<String, Job> {
        let mut jobs = IndexMap::new();
        let base_job_id = task.id.replace(['.', ' '], "-");

        let Some(matrix) = &task.matrix else {
            return jobs;
        };

        if let Some(arch_values) = matrix.dimensions.get("arch") {
            for arch in arch_values {
                let job_id = format!("{base_job_id}-{arch}");

                // Prefer an arch-specific runner when one is mapped.
                let runner = arch_runners
                    .and_then(|m| m.get(arch))
                    .cloned()
                    .unwrap_or_else(|| self.runner.clone());

                let mut steps = Vec::new();

                // Full history (depth 0) — NOTE(review): presumably for
                // version/tag derivation during matrix builds; confirm.
                steps.push(
                    Step::uses("actions/checkout@v4")
                        .with_name("Checkout")
                        .with_input("fetch-depth", serde_yaml::Value::Number(0.into())),
                );

                let (phase_steps, secret_env_vars) = Self::render_phase_steps(ir);
                steps.extend(phase_steps);

                let task_command = environment.map_or_else(
                    || format!("cuenv task {} --skip-dependencies", task.id),
                    |env| format!("cuenv task {} -e {} --skip-dependencies", task.id, env),
                );
                let mut task_step = Step::run(&task_command)
                    .with_name(format!("{} ({arch})", task.id))
                    .with_env("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}");

                if let Some(path) = project_path {
                    task_step = task_step.with_working_directory(path);
                }

                // Expose the arch under build to the task.
                task_step.env.insert("CUENV_ARCH".to_string(), arch.clone());

                for (key, value) in &secret_env_vars {
                    task_step
                        .env
                        .insert(key.clone(), transform_secret_ref(value));
                }

                steps.push(task_step);

                // Upload the declared outputs, or a conventional Nix result
                // path when the task declares none.
                let artifact_path = if task.outputs.is_empty() {
                    "result/bin/*".to_string()
                } else {
                    task.outputs
                        .iter()
                        .map(|o| o.path.clone())
                        .collect::<Vec<_>>()
                        .join("\n")
                };
                let mut upload_step = Step::uses("actions/upload-artifact@v4")
                    .with_name("Upload artifacts")
                    .with_input(
                        "name",
                        serde_yaml::Value::String(format!("{base_job_id}-{arch}")),
                    )
                    .with_input("path", serde_yaml::Value::String(artifact_path));
                upload_step.with_inputs.insert(
                    "if-no-files-found".to_string(),
                    serde_yaml::Value::String("ignore".to_string()),
                );
                upload_step.with_inputs.insert(
                    "include-hidden-files".to_string(),
                    serde_yaml::Value::Bool(true),
                );
                steps.push(upload_step);

                jobs.insert(
                    job_id,
                    Job {
                        name: Some(format!("{} ({arch})", task.id)),
                        runs_on: RunsOn::Label(runner),
                        needs: previous_jobs.to_vec(),
                        if_condition: None,
                        strategy: None,
                        environment: None,
                        env: IndexMap::new(),
                        concurrency: None,
                        continue_on_error: None,
                        timeout_minutes: Some(60),
                        steps,
                    },
                );
            }
        }

        jobs
    }
939
940 #[must_use]
942 pub fn task_has_matrix(task: &Task) -> bool {
943 task.matrix
944 .as_ref()
945 .is_some_and(|m| !m.dimensions.is_empty())
946 }
947
    /// True when the task downloads any artifacts from other jobs.
    #[must_use]
    pub const fn task_has_artifact_downloads(task: &Task) -> bool {
        !task.artifact_downloads.is_empty()
    }
953}
954
955impl Emitter for GitHubActionsEmitter {
956 fn emit_thin(&self, ir: &IntermediateRepresentation) -> EmitterResult<String> {
964 use crate::workflow::stage_renderer::GitHubStageRenderer;
965
966 let workflow_name = Self::build_workflow_name(ir);
967 let workflow_filename = format!("{}.yml", sanitize_filename(&workflow_name));
968 let triggers = self.build_triggers(ir, &workflow_filename);
969 let permissions = self.build_permissions(ir);
970
971 let renderer = GitHubStageRenderer::new();
972 let mut steps = Vec::new();
973
974 steps.push(
976 Step::uses("actions/checkout@v4")
977 .with_name("Checkout")
978 .with_input("fetch-depth", serde_yaml::Value::Number(2.into())),
979 );
980
981 let (phase_steps, secret_env) = Self::render_phase_steps(ir);
983 steps.extend(phase_steps);
984
985 let pipeline_name = &ir.pipeline.name;
987 let cuenv_command = format!("cuenv ci --pipeline {pipeline_name}");
988
989 let mut main_step = Step::run(&cuenv_command)
990 .with_name(format!("Run pipeline: {pipeline_name}"))
991 .with_env("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}");
992
993 if let Some(env) = &ir.pipeline.environment {
994 main_step = main_step.with_env("CUENV_ENVIRONMENT", env.clone());
995 }
996
997 for (key, value) in secret_env {
1000 main_step = main_step.with_env(key, transform_secret_ref(&value));
1001 }
1002
1003 steps.push(main_step);
1004
1005 for task in ir.sorted_phase_tasks(BuildStage::Success) {
1007 let mut step = renderer.render_task(task);
1008 step.if_condition = Some("success()".to_string());
1009 steps.push(step);
1010 }
1011
1012 for task in ir.sorted_phase_tasks(BuildStage::Failure) {
1014 let mut step = renderer.render_task(task);
1015 step.if_condition = Some("failure()".to_string());
1016 steps.push(step);
1017 }
1018
1019 let job = Job {
1021 name: Some(workflow_name.clone()),
1022 runs_on: self.runner_as_runs_on(),
1023 needs: Vec::new(),
1024 if_condition: None,
1025 strategy: None,
1026 environment: ir.pipeline.environment.clone().map(Environment::Name),
1027 env: IndexMap::new(),
1028 concurrency: None,
1029 continue_on_error: None,
1030 timeout_minutes: None,
1031 steps,
1032 };
1033
1034 let mut jobs = IndexMap::new();
1035 jobs.insert(sanitize_job_id(&workflow_name), job);
1036
1037 let workflow = Workflow {
1038 name: workflow_name,
1039 on: triggers,
1040 concurrency: Some(Concurrency {
1041 group: "${{ github.workflow }}-${{ github.head_ref || github.ref }}".to_string(),
1042 cancel_in_progress: Some(true),
1043 }),
1044 permissions: Some(permissions),
1045 env: IndexMap::new(),
1046 jobs,
1047 };
1048
1049 Self::serialize_workflow(&workflow)
1050 }
1051
1052 fn emit_expanded(&self, ir: &IntermediateRepresentation) -> EmitterResult<String> {
1057 let workflow_name = Self::build_workflow_name(ir);
1058 let workflow = self.build_workflow(ir, &workflow_name);
1059 Self::serialize_workflow(&workflow)
1060 }
1061
1062 fn format_name(&self) -> &'static str {
1063 "github"
1064 }
1065
1066 fn file_extension(&self) -> &'static str {
1067 "yml"
1068 }
1069
1070 fn description(&self) -> &'static str {
1071 "GitHub Actions workflow YAML emitter"
1072 }
1073
1074 fn validate(&self, ir: &IntermediateRepresentation) -> EmitterResult<()> {
1075 for task in &ir.tasks {
1077 if task.id.contains(' ') {
1078 return Err(EmitterError::InvalidIR(format!(
1079 "Task ID '{}' contains spaces, which are not allowed in GitHub Actions job IDs",
1080 task.id
1081 )));
1082 }
1083 }
1084
1085 let task_ids: std::collections::HashSet<_> = ir.tasks.iter().map(|t| &t.id).collect();
1087 for task in &ir.tasks {
1088 for dep in &task.depends_on {
1089 if !task_ids.contains(dep) {
1090 return Err(EmitterError::InvalidIR(format!(
1091 "Task '{}' depends on non-existent task '{}'",
1092 task.id, dep
1093 )));
1094 }
1095 }
1096 }
1097
1098 Ok(())
1099 }
1100}
1101
/// Turn an arbitrary workflow name into a safe lowercase filename stem:
/// spaces become dashes and anything that is not alphanumeric, `-`, or `_`
/// is dropped.
fn sanitize_filename(name: &str) -> String {
    name.to_lowercase()
        .chars()
        .filter_map(|c| match c {
            ' ' => Some('-'),
            c if c.is_alphanumeric() || c == '-' || c == '_' => Some(c),
            _ => None,
        })
        .collect()
}
1110
/// Turn a task ID into a valid GitHub Actions job ID: dots and spaces become
/// dashes, and any remaining character that is not alphanumeric, `-`, or `_`
/// is dropped. Case is preserved.
fn sanitize_job_id(id: &str) -> String {
    id.chars()
        .filter_map(|c| match c {
            '.' | ' ' => Some('-'),
            c if c.is_alphanumeric() || c == '-' || c == '_' => Some(c),
            _ => None,
        })
        .collect()
}
1118
#[derive(Debug, Clone)]
/// A single release build target (platform/arch) for the release workflow.
pub struct ReleaseTarget {
    /// Stable identifier, e.g. "linux-x64"; also used in artifact names.
    pub id: String,
    /// Rust target triple, e.g. "x86_64-unknown-linux-gnu".
    pub rust_triple: String,
    /// GitHub runner label that builds this target.
    pub runner: String,
}
1129
1130impl ReleaseTarget {
1131 #[must_use]
1136 pub fn defaults_with_runner(linux_runner: Option<&str>) -> Vec<Self> {
1137 let linux = linux_runner.unwrap_or("ubuntu-latest").to_string();
1138 vec![
1139 Self {
1140 id: "linux-x64".to_string(),
1141 rust_triple: "x86_64-unknown-linux-gnu".to_string(),
1142 runner: linux.clone(),
1143 },
1144 Self {
1145 id: "linux-arm64".to_string(),
1146 rust_triple: "aarch64-unknown-linux-gnu".to_string(),
1147 runner: linux,
1148 },
1149 Self {
1150 id: "darwin-arm64".to_string(),
1151 rust_triple: "aarch64-apple-darwin".to_string(),
1152 runner: "macos-14".to_string(),
1153 },
1154 ]
1155 }
1156
1157 #[must_use]
1159 pub fn defaults() -> Vec<Self> {
1160 Self::defaults_with_runner(None)
1161 }
1162}
1163
/// Builds a standalone release workflow (cross-platform build matrix plus a
/// publish job) on top of a configured [`GitHubActionsEmitter`].
pub struct ReleaseWorkflowBuilder {
    // Supplies the runner for the publish job and Linux build defaults.
    emitter: GitHubActionsEmitter,
    // Targets to build; seeded from ReleaseTarget::defaults_with_runner.
    targets: Vec<ReleaseTarget>,
}
1169
1170impl ReleaseWorkflowBuilder {
    /// Create a builder with the default targets, using the emitter's runner
    /// for Linux builds.
    #[must_use]
    pub fn new(emitter: GitHubActionsEmitter) -> Self {
        let targets = ReleaseTarget::defaults_with_runner(Some(&emitter.runner));
        Self { emitter, targets }
    }
1179
    /// Replace the default target list.
    #[must_use]
    pub fn with_targets(mut self, targets: Vec<ReleaseTarget>) -> Self {
        self.targets = targets;
        self
    }
1186
    /// Build the release workflow: a cross-platform `build` matrix job
    /// followed by a `publish` job that aggregates the binaries.
    ///
    /// Triggered by published releases, or manually via `workflow_dispatch`
    /// with a required `tag_name` input.
    #[must_use]
    pub fn build(&self, ir: &IntermediateRepresentation) -> Workflow {
        let workflow_name = GitHubActionsEmitter::build_workflow_name(ir);

        let triggers = WorkflowTriggers {
            release: Some(ReleaseTrigger {
                types: vec!["published".to_string()],
            }),
            workflow_dispatch: Some(WorkflowDispatchTrigger {
                inputs: {
                    let mut inputs = IndexMap::new();
                    inputs.insert(
                        "tag_name".to_string(),
                        WorkflowInput {
                            description: "Tag to release (e.g., v0.16.0)".to_string(),
                            required: Some(true),
                            default: None,
                            input_type: Some("string".to_string()),
                            options: None,
                        },
                    );
                    inputs
                },
            }),
            ..Default::default()
        };

        let mut jobs = IndexMap::new();
        jobs.insert("build".to_string(), self.build_matrix_job(ir));
        jobs.insert("publish".to_string(), self.build_publish_job(ir));

        Workflow {
            name: workflow_name,
            on: triggers,
            concurrency: Some(Concurrency {
                group: "${{ github.workflow }}-${{ github.head_ref || github.ref }}".to_string(),
                cancel_in_progress: Some(true),
            }),
            // Releases write repo contents (assets) and mint OIDC id-tokens.
            permissions: Some(Permissions {
                contents: Some(PermissionLevel::Write),
                id_token: Some(PermissionLevel::Write),
                ..Default::default()
            }),
            env: IndexMap::new(),
            jobs,
        }
    }
1237
    /// Build the `build` job: a fail-slow matrix over all release targets,
    /// each matrix entry carrying the target id, Rust triple, and runner.
    fn build_matrix_job(&self, ir: &IntermediateRepresentation) -> Job {
        let matrix_include: Vec<IndexMap<String, serde_yaml::Value>> = self
            .targets
            .iter()
            .map(|t| {
                let mut entry = IndexMap::new();
                entry.insert(
                    "target".to_string(),
                    serde_yaml::Value::String(t.id.clone()),
                );
                entry.insert(
                    "rust-triple".to_string(),
                    serde_yaml::Value::String(t.rust_triple.clone()),
                );
                entry.insert(
                    "runs-on".to_string(),
                    serde_yaml::Value::String(t.runner.clone()),
                );
                entry
            })
            .collect();

        let mut steps = Vec::new();

        steps.push(
            Step::uses("actions/checkout@v4")
                .with_name("Checkout")
                .with_input("fetch-depth", serde_yaml::Value::Number(0.into())),
        );

        // Install Nix only when the IR's bootstrap phase asks for it.
        let has_install_nix = ir
            .sorted_phase_tasks(BuildStage::Bootstrap)
            .iter()
            .any(|t| t.id == "install-nix");
        if has_install_nix {
            steps.push(
                Step::uses("DeterminateSystems/nix-installer-action@v16")
                    .with_name("Install Nix")
                    .with_input(
                        "extra-conf",
                        serde_yaml::Value::String("accept-flake-config = true".to_string()),
                    ),
            );
        }

        // Reuse the setup-cuenv task's first command verbatim when present.
        if let Some(cuenv_task) = ir
            .sorted_phase_tasks(BuildStage::Setup)
            .iter()
            .find(|t| t.id == "setup-cuenv")
        {
            let command = cuenv_task.command.first().cloned().unwrap_or_default();
            steps.push(Step::run(&command).with_name("Setup cuenv"));
        }

        let environment = ir.pipeline.environment.as_deref();
        let build_cmd = environment.map_or_else(
            || "cuenv release binaries --build-only --target ${{ matrix.target }}".to_string(),
            |env| {
                "cuenv release binaries --build-only --target ${{ matrix.target }} -e $ENV"
                    .replace("$ENV", env)
            },
        );
        steps.push(Step::run(&build_cmd).with_name("Build for ${{ matrix.target }}"));

        // A missing binary here is a hard error (unlike task artifacts).
        let mut upload_step = Step::uses("actions/upload-artifact@v4")
            .with_name("Upload binary")
            .with_input(
                "name",
                serde_yaml::Value::String("binary-${{ matrix.target }}".to_string()),
            )
            .with_input(
                "path",
                serde_yaml::Value::String("target/${{ matrix.rust-triple }}/release/*".to_string()),
            );
        upload_step.with_inputs.insert(
            "if-no-files-found".to_string(),
            serde_yaml::Value::String("error".to_string()),
        );
        upload_step.with_inputs.insert(
            "include-hidden-files".to_string(),
            serde_yaml::Value::Bool(true),
        );
        steps.push(upload_step);

        Job {
            name: Some("Build ${{ matrix.target }}".to_string()),
            runs_on: RunsOn::Label("${{ matrix.runs-on }}".to_string()),
            needs: Vec::new(),
            if_condition: None,
            strategy: Some(Strategy {
                matrix: Matrix {
                    include: matrix_include,
                },
                // Keep building other targets when one fails.
                fail_fast: Some(false),
                max_parallel: None,
            }),
            environment: None,
            env: IndexMap::new(),
            concurrency: None,
            continue_on_error: None,
            timeout_minutes: Some(60),
            steps,
        }
    }
1351
    /// Build the `publish` job: download every target's binary artifact into
    /// the cargo layout and run `cuenv release binaries --publish-only`.
    fn build_publish_job(&self, ir: &IntermediateRepresentation) -> Job {
        let mut steps = Vec::new();

        steps.push(
            Step::uses("actions/checkout@v4")
                .with_name("Checkout")
                .with_input("fetch-depth", serde_yaml::Value::Number(0.into())),
        );

        // Mirror the build job: install Nix only if the IR asks for it.
        let has_install_nix = ir
            .sorted_phase_tasks(BuildStage::Bootstrap)
            .iter()
            .any(|t| t.id == "install-nix");
        if has_install_nix {
            steps.push(
                Step::uses("DeterminateSystems/nix-installer-action@v16")
                    .with_name("Install Nix")
                    .with_input(
                        "extra-conf",
                        serde_yaml::Value::String("accept-flake-config = true".to_string()),
                    ),
            );
        }

        if let Some(cuenv_task) = ir
            .sorted_phase_tasks(BuildStage::Setup)
            .iter()
            .find(|t| t.id == "setup-cuenv")
        {
            let command = cuenv_task.command.first().cloned().unwrap_or_default();
            steps.push(Step::run(&command).with_name("Setup cuenv"));
        }

        // Restore each build-matrix artifact into target/<triple>/release,
        // the layout the publish command reads from.
        for target in &self.targets {
            let mut download_step = Step::uses("actions/download-artifact@v4")
                .with_name(format!("Download {}", target.id))
                .with_input(
                    "name",
                    serde_yaml::Value::String(format!("binary-{}", target.id)),
                )
                .with_input(
                    "path",
                    serde_yaml::Value::String(format!("target/{}/release", target.rust_triple)),
                );
            // A missing binary must fail the publish, never be skipped.
            download_step.continue_on_error = Some(false);
            steps.push(download_step);
        }

        // 1Password secrets are wired up only when the setup phase uses them.
        let has_1password = ir
            .sorted_phase_tasks(BuildStage::Setup)
            .iter()
            .any(|t| t.id == "setup-1password");
        if has_1password {
            steps.push(Step::run("cuenv secrets setup onepassword").with_name("Setup 1Password"));
        }

        let environment = ir.pipeline.environment.as_deref();
        let publish_cmd = environment.map_or_else(
            || "cuenv release binaries --publish-only".to_string(),
            |env| format!("cuenv release binaries --publish-only -e {env}"),
        );
        let mut publish_step = Step::run(&publish_cmd)
            .with_name("Publish release")
            .with_env("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}");

        if has_1password {
            publish_step.env.insert(
                "OP_SERVICE_ACCOUNT_TOKEN".to_string(),
                "${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}".to_string(),
            );
        }
        steps.push(publish_step);

        Job {
            name: Some("Publish Release".to_string()),
            runs_on: RunsOn::Label(self.emitter.runner.clone()),
            needs: vec!["build".to_string()],
            if_condition: None,
            strategy: None,
            // Publishing is gated behind the pipeline environment (defaulting
            // to "production"), so it can require approval in GitHub.
            environment: Some(Environment::Name(
                ir.pipeline
                    .environment
                    .clone()
                    .unwrap_or_else(|| "production".to_string()),
            )),
            env: IndexMap::new(),
            concurrency: None,
            continue_on_error: None,
            timeout_minutes: Some(30),
            steps,
        }
    }
1451}
1452
#[cfg(test)]
mod tests {
    use super::*;
    use cuenv_ci::ir::{CachePolicy, PipelineMetadata, ResourceRequirements, TriggerCondition};
    use cuenv_core::ci::PipelineMode;
    use std::collections::BTreeMap;

    /// Builds a minimal expanded-mode IR named "test-pipeline" wrapping `tasks`.
    fn make_ir(tasks: Vec<Task>) -> IntermediateRepresentation {
        IntermediateRepresentation {
            version: "1.4".to_string(),
            pipeline: PipelineMetadata {
                name: "test-pipeline".to_string(),
                mode: PipelineMode::Expanded,
                environment: None,
                requires_onepassword: false,
                project_name: None,
                trigger: None,
                pipeline_tasks: vec![],
                pipeline_task_defs: vec![],
            },
            runtimes: vec![],
            tasks,
        }
    }

    /// Builds an uncached task pinned to a build `phase` with an explicit
    /// `priority`; single-element commands are treated as shell commands.
    fn make_phase_task(id: &str, command: &[&str], phase: BuildStage, priority: i32) -> Task {
        Task {
            id: id.to_string(),
            runtime: None,
            command: command.iter().map(|s| (*s).to_string()).collect(),
            shell: command.len() == 1,
            env: BTreeMap::new(),
            secrets: BTreeMap::new(),
            resources: None,
            concurrency_group: None,
            inputs: vec![],
            outputs: vec![],
            depends_on: vec![],
            cache_policy: CachePolicy::Disabled,
            deployment: false,
            manual_approval: false,
            matrix: None,
            artifact_downloads: vec![],
            params: BTreeMap::new(),
            phase: Some(phase),
            label: None,
            priority: Some(priority),
            contributor: None,
            condition: None,
            provider_hints: None,
        }
    }

    /// Builds a plain (phase-less, normally-cached) task with the given
    /// argv-style `command`.
    fn make_task(id: &str, command: &[&str]) -> Task {
        Task {
            id: id.to_string(),
            runtime: None,
            command: command.iter().map(|s| (*s).to_string()).collect(),
            shell: false,
            env: BTreeMap::new(),
            secrets: BTreeMap::new(),
            resources: None,
            concurrency_group: None,
            inputs: vec![],
            outputs: vec![],
            depends_on: vec![],
            cache_policy: CachePolicy::Normal,
            deployment: false,
            manual_approval: false,
            matrix: None,
            artifact_downloads: vec![],
            params: BTreeMap::new(),
            phase: None,
            label: None,
            priority: None,
            contributor: None,
            condition: None,
            provider_hints: None,
        }
    }

    #[test]
    fn test_simple_workflow() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();
        let ir = make_ir(vec![make_task("build", &["cargo", "build"])]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("name: test-pipeline"));
        assert!(yaml.contains("jobs:"));
        assert!(yaml.contains("build:"));
        assert!(yaml.contains("cuenv task build"));
    }

    #[test]
    fn test_workflow_with_nix() {
        let emitter = GitHubActionsEmitter::new().with_nix();

        let provider_hints = serde_json::json!({
            "github_action": {
                "uses": "DeterminateSystems/nix-installer-action@v16",
                "inputs": {
                    "extra-conf": "accept-flake-config = true"
                }
            }
        });

        let mut bootstrap_task =
            make_phase_task("install-nix", &["curl ... | sh"], BuildStage::Bootstrap, 0);
        bootstrap_task.label = Some("Install Nix".to_string());
        bootstrap_task.contributor = Some("nix".to_string());
        bootstrap_task.provider_hints = Some(provider_hints);

        let mut setup_task =
            make_phase_task("setup-cuenv", &["nix build .#cuenv"], BuildStage::Setup, 10);
        setup_task.label = Some("Setup cuenv".to_string());
        setup_task.contributor = Some("cuenv".to_string());
        setup_task.depends_on = vec!["install-nix".to_string()];

        let ir = make_ir(vec![
            bootstrap_task,
            setup_task,
            make_task("build", &["cargo", "build"]),
        ]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("DeterminateSystems/nix-installer-action"));
        assert!(yaml.contains("nix build .#cuenv"));
    }

    #[test]
    fn test_workflow_with_cachix() {
        let emitter = GitHubActionsEmitter::new()
            .with_nix()
            .with_cachix("my-cache");

        let nix_provider_hints = serde_json::json!({
            "github_action": {
                "uses": "DeterminateSystems/nix-installer-action@v16",
                "inputs": {
                    "extra-conf": "accept-flake-config = true"
                }
            }
        });

        let mut bootstrap_task =
            make_phase_task("install-nix", &["curl ... | sh"], BuildStage::Bootstrap, 0);
        bootstrap_task.label = Some("Install Nix".to_string());
        bootstrap_task.contributor = Some("nix".to_string());
        bootstrap_task.provider_hints = Some(nix_provider_hints);

        let mut cachix_task = make_phase_task(
            "setup-cachix",
            &["nix-env -iA cachix && cachix use my-cache"],
            BuildStage::Setup,
            5,
        );
        cachix_task.label = Some("Setup Cachix (my-cache)".to_string());
        cachix_task.contributor = Some("cachix".to_string());
        cachix_task.depends_on = vec!["install-nix".to_string()];
        cachix_task.env.insert(
            "CACHIX_AUTH_TOKEN".to_string(),
            "${CACHIX_AUTH_TOKEN}".to_string(),
        );

        let ir = make_ir(vec![
            bootstrap_task,
            cachix_task,
            make_task("build", &["cargo", "build"]),
        ]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("cachix use my-cache"));
        assert!(yaml.contains("Setup Cachix (my-cache)"));
    }

    #[test]
    fn test_workflow_with_dependencies() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();
        let mut test_task = make_task("test", &["cargo", "test"]);
        test_task.depends_on = vec!["build".to_string()];

        let ir = make_ir(vec![make_task("build", &["cargo", "build"]), test_task]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("needs:"));
        assert!(yaml.contains("- build"));
    }

    #[test]
    fn test_workflow_with_manual_approval() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build()
            .with_approval_environment("staging");
        let mut deploy_task = make_task("deploy", &["./deploy.sh"]);
        deploy_task.manual_approval = true;

        let ir = make_ir(vec![deploy_task]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("environment: staging"));
    }

    #[test]
    fn test_workflow_with_concurrency_group() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();
        let mut deploy_task = make_task("deploy", &["./deploy.sh"]);
        deploy_task.concurrency_group = Some("production".to_string());

        let ir = make_ir(vec![deploy_task]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("concurrency:"));
        assert!(yaml.contains("group: production"));
    }

    #[test]
    fn test_workflow_with_custom_runner() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build()
            .with_runner("self-hosted");
        let ir = make_ir(vec![make_task("build", &["cargo", "build"])]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("runs-on: self-hosted"));
    }

    #[test]
    fn test_workflow_with_resource_tags() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();
        let mut task = make_task("build", &["cargo", "build"]);
        task.resources = Some(ResourceRequirements {
            cpu: None,
            memory: None,
            tags: vec!["blacksmith-8vcpu-ubuntu-2404".to_string()],
        });

        let ir = make_ir(vec![task]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.contains("runs-on: blacksmith-8vcpu-ubuntu-2404"));
    }

    #[test]
    fn test_emit_workflows() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();
        let ir = make_ir(vec![make_task("build", &["cargo", "build"])]);

        let workflows = emitter.emit_workflows(&ir).unwrap();

        assert_eq!(workflows.len(), 1);
        assert!(workflows.contains_key("test-pipeline.yml"));
    }

    #[test]
    fn test_sanitize_filename() {
        assert_eq!(sanitize_filename("CI Pipeline"), "ci-pipeline");
        assert_eq!(sanitize_filename("release/v1"), "releasev1");
        assert_eq!(sanitize_filename("test_workflow"), "test_workflow");
    }

    #[test]
    fn test_sanitize_job_id() {
        assert_eq!(sanitize_job_id("build.test"), "build-test");
        assert_eq!(sanitize_job_id("deploy prod"), "deploy-prod");
    }

    #[test]
    fn test_validation_invalid_id() {
        let emitter = GitHubActionsEmitter::new();
        let ir = make_ir(vec![make_task("invalid task", &["echo"])]);

        let result = emitter.validate(&ir);
        assert!(result.is_err());
    }

    #[test]
    fn test_validation_missing_dependency() {
        let emitter = GitHubActionsEmitter::new();
        let mut task = make_task("test", &["cargo", "test"]);
        task.depends_on = vec!["nonexistent".to_string()];

        let ir = make_ir(vec![task]);

        let result = emitter.validate(&ir);
        assert!(result.is_err());
    }

    #[test]
    fn test_format_name() {
        let emitter = GitHubActionsEmitter::new();
        assert_eq!(emitter.format_name(), "github");
        assert_eq!(emitter.file_extension(), "yml");
    }

    #[test]
    fn test_generation_header() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();
        let ir = make_ir(vec![make_task("build", &["cargo", "build"])]);

        let yaml = emitter.emit(&ir).unwrap();

        assert!(yaml.starts_with("# Generated by cuenv"));
        assert!(yaml.contains("cuenv ci --format github"));
    }

    #[test]
    fn test_build_simple_job() {
        let emitter = GitHubActionsEmitter::new().with_runner("ubuntu-latest");
        let task = make_task("build", &["cargo", "build"]);
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_simple_job(&task, &ir, None, None);

        assert_eq!(job.name, Some("build".to_string()));
        assert!(matches!(job.runs_on, RunsOn::Label(ref l) if l == "ubuntu-latest"));
        assert!(job.needs.is_empty());
        assert!(!job.steps.is_empty());

        let step_names: Vec<_> = job.steps.iter().filter_map(|s| s.name.as_ref()).collect();
        assert!(step_names.contains(&&"Checkout".to_string()));
        assert!(step_names.contains(&&"build".to_string()));
    }

    #[test]
    fn test_build_simple_job_with_environment() {
        let emitter = GitHubActionsEmitter::new();
        let task = make_task("deploy", &["./deploy.sh"]);
        let ir = make_ir(vec![task.clone()]);
        let env = "production".to_string();

        let job = emitter.build_simple_job(&task, &ir, Some(&env), None);

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("deploy"));
        assert!(task_step.is_some());
        let run_cmd = task_step.unwrap().run.as_ref().unwrap();
        assert!(run_cmd.contains("-e production"));
        assert!(run_cmd.contains("--skip-dependencies"));
    }

    #[test]
    fn test_build_simple_job_with_working_directory() {
        let emitter = GitHubActionsEmitter::new();
        let task = make_task("build", &["cargo", "build"]);
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_simple_job(&task, &ir, None, Some("platform/my-project"));

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("build"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            Some("platform/my-project".to_string())
        );
    }

    #[test]
    fn test_build_matrix_jobs() {
        use cuenv_ci::ir::MatrixConfig;

        let emitter = GitHubActionsEmitter::new().with_runner("ubuntu-latest");
        let mut task = make_task("release.build", &["cargo", "build"]);
        task.matrix = Some(MatrixConfig {
            dimensions: [(
                "arch".to_string(),
                vec!["linux-x64".to_string(), "darwin-arm64".to_string()],
            )]
            .into_iter()
            .collect(),
            ..Default::default()
        });
        let ir = make_ir(vec![task.clone()]);

        let jobs = emitter.build_matrix_jobs(&task, &ir, None, None, &[], None);

        assert_eq!(jobs.len(), 2);
        assert!(jobs.contains_key("release-build-linux-x64"));
        assert!(jobs.contains_key("release-build-darwin-arm64"));

        let linux_job = jobs.get("release-build-linux-x64").unwrap();
        assert_eq!(
            linux_job.name,
            Some("release.build (linux-x64)".to_string())
        );

        let task_step = linux_job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("release.build (linux-x64)"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().env.get("CUENV_ARCH"),
            Some(&"linux-x64".to_string())
        );
    }

    #[test]
    fn test_build_matrix_jobs_with_arch_runners() {
        use cuenv_ci::ir::MatrixConfig;

        let emitter = GitHubActionsEmitter::new().with_runner("ubuntu-latest");
        let mut task = make_task("build", &["cargo", "build"]);
        task.matrix = Some(MatrixConfig {
            dimensions: [(
                "arch".to_string(),
                vec!["linux-x64".to_string(), "darwin-arm64".to_string()],
            )]
            .into_iter()
            .collect(),
            ..Default::default()
        });
        let ir = make_ir(vec![task.clone()]);
        let arch_runners: HashMap<String, String> = [
            ("linux-x64".to_string(), "ubuntu-24.04".to_string()),
            ("darwin-arm64".to_string(), "macos-14".to_string()),
        ]
        .into_iter()
        .collect();

        let jobs = emitter.build_matrix_jobs(&task, &ir, None, Some(&arch_runners), &[], None);

        let linux_job = jobs.get("build-linux-x64").unwrap();
        assert!(matches!(linux_job.runs_on, RunsOn::Label(ref l) if l == "ubuntu-24.04"));

        let darwin_job = jobs.get("build-darwin-arm64").unwrap();
        assert!(matches!(darwin_job.runs_on, RunsOn::Label(ref l) if l == "macos-14"));
    }

    #[test]
    fn test_build_artifact_aggregation_job() {
        use cuenv_ci::ir::ArtifactDownload;

        let emitter = GitHubActionsEmitter::new();
        let mut task = make_task("release.publish", &["./publish.sh"]);
        task.artifact_downloads = vec![ArtifactDownload {
            name: "release-build".to_string(),
            path: "./artifacts".to_string(),
            filter: String::new(),
        }];
        task.params = [("version".to_string(), "1.0.0".to_string())]
            .into_iter()
            .collect();
        let ir = make_ir(vec![task.clone()]);
        let previous_jobs = vec![
            "release-build-linux-x64".to_string(),
            "release-build-darwin-arm64".to_string(),
        ];

        let job = emitter.build_artifact_aggregation_job(&task, &ir, None, &previous_jobs, None);

        assert_eq!(job.name, Some("release.publish".to_string()));
        assert_eq!(job.needs, previous_jobs);
        assert_eq!(job.timeout_minutes, Some(30));

        let download_steps: Vec<_> = job
            .steps
            .iter()
            .filter(|s| s.uses.as_deref() == Some("actions/download-artifact@v4"))
            .collect();
        assert_eq!(download_steps.len(), 2);

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("release.publish"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().env.get("VERSION"),
            Some(&"1.0.0".to_string())
        );
    }

    #[test]
    fn test_task_has_matrix() {
        use cuenv_ci::ir::MatrixConfig;

        let task_without = make_task("build", &["cargo", "build"]);
        assert!(!GitHubActionsEmitter::task_has_matrix(&task_without));

        let mut task_with_empty = make_task("build", &["cargo", "build"]);
        task_with_empty.matrix = Some(MatrixConfig::default());
        assert!(!GitHubActionsEmitter::task_has_matrix(&task_with_empty));

        let mut task_with_matrix = make_task("build", &["cargo", "build"]);
        task_with_matrix.matrix = Some(MatrixConfig {
            dimensions: [("arch".to_string(), vec!["x64".to_string()])]
                .into_iter()
                .collect(),
            ..Default::default()
        });
        assert!(GitHubActionsEmitter::task_has_matrix(&task_with_matrix));
    }

    #[test]
    fn test_task_has_artifact_downloads() {
        use cuenv_ci::ir::ArtifactDownload;

        let task_without = make_task("build", &["cargo", "build"]);
        assert!(!GitHubActionsEmitter::task_has_artifact_downloads(
            &task_without
        ));

        let mut task_with = make_task("publish", &["./publish.sh"]);
        task_with.artifact_downloads = vec![ArtifactDownload {
            name: "build".to_string(),
            path: "./out".to_string(),
            filter: String::new(),
        }];
        assert!(GitHubActionsEmitter::task_has_artifact_downloads(
            &task_with
        ));
    }

    #[test]
    fn test_render_phase_steps() {
        let mut bootstrap_task =
            make_phase_task("install-nix", &["curl ... | sh"], BuildStage::Bootstrap, 0);
        bootstrap_task.label = Some("Install Nix".to_string());
        bootstrap_task.contributor = Some("nix".to_string());

        let mut setup_task =
            make_phase_task("setup-cuenv", &["nix build .#cuenv"], BuildStage::Setup, 10);
        setup_task.label = Some("Setup cuenv".to_string());
        setup_task.contributor = Some("cuenv".to_string());
        setup_task
            .env
            .insert("MY_VAR".to_string(), "${MY_SECRET}".to_string());

        let ir = make_ir(vec![bootstrap_task, setup_task]);

        let (steps, secret_env_vars) = GitHubActionsEmitter::render_phase_steps(&ir);

        assert_eq!(steps.len(), 2);
        assert!(steps[0].name.as_deref() == Some("Install Nix"));
        assert!(steps[1].name.as_deref() == Some("Setup cuenv"));

        assert_eq!(
            secret_env_vars.get("MY_VAR"),
            Some(&"${MY_SECRET}".to_string())
        );
    }

    #[test]
    fn test_build_simple_job_without_working_directory() {
        let emitter = GitHubActionsEmitter::new();
        let task = make_task("build", &["cargo", "build"]);
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_simple_job(&task, &ir, None, None);

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("build"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            None,
            "Root project should NOT have working-directory"
        );
    }

    #[test]
    fn test_build_simple_job_with_nested_working_directory() {
        let emitter = GitHubActionsEmitter::new();
        let task = make_task("deploy", &["./deploy.sh"]);
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_simple_job(
            &task,
            &ir,
            None,
            Some("projects/rawkode.academy/platform/email-preferences"),
        );

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("deploy"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            Some("projects/rawkode.academy/platform/email-preferences".to_string()),
            "Nested project should have correct working-directory"
        );
    }

    #[test]
    fn test_build_matrix_jobs_with_working_directory() {
        use cuenv_ci::ir::MatrixConfig;

        let emitter = GitHubActionsEmitter::new();
        let mut task = make_task("release.build", &["cargo", "build"]);
        task.matrix = Some(MatrixConfig {
            dimensions: [("arch".to_string(), vec!["linux-x64".to_string()])]
                .into_iter()
                .collect(),
            ..Default::default()
        });
        let ir = make_ir(vec![task.clone()]);

        let jobs = emitter.build_matrix_jobs(&task, &ir, None, None, &[], Some("apps/my-service"));

        assert_eq!(jobs.len(), 1);
        let job = jobs.get("release-build-linux-x64").unwrap();

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("release.build (linux-x64)"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            Some("apps/my-service".to_string()),
            "Matrix job should have working-directory"
        );
    }

    #[test]
    fn test_build_matrix_jobs_without_working_directory() {
        use cuenv_ci::ir::MatrixConfig;

        let emitter = GitHubActionsEmitter::new();
        let mut task = make_task("build", &["cargo", "build"]);
        task.matrix = Some(MatrixConfig {
            dimensions: [("arch".to_string(), vec!["linux-x64".to_string()])]
                .into_iter()
                .collect(),
            ..Default::default()
        });
        let ir = make_ir(vec![task.clone()]);

        let jobs = emitter.build_matrix_jobs(&task, &ir, None, None, &[], None);

        let job = jobs.get("build-linux-x64").unwrap();
        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("build (linux-x64)"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            None,
            "Root project matrix job should NOT have working-directory"
        );
    }

    #[test]
    fn test_build_artifact_aggregation_job_with_working_directory() {
        use cuenv_ci::ir::ArtifactDownload;

        let emitter = GitHubActionsEmitter::new();
        let mut task = make_task("publish", &["./publish.sh"]);
        task.artifact_downloads = vec![ArtifactDownload {
            name: "build".to_string(),
            path: "./out".to_string(),
            filter: String::new(),
        }];
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_artifact_aggregation_job(
            &task,
            &ir,
            None,
            &["build-linux-x64".to_string()],
            Some("services/api"),
        );

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("publish"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            Some("services/api".to_string()),
            "Artifact aggregation job should have working-directory"
        );
    }

    #[test]
    fn test_build_artifact_aggregation_job_without_working_directory() {
        use cuenv_ci::ir::ArtifactDownload;

        let emitter = GitHubActionsEmitter::new();
        let mut task = make_task("publish", &["./publish.sh"]);
        task.artifact_downloads = vec![ArtifactDownload {
            name: "build".to_string(),
            path: "./out".to_string(),
            filter: String::new(),
        }];
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_artifact_aggregation_job(
            &task,
            &ir,
            None,
            &["build-linux-x64".to_string()],
            None,
        );

        let task_step = job
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some("publish"));
        assert!(task_step.is_some());
        assert_eq!(
            task_step.unwrap().working_directory,
            None,
            "Root project aggregation job should NOT have working-directory"
        );
    }

    #[test]
    fn test_working_directory_yaml_serialization() {
        let emitter = GitHubActionsEmitter::new();
        let task = make_task("test", &["cargo", "test"]);
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_simple_job(&task, &ir, None, Some("my-project"));

        let yaml = serde_yaml::to_string(&job).expect("Failed to serialize job");
        assert!(
            yaml.contains("working-directory: my-project"),
            "YAML should contain working-directory field. Got:\n{yaml}"
        );
    }

    #[test]
    fn test_working_directory_not_in_yaml_when_none() {
        let emitter = GitHubActionsEmitter::new();
        let task = make_task("test", &["cargo", "test"]);
        let ir = make_ir(vec![task.clone()]);

        let job = emitter.build_simple_job(&task, &ir, None, None);

        let yaml = serde_yaml::to_string(&job).expect("Failed to serialize job");
        assert!(
            !yaml.contains("working-directory"),
            "YAML should NOT contain working-directory field. Got:\n{yaml}"
        );
    }

    #[test]
    fn test_workflow_includes_own_path_in_triggers() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();

        let mut ir = make_ir(vec![make_task("build", &["cargo", "build"])]);
        ir.pipeline.trigger = Some(TriggerCondition {
            branches: vec!["main".to_string()],
            paths: vec!["src/**".to_string(), "Cargo.toml".to_string()],
            pull_request: Some(true),
            ..Default::default()
        });

        let yaml = emitter.emit(&ir).unwrap();

        assert!(
            yaml.contains(".github/workflows/test-pipeline.yml"),
            "Workflow should include its own path in triggers. Got:\n{yaml}"
        );
    }

    #[test]
    fn test_workflow_path_not_added_when_paths_empty() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();

        let mut ir = make_ir(vec![make_task("build", &["cargo", "build"])]);
        ir.pipeline.trigger = Some(TriggerCondition {
            branches: vec!["main".to_string()],
            paths: vec![],
            ..Default::default()
        });

        let yaml = emitter.emit(&ir).unwrap();

        assert!(
            !yaml.contains(".github/workflows/test-pipeline.yml"),
            "Workflow should NOT include its own path when no path filtering. Got:\n{yaml}"
        );
    }

    #[test]
    fn test_workflow_path_added_to_both_push_and_pr_triggers() {
        let emitter = GitHubActionsEmitter::new()
            .without_nix()
            .without_cuenv_build();

        let mut ir = make_ir(vec![make_task("build", &["cargo", "build"])]);
        ir.pipeline.trigger = Some(TriggerCondition {
            branches: vec!["main".to_string()],
            paths: vec!["src/**".to_string()],
            pull_request: Some(true),
            ..Default::default()
        });

        let yaml = emitter.emit(&ir).unwrap();

        let workflow_path_count = yaml.matches(".github/workflows/test-pipeline.yml").count();
        assert_eq!(
            workflow_path_count, 2,
            "Workflow path should appear in both push and PR triggers. Got:\n{yaml}"
        );
    }

    #[test]
    fn test_build_trigger_paths_adds_workflow_path() {
        let paths = vec!["src/**".to_string(), "Cargo.toml".to_string()];

        let result = GitHubActionsEmitter::build_trigger_paths(&paths, "ci.yml");

        assert!(result.contains(&".github/workflows/ci.yml".to_string()));
        assert!(result.contains(&"src/**".to_string()));
        assert!(result.contains(&"Cargo.toml".to_string()));
    }

    #[test]
    fn test_build_trigger_paths_empty_input() {
        let paths: Vec<String> = vec![];

        let result = GitHubActionsEmitter::build_trigger_paths(&paths, "ci.yml");

        assert!(result.is_empty());
    }

    #[test]
    fn test_build_trigger_paths_deduplication() {
        let paths = vec![".github/workflows/ci.yml".to_string(), "src/**".to_string()];

        let result = GitHubActionsEmitter::build_trigger_paths(&paths, "ci.yml");

        let count = result
            .iter()
            .filter(|p| *p == ".github/workflows/ci.yml")
            .count();
        assert_eq!(count, 1);
    }

    #[test]
    fn test_build_trigger_paths_sorted() {
        let paths = vec!["z-file".to_string(), "a-file".to_string()];

        let result = GitHubActionsEmitter::build_trigger_paths(&paths, "ci.yml");

        let mut sorted = result.clone();
        sorted.sort();
        assert_eq!(result, sorted);
    }
}