use super::super::{CommandExecutor, relative_path_from_root};
use cuenv_core::DryRun;
use cuenv_core::manifest::Project;
use cuenv_core::{ModuleEvaluation, Result};
use cuenv_github::GitHubConfigExt;
use similar::TextDiff;
use std::collections::{BTreeMap, HashSet};
use std::path::{Path, PathBuf};
use tracing::instrument;
/// A project discovered within an evaluated CUE module, paired with the
/// paths needed to locate it on disk and its deserialized manifest.
struct ProjectInfo {
/// Absolute path to the project directory (module root joined with `relative_path`).
project_path: PathBuf,
/// Project path relative to the module root.
relative_path: PathBuf,
/// Root directory of the CUE module this project belongs to.
module_root: PathBuf,
/// Deserialized project manifest.
config: Project,
}
impl ProjectInfo {
    /// Builds one `ProjectInfo` per project instance found in the evaluated
    /// module, resolving each project's absolute path from the module root.
    fn collect_from_module(module: &ModuleEvaluation) -> Result<Vec<Self>> {
        module
            .projects()
            .into_iter()
            .map(|instance| {
                let config = Project::try_from(instance)?;
                let relative_path = instance.path.clone();
                Ok(Self {
                    project_path: module.root.join(&relative_path),
                    relative_path,
                    module_root: module.root.clone(),
                    config,
                })
            })
            .collect()
    }
}
/// Flags controlling how a codegen sync run behaves.
#[derive(Clone, Copy, Debug)]
pub struct CodegenSyncOptions {
/// When set, report what would change without writing any files.
pub dry_run: DryRun,
/// Verify files match the declared content instead of writing them.
pub check: bool,
/// Like `check`, but additionally emit a unified diff for drifted files.
pub diff: bool,
}
impl CodegenSyncOptions {
    /// True when this run verifies files instead of writing them; both
    /// `check` and `diff` imply verification mode.
    fn should_check(self) -> bool {
        match (self.check, self.diff) {
            (false, false) => false,
            _ => true,
        }
    }
}
/// Arguments for `execute_sync_codegen`.
#[derive(Debug)]
pub struct CodegenSyncRequest<'a> {
/// Directory of the project whose codegen files should be synced.
pub path: &'a str,
/// CUE package name (not read by the codegen sync path visible in this file).
pub package: &'a str,
/// Behavior flags (dry-run / check / diff).
pub options: CodegenSyncOptions,
}
/// Internal argument bundle passed to `execute_sync_codegen_local`.
struct CodegenSyncContext<'a> {
/// Project directory being synced.
dir_path: &'a Path,
/// Behavior flags for the run.
options: CodegenSyncOptions,
/// Executor providing cached module evaluations.
executor: &'a CommandExecutor,
}
/// Arguments for `sync_codegen_files`: one sync pass over a project's
/// declared codegen files.
struct CodegenSyncFilesRequest<'a> {
/// Root directory that declared file paths are resolved against.
project_root: &'a Path,
/// Project name, used only for logging.
project_name: &'a str,
/// The project's codegen configuration (file map with content and mode).
codegen_config: &'a cuenv_core::manifest::CodegenConfig,
/// Behavior flags for the run.
options: CodegenSyncOptions,
}
/// Per-file state threaded through the managed/scaffold sync helpers.
struct CodegenFileSyncRequest<'a> {
/// Accumulates human-readable report lines for the whole run.
output_lines: &'a mut Vec<String>,
/// Absolute destination path of the generated file.
output_path: &'a Path,
/// Project-relative path, used in report messages.
file_path: &'a str,
/// Declared file content from the codegen config.
content: &'a str,
}
/// Arguments for `write_codegen_file`.
struct CodegenWriteRequest<'a> {
/// Absolute destination path.
output_path: &'a Path,
/// Project-relative path, used in error messages.
file_path: &'a str,
/// Content to write.
content: &'a str,
/// Codegen mode label ("managed" or "scaffold"), used in logs and errors.
mode: &'a str,
}
/// Flags controlling a CI sync run.
#[derive(Clone, Copy, Debug)]
pub struct CiSyncOptions<'a> {
/// When set, report what would change without writing any files.
pub dry_run: DryRun,
/// Verify generated CI configuration matches what is on disk.
pub check: bool,
/// Restrict the sync to a single provider (e.g. "github") instead of the
/// providers declared in the project's CI config.
pub provider: Option<&'a str>,
}
/// Arguments for `execute_sync_ci` (single-project CI sync).
#[derive(Debug)]
pub struct CiSyncRequest<'a> {
/// Directory of the project whose CI configuration should be synced.
pub path: &'a str,
/// CUE package name (forwarded from workspace sync; not read in this file's sync path).
pub package: &'a str,
/// Behavior flags and optional provider override.
pub options: CiSyncOptions<'a>,
}
/// Arguments for `execute_sync_ci_workspace` (CI sync across every
/// discovered project).
#[derive(Debug)]
pub struct CiWorkspaceSyncRequest<'a> {
/// CUE package name, forwarded to each per-project sync.
pub package: &'a str,
/// Behavior flags and optional provider override, shared by all projects.
pub options: CiSyncOptions<'a>,
}
/// Arguments for `execute_sync_github`.
struct GithubSyncRequest<'a> {
/// Repository root; workflows are written under `.github/workflows`.
repo_root: &'a Path,
/// Behavior flags for the run.
options: CiSyncOptions<'a>,
/// Projects whose pipelines should be emitted as workflows.
projects: &'a [ProjectInfo],
}
/// Arguments for the Buildkite sync path (`execute_sync_buildkite`,
/// defined elsewhere in this module).
struct BuildkiteSyncRequest<'a> {
/// Repository root directory.
repo_root: &'a Path,
/// Behavior flags for the run.
options: CiSyncOptions<'a>,
}
/// Loads the CUE instance for `path` and deserializes it into a `Project`
/// manifest.
fn load_project_config(path: &Path, executor: &CommandExecutor) -> Result<Project> {
    load_instance_at_path(path, executor).and_then(|(instance, _root)| instance.deserialize())
}
fn load_instance_at_path(
path: &Path,
executor: &CommandExecutor,
) -> Result<(cuenv_core::module::Instance, PathBuf)> {
let target_path = path.canonicalize().map_err(|e| cuenv_core::Error::Io {
source: e,
path: Some(path.to_path_buf().into_boxed_path()),
operation: "canonicalize path".to_string(),
})?;
tracing::debug!("Using cached module evaluation from executor");
let module = executor.get_module(&target_path)?;
let relative_path = relative_path_from_root(&module.root, &target_path);
let instance = module.get(&relative_path).ok_or_else(|| {
cuenv_core::Error::configuration(format!(
"No CUE instance found at path: {} (relative: {})",
target_path.display(),
relative_path.display()
))
})?;
Ok((instance.clone(), module.root.clone()))
}
/// Entry point for the `sync codegen` command: resolves the target directory
/// from the request and delegates to the local sync implementation.
#[instrument(name = "sync_codegen", skip(executor))]
pub async fn execute_sync_codegen(
    request: CodegenSyncRequest<'_>,
    executor: &CommandExecutor,
) -> Result<String> {
    tracing::info!("Starting sync codegen command");
    let context = CodegenSyncContext {
        dir_path: Path::new(request.path),
        options: request.options,
        executor,
    };
    execute_sync_codegen_local(&context)
}
/// Runs codegen sync for the project at `context.dir_path` and, when the
/// manifest declares formatters, runs them over the affected files and
/// appends their report to the sync output.
fn execute_sync_codegen_local(context: &CodegenSyncContext<'_>) -> Result<String> {
let dir_path = context.dir_path;
let options = context.options;
let executor = context.executor;
let manifest: Project = load_project_config(dir_path, executor)?;
// Projects without a codegen section are a no-op, not an error.
let Some(codegen_config) = &manifest.codegen else {
return Ok("No codegen configuration found in this project.".to_string());
};
let sync_request = CodegenSyncFilesRequest {
project_root: dir_path,
project_name: &manifest.name,
codegen_config,
options,
};
let sync_result = sync_codegen_files(&sync_request)?;
// Formatter step selection:
// - real run that wrote nothing: skip formatting entirely;
// - dry run: simulate formatting over every declared codegen file
//   (nothing was written, so the declared set stands in for it);
// - real run with writes: format exactly the files that were written.
let format_result = if let Some(ref formatters) = manifest.formatters {
if sync_result.written_files.is_empty() && !options.dry_run.is_dry_run() {
String::new()
} else if options.dry_run.is_dry_run() {
let file_paths: Vec<std::path::PathBuf> = codegen_config
.files
.keys()
.map(|p| dir_path.join(p))
.collect();
let file_refs: Vec<&Path> = file_paths.iter().map(|p| p.as_path()).collect();
super::formatters::format_generated_files(
&file_refs,
formatters,
dir_path,
options.dry_run,
options.check,
)?
} else {
let file_refs: Vec<&Path> = sync_result
.written_files
.iter()
.map(|p| p.as_path())
.collect();
super::formatters::format_generated_files(
&file_refs,
formatters,
dir_path,
options.dry_run,
options.check,
)?
}
} else {
String::new()
};
// Only append the formatter section when it produced output.
if format_result.is_empty() {
Ok(sync_result.output)
} else {
Ok(format!("{}\n\n{format_result}", sync_result.output))
}
}
/// Outcome of one codegen sync pass.
struct SyncResult {
/// Human-readable report (one line per file, joined with newlines).
output: String,
/// Absolute paths of files actually written to disk this run.
written_files: Vec<PathBuf>,
}
/// Walks every file declared in the codegen config and synchronizes it
/// according to its mode (managed files are overwritten, scaffold files are
/// only created when absent).
///
/// Returns the per-file report plus the list of paths actually written.
fn sync_codegen_files(request: &CodegenSyncFilesRequest<'_>) -> Result<SyncResult> {
    use cuenv_core::manifest::FileMode;
    let options = request.options;
    let mut output_lines = Vec::new();
    let mut written_files = Vec::new();
    for (file_path, file_def) in &request.codegen_config.files {
        let output_path = request.project_root.join(file_path);
        let mut file_request = CodegenFileSyncRequest {
            output_lines: &mut output_lines,
            output_path: &output_path,
            file_path,
            content: &file_def.content,
        };
        // Bookkeeping is shared across modes; only the sync strategy differs.
        let was_written = match file_def.mode {
            FileMode::Managed => sync_managed_file(&mut file_request, options)?,
            FileMode::Scaffold => sync_scaffold_file(&mut file_request, options)?,
        };
        if was_written {
            written_files.push(output_path);
        }
    }
    tracing::info!(
        project = request.project_name,
        files = request.codegen_config.files.len(),
        written = written_files.len(),
        "Codegen sync complete"
    );
    Ok(SyncResult {
        output: output_lines.join("\n"),
        written_files,
    })
}
/// Synchronizes one managed-mode codegen file. Managed files are always kept
/// identical to the declared content.
///
/// Returns `Ok(true)` only when the file was actually written to disk.
fn sync_managed_file(
    request: &mut CodegenFileSyncRequest<'_>,
    options: CodegenSyncOptions,
) -> Result<bool> {
    // Check/diff mode: report drift without touching the filesystem.
    if options.should_check() {
        if !request.output_path.exists() {
            request
                .output_lines
                .push(format!(" Missing: {}", request.file_path));
            maybe_push_diff(request, None, options);
            return Ok(false);
        }
        // An unreadable file compares as empty and is reported out of sync.
        let contents = std::fs::read_to_string(request.output_path).unwrap_or_default();
        if contents == request.content {
            request
                .output_lines
                .push(format!(" OK: {}", request.file_path));
        } else {
            request
                .output_lines
                .push(format!(" Out of sync: {}", request.file_path));
            maybe_push_diff(request, Some(&contents), options);
        }
        return Ok(false);
    }
    // Dry-run mode: describe the write that would happen.
    if options.dry_run.is_dry_run() {
        let verb = if request.output_path.exists() {
            " Would update: "
        } else {
            " Would create: "
        };
        request
            .output_lines
            .push(format!("{verb}{}", request.file_path));
        return Ok(false);
    }
    // Normal mode: write unconditionally (managed files are always regenerated).
    write_codegen_file(&CodegenWriteRequest {
        output_path: request.output_path,
        file_path: request.file_path,
        content: request.content,
        mode: "managed",
    })?;
    request
        .output_lines
        .push(format!(" Generated: {}", request.file_path));
    Ok(true)
}
/// Synchronizes one scaffold-mode codegen file. Scaffold files are only
/// created when absent; an existing file is never overwritten.
///
/// Returns `Ok(true)` only when the file was written to disk.
fn sync_scaffold_file(
    request: &mut CodegenFileSyncRequest<'_>,
    options: CodegenSyncOptions,
) -> Result<bool> {
    // Existing scaffold files are always left untouched.
    if request.output_path.exists() {
        if !options.dry_run.is_dry_run() && !options.should_check() {
            tracing::debug!(
                "Skipping {} (scaffold mode, file exists)",
                request.file_path
            );
        }
        request
            .output_lines
            .push(format!(" Skipped (exists): {}", request.file_path));
        return Ok(false);
    }
    // Check/diff mode: report the missing file (and optionally its diff).
    if options.should_check() {
        request
            .output_lines
            .push(format!(" Missing scaffold: {}", request.file_path));
        maybe_push_diff(request, None, options);
        return Ok(false);
    }
    // Dry-run mode: describe the scaffold that would happen.
    if options.dry_run.is_dry_run() {
        request
            .output_lines
            .push(format!(" Would scaffold: {}", request.file_path));
        return Ok(false);
    }
    // Normal mode: create the file for the first time.
    write_codegen_file(&CodegenWriteRequest {
        output_path: request.output_path,
        file_path: request.file_path,
        content: request.content,
        mode: "scaffold",
    })?;
    request
        .output_lines
        .push(format!(" Scaffolded: {}", request.file_path));
    Ok(true)
}
fn write_codegen_file(request: &CodegenWriteRequest<'_>) -> Result<()> {
let CodegenWriteRequest {
output_path,
file_path,
content,
mode,
} = *request;
tracing::debug!(
file_path = %file_path,
output_path = %output_path.display(),
content_len = content.len(),
"Writing {mode} codegen file"
);
if let Some(parent) = output_path.parent() {
std::fs::create_dir_all(parent).map_err(|e| {
tracing::error!(
parent = %parent.display(),
error = %e,
"Failed to create parent directory"
);
cuenv_core::Error::Io {
source: e,
path: Some(parent.to_path_buf().into_boxed_path()),
operation: format!("create parent directory for {mode} file: {file_path}"),
}
})?;
}
std::fs::write(output_path, content).map_err(|e| {
tracing::error!(
output_path = %output_path.display(),
error = %e,
"Failed to write {mode} file"
);
cuenv_core::Error::Io {
source: e,
path: Some(output_path.to_path_buf().into_boxed_path()),
operation: format!("write {mode} file: {file_path}"),
}
})?;
Ok(())
}
/// Appends a unified diff between the on-disk content (`existing`, treated as
/// empty when `None`) and the declared content — but only when diff output
/// was requested and the two actually differ.
fn maybe_push_diff(
    request: &mut CodegenFileSyncRequest<'_>,
    existing: Option<&str>,
    options: CodegenSyncOptions,
) {
    let current = existing.unwrap_or("");
    if options.diff && current != request.content {
        let rendered = format_unified_diff(request.file_path, current, request.content);
        request.output_lines.push(rendered);
    }
}
/// Renders a git-style unified diff (`a/<path>` vs `b/<path>`) between the
/// current on-disk text and the expected generated text.
fn format_unified_diff(path: &str, current: &str, expected: &str) -> String {
    let header_from = format!("a/{path}");
    let header_to = format!("b/{path}");
    TextDiff::from_lines(current, expected)
        .unified_diff()
        .header(&header_from, &header_to)
        .to_string()
}
/// CI providers this command knows how to emit configuration for.
const KNOWN_PROVIDERS: &[&str] = &["github", "buildkite", "gitlab"];

/// Splits `providers` into recognized names and warning messages for
/// unrecognized ones, preserving the input order of both groups.
fn validate_providers(providers: &[String]) -> (Vec<String>, Vec<String>) {
    let (valid, unknown): (Vec<&String>, Vec<&String>) = providers
        .iter()
        .partition(|p| KNOWN_PROVIDERS.contains(&p.as_str()));
    let warnings = unknown
        .into_iter()
        .map(|p| {
            format!(
                "Unknown CI provider '{}'. Known providers: {}",
                p,
                KNOWN_PROVIDERS.join(", ")
            )
        })
        .collect();
    (valid.into_iter().cloned().collect(), warnings)
}
/// Entry point for the `sync ci` command: syncs CI configuration for the
/// single project at `request.path`, fanning out to one sync routine per
/// configured (or explicitly requested) provider.
#[instrument(name = "sync_ci", skip_all)]
pub async fn execute_sync_ci(
request: CiSyncRequest<'_>,
executor: &CommandExecutor,
) -> Result<String> {
tracing::info!("Starting sync ci command");
let dir_path = Path::new(request.path);
// Resolve the module once, collect all of its projects, and remember the
// canonical target path for matching below.
let (projects, repo_root, target_path) = {
let target_path = dir_path.canonicalize().map_err(|e| cuenv_core::Error::Io {
source: e,
path: Some(dir_path.to_path_buf().into_boxed_path()),
operation: "canonicalize path".to_string(),
})?;
let module = executor.get_module(&target_path)?;
let projects = ProjectInfo::collect_from_module(&module)?;
(projects, module.root.clone(), target_path)
};
// Keep only the project(s) whose canonical directory is exactly the target.
let target_projects: Vec<_> = projects
.into_iter()
.filter(|project| {
project
.project_path
.canonicalize()
.ok()
.is_some_and(|path| path == target_path)
})
.collect();
if target_projects.is_empty() {
// Running at the module root with no project there is not an error.
if repo_root == target_path {
return Ok("No CI configuration found.".to_string());
}
return Err(cuenv_core::Error::configuration(format!(
"No cuenv project found at path: {}. Run 'cuenv info' to inspect project layout or use 'cuenv sync -A' to sync all projects.",
dir_path.display()
)));
}
// An explicit --provider overrides the providers declared in the CI config.
let providers: Vec<String> = if let Some(p) = request.options.provider {
vec![p.to_string()]
} else {
let ci_config = target_projects.first().and_then(|p| p.config.ci.as_ref());
match ci_config {
Some(ci) if !ci.providers.is_empty() => ci.providers.clone(),
_ => {
return Ok(
"No CI providers configured. Add 'providers: [\"github\"]' to your ci config."
.to_string(),
);
}
}
};
// Unknown provider names are warned about and dropped, not fatal — unless
// nothing valid remains.
let (valid_providers, warnings) = validate_providers(&providers);
for warning in &warnings {
tracing::warn!("{}", warning);
}
if valid_providers.is_empty() {
return Err(cuenv_core::Error::configuration(format!(
"No valid CI providers specified. Known providers: {}",
KNOWN_PROVIDERS.join(", ")
)));
}
let mut outputs = Vec::new();
let mut errors: Vec<(String, cuenv_core::Error)> = Vec::new();
for prov in &valid_providers {
let result = match prov.as_str() {
"github" => {
let github_request = GithubSyncRequest {
repo_root: &repo_root,
options: request.options,
projects: &target_projects,
};
execute_sync_github(github_request).await
}
"buildkite" => {
let buildkite_request = BuildkiteSyncRequest {
repo_root: &repo_root,
options: request.options,
};
execute_sync_buildkite(&buildkite_request)
}
"gitlab" => {
// Recognized but not implemented yet; silently skipped.
tracing::debug!("GitLab CI sync not yet implemented");
continue;
}
_ => Err(cuenv_core::Error::configuration(format!(
"Unsupported CI provider: {prov}. Supported: {}",
KNOWN_PROVIDERS.join(", ")
))),
};
match result {
Ok(output) if !output.is_empty() => outputs.push(output),
// With an explicit --provider a failure is fatal; otherwise provider
// failures are collected and summarized at the end.
Ok(_) => {} Err(e) => {
if request.options.provider.is_some() {
return Err(e);
}
tracing::debug!("Skipping {prov}: {e}");
errors.push((prov.clone(), e));
}
}
}
if outputs.is_empty() {
if errors.is_empty() {
Ok("No CI configuration found.".to_string())
} else {
// All providers failed: report the summary as (non-error) output so a
// multi-provider run still completes.
let error_summary: Vec<String> = errors
.iter()
.map(|(prov, e)| format!("{prov}: {e}"))
.collect();
Ok(format!(
"CI sync failed for all providers:\n{}",
error_summary.join("\n")
))
}
} else {
Ok(outputs.join("\n"))
}
}
/// Runs `execute_sync_ci` for every project discoverable from the current
/// working directory, labeling each project's output (or error) with its
/// name. Per-project failures are reported inline rather than aborting the
/// whole workspace sync.
#[instrument(name = "sync_ci_workspace", skip_all)]
pub async fn execute_sync_ci_workspace(
    request: CiWorkspaceSyncRequest<'_>,
    executor: &CommandExecutor,
) -> Result<String> {
    // Discover every project reachable from the current working directory.
    let cwd = std::env::current_dir().map_err(|e| {
        cuenv_core::Error::configuration(format!("Failed to get current directory: {e}"))
    })?;
    let module = executor.discover_all_modules(&cwd)?;
    let projects = ProjectInfo::collect_from_module(&module)?;
    if projects.is_empty() {
        return Ok("No projects with CI configuration found.".to_string());
    }
    let mut outputs = Vec::new();
    for project in &projects {
        let project_path_str = project.project_path.to_string_lossy();
        let ci_request = CiSyncRequest {
            path: &project_path_str,
            package: request.package,
            options: request.options,
        };
        match execute_sync_ci(ci_request, executor).await {
            Ok(output) => {
                if !output.is_empty() {
                    outputs.push(format!("[{}]\n{}", project.config.name, output));
                }
            }
            Err(e) => outputs.push(format!("[{}] Error: {}", project.config.name, e)),
        }
    }
    if outputs.is_empty() {
        return Ok("No CI workflows to sync.".to_string());
    }
    Ok(outputs.join("\n\n"))
}
/// Generates GitHub Actions workflows for the given projects' CI pipelines
/// and writes (or checks) them under `<repo_root>/.github/workflows`.
///
/// In check mode, returns an error listing out-of-sync workflow files; in
/// dry-run mode, reports what would change without writing; otherwise writes
/// the files and reports each one created, updated, or left unchanged.
///
/// Fix: the report messages previously emitted the literal placeholder
/// "(unknown)" (via argument-less `format!` calls) instead of the workflow
/// filename; they now interpolate `filename` so the output identifies which
/// file is missing/changed.
#[allow(clippy::too_many_lines)]
#[instrument(name = "sync_github", skip_all)]
async fn execute_sync_github(request: GithubSyncRequest<'_>) -> Result<String> {
    let GithubSyncRequest {
        repo_root,
        options,
        projects,
    } = request;
    if projects.is_empty() {
        return Err(cuenv_core::Error::configuration(
            "No cuenv projects found. Ensure env.cue files declare 'package cuenv'",
        ));
    }
    // Generate every (filename, yaml content) pair up front so check mode and
    // write mode operate on the same set.
    let mut all_workflows: Vec<(String, String)> = Vec::new();
    for project in projects {
        let Some(ci) = &project.config.ci else {
            continue;
        };
        for (pipeline_name, pipeline) in &ci.pipelines {
            let workflows = generate_github_workflow_for_project(project, pipeline_name, pipeline)?;
            all_workflows.extend(workflows);
        }
    }
    if all_workflows.is_empty() {
        return Ok(String::new());
    }
    let workflows_dir = repo_root.join(".github/workflows");
    let mut output_lines = Vec::new();
    if options.check {
        // Check mode: compare on-disk workflows against the generated content
        // without writing anything.
        let mut out_of_sync = Vec::new();
        for (filename, content) in &all_workflows {
            let path = workflows_dir.join(filename);
            if path.exists() {
                let existing =
                    std::fs::read_to_string(&path).map_err(|e| cuenv_core::Error::Io {
                        source: e,
                        path: Some(path.clone().into_boxed_path()),
                        operation: "read workflow file".to_string(),
                    })?;
                if existing != *content {
                    out_of_sync.push(filename.clone());
                }
            } else {
                out_of_sync.push(format!("{filename} (missing)"));
            }
        }
        if !out_of_sync.is_empty() {
            return Err(cuenv_core::Error::configuration(format!(
                "GitHub workflows out of sync: {}. Run 'cuenv sync ci' to update.",
                out_of_sync.join(", ")
            )));
        }
        return Ok(format!(
            "GitHub: {} workflow(s) in sync",
            all_workflows.len()
        ));
    }
    for (filename, content) in &all_workflows {
        let workflow_path = workflows_dir.join(filename);
        let exists = workflow_path.exists();
        if exists && !options.dry_run.is_dry_run() {
            // Skip the write entirely when the file already matches.
            let existing = std::fs::read_to_string(&workflow_path).unwrap_or_default();
            if existing == *content {
                output_lines.push(format!("GitHub: {filename} (unchanged)"));
                continue;
            }
        }
        if options.dry_run.is_dry_run() {
            if exists {
                output_lines.push(format!("GitHub: Would update {filename}"));
            } else {
                output_lines.push(format!("GitHub: Would create {filename}"));
            }
        } else {
            if !workflows_dir.exists() {
                std::fs::create_dir_all(&workflows_dir).map_err(|e| cuenv_core::Error::Io {
                    source: e,
                    path: Some(workflows_dir.clone().into_boxed_path()),
                    operation: "create directory".to_string(),
                })?;
            }
            std::fs::write(&workflow_path, content).map_err(|e| cuenv_core::Error::Io {
                source: e,
                path: Some(workflow_path.clone().into_boxed_path()),
                operation: "write workflow file".to_string(),
            })?;
            if exists {
                output_lines.push(format!("GitHub: Updated {filename}"));
            } else {
                output_lines.push(format!("GitHub: Created {filename}"));
            }
        }
    }
    Ok(output_lines.join("\n"))
}
/// Everything needed to emit one pipeline's workflow for a single project:
/// the compiled IR tasks plus provider configuration and pipeline metadata.
struct PipelineContext {
/// Whether the pipeline is release-triggered (`when.release` is set).
is_release: bool,
/// Thin vs expanded emission mode.
mode: cuenv_core::ci::PipelineMode,
/// GitHub settings resolved for this specific pipeline.
github_config: cuenv_github::config::GitHubConfig,
/// Trigger condition (taken from the compiled IR, or derived from `when`).
trigger: cuenv_ci::ir::TriggerCondition,
/// Project name, used to prefix generated workflow names.
project_name: Option<String>,
/// Project path relative to the module root; `None` for a root project.
project_path: Option<String>,
/// Optional deployment environment for the pipeline.
environment: Option<String>,
/// Runtimes carried over from the compiled IR.
runtimes: Vec<cuenv_ci::ir::Runtime>,
/// Phase tasks plus the pipeline's filtered regular tasks.
tasks: Vec<cuenv_ci::ir::Task>,
/// Task references exactly as declared in the pipeline config.
pipeline_tasks: Vec<cuenv_core::ci::PipelineTask>,
}
impl PipelineContext {
    /// Lowers this context into the provider-agnostic intermediate
    /// representation for the named pipeline.
    fn to_ir(&self, pipeline_name: &str) -> cuenv_ci::ir::IntermediateRepresentation {
        let declared_task_names = self
            .pipeline_tasks
            .iter()
            .map(|t| t.task_name().to_string())
            .collect();
        let metadata = cuenv_ci::ir::PipelineMetadata {
            name: pipeline_name.to_string(),
            mode: self.mode,
            environment: self.environment.clone(),
            requires_onepassword: false,
            project_name: self.project_name.clone(),
            project_path: self.project_path.clone(),
            trigger: Some(self.trigger.clone()),
            pipeline_tasks: declared_task_names,
            pipeline_task_defs: self.pipeline_tasks.clone(),
        };
        cuenv_ci::ir::IntermediateRepresentation {
            version: "1.5".to_string(),
            pipeline: metadata,
            runtimes: self.runtimes.clone(),
            tasks: self.tasks.clone(),
        }
    }

    /// Tasks that are not bound to a lifecycle phase.
    fn regular_tasks(&self) -> Vec<&cuenv_ci::ir::Task> {
        let mut regular = Vec::new();
        for task in &self.tasks {
            if task.phase.is_none() {
                regular.push(task);
            }
        }
        regular
    }
}
fn has_matrix_tasks(pipeline_tasks: &[cuenv_core::ci::PipelineTask]) -> bool {
pipeline_tasks
.iter()
.any(cuenv_core::ci::PipelineTask::has_matrix_dimensions)
}
/// Selects and runs the appropriate workflow emitter for one pipeline of one
/// project, returning zero or more `(filename, yaml)` pairs.
fn generate_github_workflow_for_project(
    project: &ProjectInfo,
    pipeline_name: &str,
    pipeline: &cuenv_core::ci::Pipeline,
) -> Result<Vec<(String, String)>> {
    use cuenv_core::ci::PipelineMode;
    let ctx = build_project_pipeline_context(project, pipeline_name, pipeline)?;
    // Matrix tasks always take the matrix emitter, regardless of mode.
    if has_matrix_tasks(&ctx.pipeline_tasks) {
        return emit_matrix_workflow(pipeline_name, &ctx);
    }
    match ctx.mode {
        PipelineMode::Thin => emit_thin_workflow(pipeline_name, &ctx),
        PipelineMode::Expanded if ctx.is_release => emit_release_workflow(pipeline_name, &ctx),
        // An expanded pipeline with no tasks produces no workflow at all.
        PipelineMode::Expanded if ctx.tasks.is_empty() => Ok(Vec::new()),
        PipelineMode::Expanded => emit_standard_workflow(pipeline_name, &ctx),
    }
}
/// Compiles one pipeline of one project into a `PipelineContext`: runs the
/// CI compiler over the project manifest, then combines phase tasks with the
/// pipeline's filtered regular tasks and resolves the trigger condition.
fn build_project_pipeline_context(
project: &ProjectInfo,
pipeline_name: &str,
pipeline: &cuenv_core::ci::Pipeline,
) -> Result<PipelineContext> {
use cuenv_ci::compiler::{Compiler, CompilerOptions};
let ci = project
.config
.ci
.as_ref()
.ok_or_else(|| cuenv_core::Error::configuration("Project has no CI configuration"))?;
// A pipeline is "release" when its `when` clause carries a release trigger.
let is_release = pipeline.when.as_ref().is_some_and(|w| w.release.is_some());
// Root projects (empty relative path) pass no project path to the compiler.
let project_path_for_compiler = if project.relative_path.as_os_str().is_empty() {
None
} else {
Some(project.relative_path.to_string_lossy().to_string())
};
let options = CompilerOptions {
pipeline_name: Some(pipeline_name.to_string()),
pipeline: Some(pipeline.clone()),
ci_mode: true,
module_root: Some(project.module_root.clone()),
project_path: project_path_for_compiler.clone(),
..Default::default()
};
let compiler = Compiler::with_options(project.config.clone(), options);
let ir = compiler
.compile()
.map_err(|e| cuenv_core::Error::configuration(format!("Failed to compile project: {e}")))?;
let pipeline_task_names: Vec<String> = pipeline
.tasks
.iter()
.map(|t| t.task_name().to_string())
.collect();
// Keep only tasks referenced by the pipeline, but always retain phase tasks;
// phase tasks are placed first in the combined list.
let filtered_tasks = cuenv_ci::pipeline::filter_tasks(&pipeline_task_names, ir.tasks.clone());
let phase_tasks: Vec<cuenv_ci::ir::Task> =
ir.tasks.into_iter().filter(|t| t.phase.is_some()).collect();
let mut all_tasks = phase_tasks;
all_tasks.extend(filtered_tasks);
// Prefer the trigger the compiler produced; otherwise derive one from the
// pipeline's `when` clause.
let trigger = ir
.pipeline
.trigger
.unwrap_or_else(|| build_github_trigger_condition(pipeline_name, pipeline, ci));
Ok(PipelineContext {
is_release,
mode: pipeline.mode,
github_config: ci.github_config_for_pipeline(pipeline_name),
trigger,
project_name: Some(project.config.name.clone()),
project_path: project_path_for_compiler,
environment: pipeline.environment.clone(),
runtimes: ir.runtimes,
tasks: all_tasks,
pipeline_tasks: pipeline.tasks.clone(),
})
}
fn emit_release_workflow(
pipeline_name: &str,
ctx: &PipelineContext,
) -> Result<Vec<(String, String)>> {
use cuenv_github::workflow::{GitHubActionsEmitter, ReleaseWorkflowBuilder};
let ir = ctx.to_ir(pipeline_name);
let emitter = GitHubActionsEmitter::from_config(&ctx.github_config).with_nix();
let workflow = ReleaseWorkflowBuilder::new(emitter).build(&ir);
let workflow_name = match &ir.pipeline.project_name {
Some(project) => format!("{project}-{}", ir.pipeline.name),
None => ir.pipeline.name.clone(),
};
let filename = format!("{}.yml", sanitize_workflow_name(&workflow_name));
let yaml = workflow.to_yaml().map_err(|e| {
cuenv_core::Error::configuration(format!("Failed to serialize workflow: {e}"))
})?;
Ok(vec![(filename, yaml)])
}
fn emit_thin_workflow(pipeline_name: &str, ctx: &PipelineContext) -> Result<Vec<(String, String)>> {
use cuenv_github::workflow::GitHubActionsEmitter;
let ir = ctx.to_ir(pipeline_name);
let emitter = GitHubActionsEmitter::from_config(&ctx.github_config).with_nix();
let (filename, yaml) = emitter.emit_thin_workflow(&ir).map_err(|e| {
cuenv_core::Error::configuration(format!("Failed to emit thin workflow: {e}"))
})?;
Ok(vec![(filename, yaml)])
}
/// Produces a stable deduplication key for a `runs-on` specification
/// ("label:…" for a single label, "labels:a|b" for a label list).
fn runner_key(runs_on: &cuenv_github::workflow::schema::RunsOn) -> String {
    use cuenv_github::workflow::schema::RunsOn;
    match runs_on {
        RunsOn::Label(label) => format!("label:{label}"),
        RunsOn::Labels(labels) => format!("labels:{}", labels.join("|")),
    }
}
/// Derives a job-id-safe suffix from a runner specification: lowercased,
/// with '.' and ' ' mapped to '-' and any remaining non-alphanumeric
/// characters (other than '-' and '_') dropped.
fn runner_suffix(runs_on: &cuenv_github::workflow::schema::RunsOn) -> String {
    use cuenv_github::workflow::schema::RunsOn;
    let raw = match runs_on {
        RunsOn::Label(label) => label.clone(),
        RunsOn::Labels(labels) => labels.join("-"),
    };
    let mut suffix = String::with_capacity(raw.len());
    for c in raw.to_lowercase().chars() {
        if c == '.' || c == ' ' {
            suffix.push('-');
        } else if c.is_alphanumeric() || c == '-' || c == '_' {
            suffix.push(c);
        }
    }
    suffix
}
/// Inserts `dependency` at the front of the job's `needs` list unless it is
/// already present anywhere in the list.
fn prepend_need(job: &mut cuenv_github::workflow::schema::Job, dependency: &str) {
    let already_present = job.needs.iter().any(|need| need == dependency);
    if !already_present {
        job.needs.insert(0, dependency.to_string());
    }
}
/// Adds one "build.cuenv" bootstrap job per distinct runner used by `jobs`,
/// makes every existing job depend on its runner's bootstrap job, and moves
/// the bootstrap jobs to the front of the (insertion-ordered) job map.
///
/// If the emitter declines to build a bootstrap job for any runner, the
/// function returns early and leaves `jobs` completely unmodified — the
/// early return happens before any mutation of `jobs`.
fn inject_cuenv_bootstrap_jobs(
jobs: &mut indexmap::IndexMap<String, cuenv_github::workflow::schema::Job>,
ir: &cuenv_ci::ir::IntermediateRepresentation,
emitter: &cuenv_github::workflow::GitHubActionsEmitter,
) {
use cuenv_github::workflow::schema::RunsOn;
use indexmap::IndexMap;
if jobs.is_empty() {
return;
}
// Collect the distinct runners, keyed by a stable string form, keeping the
// first-seen RunsOn value for each.
let mut runners = IndexMap::<String, RunsOn>::new();
for job in jobs.values() {
runners
.entry(runner_key(&job.runs_on))
.or_insert_with(|| job.runs_on.clone());
}
// With multiple runners, each bootstrap job gets a runner-derived suffix so
// ids and display names stay unique.
let multiple_runners = runners.len() > 1;
let mut runner_bootstrap_jobs = IndexMap::<String, String>::new();
let mut bootstrap_jobs = IndexMap::<String, cuenv_github::workflow::schema::Job>::new();
for (key, runs_on) in runners {
let (bootstrap_job_id, display_name) = if multiple_runners {
let suffix = runner_suffix(&runs_on);
(
format!("build-cuenv-{suffix}"),
format!("build.cuenv ({suffix})"),
)
} else {
("build-cuenv".to_string(), "build.cuenv".to_string())
};
let Some(job) = emitter.build_cuenv_bootstrap_job(ir, runs_on, &display_name) else {
return;
};
runner_bootstrap_jobs.insert(key, bootstrap_job_id.clone());
bootstrap_jobs.insert(bootstrap_job_id, job);
}
// Every job now needs its runner's bootstrap job first.
for job in jobs.values_mut() {
if let Some(bootstrap_job_id) = runner_bootstrap_jobs.get(&runner_key(&job.runs_on)) {
prepend_need(job, bootstrap_job_id);
}
}
// Rebuild the map with bootstrap jobs first so they lead the workflow file.
let existing_jobs = std::mem::take(jobs);
jobs.extend(bootstrap_jobs);
jobs.extend(existing_jobs);
}
/// Chooses how a simple job should run: non-release tasks that invoke `nix`
/// directly bypass the cuenv orchestrator; everything else is orchestrated
/// with the pipeline's environment.
fn simple_job_options<'a>(
    ctx: &'a PipelineContext,
    task: &cuenv_ci::ir::Task,
) -> cuenv_github::workflow::SimpleJobOptions<'a> {
    let direct_nix = task.command.first().is_some_and(|command| command == "nix");
    if ctx.is_release || !direct_nix {
        cuenv_github::workflow::SimpleJobOptions::orchestrated(
            ctx.environment.as_ref(),
            ctx.project_path.as_deref(),
        )
    } else {
        cuenv_github::workflow::SimpleJobOptions::direct(ctx.project_path.as_deref())
    }
}
fn emit_standard_workflow(
pipeline_name: &str,
ctx: &PipelineContext,
) -> Result<Vec<(String, String)>> {
use cuenv_github::workflow::GitHubActionsEmitter;
use cuenv_github::workflow::schema::{Concurrency, Workflow};
use indexmap::IndexMap;
let workflow_name = match &ctx.project_name {
Some(project) => format!("{project}-{pipeline_name}"),
None => pipeline_name.to_string(),
};
let ir = ctx.to_ir(pipeline_name);
let emitter = GitHubActionsEmitter::from_config(&ctx.github_config).with_nix();
let mut jobs = IndexMap::new();
for task in ctx.regular_tasks() {
let mut job = emitter.build_simple_job(task, &ir, simple_job_options(ctx, task));
job.needs = task
.depends_on
.iter()
.map(|d| d.replace(['.', ' '], "-"))
.collect();
jobs.insert(task.id.replace(['.', ' '], "-"), job);
}
inject_cuenv_bootstrap_jobs(&mut jobs, &ir, &emitter);
let filename = format!("{}.yml", sanitize_workflow_name(&workflow_name));
let workflow = Workflow {
name: workflow_name.clone(),
on: emitter.build_triggers(&ir, &filename),
concurrency: Some(Concurrency {
group: "${{ github.workflow }}-${{ github.head_ref || github.ref }}".to_string(),
cancel_in_progress: Some(true),
}),
permissions: Some(emitter.build_permissions(&ir)),
env: IndexMap::new(),
jobs,
};
let yaml = workflow.to_yaml().map_err(|e| {
cuenv_core::Error::configuration(format!("Failed to serialize workflow: {e}"))
})?;
Ok(vec![(filename, yaml)])
}
/// Builds the full job map for a matrix-bearing pipeline from the expanded
/// pipeline tasks, then appends any IR tasks the pipeline did not reference
/// explicitly.
///
/// Iteration order matters: matrix expansions encountered earlier register
/// their job ids in `artifact_source_jobs`, and a later empty-matrix task
/// (an aggregation point) depends on everything accumulated so far.
fn build_pipeline_jobs(
expanded_tasks: &[cuenv_core::ci::PipelineTask],
ctx: &PipelineContext,
ir: &cuenv_ci::ir::IntermediateRepresentation,
emitter: &cuenv_github::workflow::GitHubActionsEmitter,
) -> indexmap::IndexMap<String, cuenv_github::workflow::schema::Job> {
use indexmap::IndexMap;
let mut jobs = IndexMap::new();
// Job ids produced by matrix expansions seen so far; aggregation jobs need
// all of them as dependencies.
let mut artifact_source_jobs: HashSet<String> = HashSet::new();
let mut processed_task_names: HashSet<String> = HashSet::new();
for pipeline_task in expanded_tasks {
let task_name = pipeline_task.task_name();
processed_task_names.insert(task_name.to_string());
let job_id = task_name.replace(['.', ' '], "-");
match pipeline_task {
cuenv_core::ci::PipelineTask::Simple(_) | cuenv_core::ci::PipelineTask::Node(_) => {
// Plain tasks map 1:1 to a simple job; tasks missing from the IR
// are silently skipped.
if let Some(ir_task) = ctx.tasks.iter().find(|t| t.id == task_name) {
let mut job =
emitter.build_simple_job(ir_task, ir, simple_job_options(ctx, ir_task));
job.needs = ir_task
.depends_on
.iter()
.map(|dep| dep.replace(['.', ' '], "-"))
.collect();
jobs.insert(job_id, job);
}
}
cuenv_core::ci::PipelineTask::Matrix(matrix_task) => {
if matrix_task.matrix.is_empty() {
// Empty matrix = aggregation job: it needs every matrix job
// seen so far plus its own IR dependencies, deduplicated and
// sorted for deterministic output.
let ir_task = ctx.tasks.iter().find(|t| t.id == task_name);
let mut seen: HashSet<String> = artifact_source_jobs.clone();
let mut combined_needs: Vec<String> =
artifact_source_jobs.iter().cloned().collect();
if let Some(ir_task) = ir_task {
for dep in &ir_task.depends_on {
let dep_job_id = dep.replace(['.', ' '], "-");
if seen.insert(dep_job_id.clone()) {
combined_needs.push(dep_job_id);
}
}
}
combined_needs.sort();
let synthetic_task = create_synthetic_aggregation_task(task_name, matrix_task);
let job = emitter.build_artifact_aggregation_job(
&synthetic_task,
ir,
ctx.environment.as_ref(),
&combined_needs,
ctx.project_path.as_deref(),
);
jobs.insert(job_id, job);
} else {
// Non-empty matrix: expand into one job per combination and
// record each expanded job as an artifact source.
let ir_task = ctx.tasks.iter().find(|t| t.id == task_name);
let outputs = ir_task.map(|t| t.outputs.clone()).unwrap_or_default();
let synthetic_task =
create_synthetic_matrix_task(task_name, matrix_task, outputs);
let arch_runners = ctx
.github_config
.runners
.as_ref()
.and_then(|r| r.arch.clone());
let expanded_jobs = emitter.build_matrix_jobs(
&synthetic_task,
ir,
ctx.environment.as_ref(),
arch_runners.as_ref(),
&[],
ctx.project_path.as_deref(),
);
for (id, job) in expanded_jobs {
artifact_source_jobs.insert(id.clone());
jobs.insert(id, job);
}
}
}
}
}
// Append IR tasks the pipeline didn't reference explicitly (skipping phase
// tasks and anything already emitted under the same job id).
for ir_task in &ctx.tasks {
if ir_task.phase.is_some() {
continue;
}
if processed_task_names.contains(&ir_task.id) {
continue;
}
let job_id = ir_task.id.replace(['.', ' '], "-");
if jobs.contains_key(&job_id) {
continue;
}
let mut job = emitter.build_simple_job(ir_task, ir, simple_job_options(ctx, ir_task));
job.needs = ir_task
.depends_on
.iter()
.map(|dep| dep.replace(['.', ' '], "-"))
.collect();
jobs.insert(job_id, job);
}
jobs
}
/// Builds a placeholder IR task for an empty-matrix pipeline entry, used to
/// emit an artifact-aggregation job whose only work is downloading the
/// declared artifacts.
fn create_synthetic_aggregation_task(
    task_name: &str,
    matrix_task: &cuenv_core::ci::MatrixTask,
) -> cuenv_ci::ir::Task {
    use cuenv_ci::ir::{ArtifactDownload, CachePolicy, Task};
    // Translate each declared artifact into a download instruction; artifact
    // names use '-' in place of '.' to match job-id sanitization.
    let artifact_downloads: Vec<ArtifactDownload> = match matrix_task.artifacts.as_ref() {
        Some(artifacts) => artifacts
            .iter()
            .map(|a| ArtifactDownload {
                name: a.from.replace('.', "-"),
                path: a.to.clone(),
                filter: String::new(),
            })
            .collect(),
        None => Vec::new(),
    };
    let params: BTreeMap<String, String> = matrix_task
        .params
        .clone()
        .unwrap_or_default()
        .into_iter()
        .collect();
    // Everything else is inert: no command, no dependencies, no matrix.
    Task {
        id: task_name.to_string(),
        runtime: None,
        command: vec![],
        shell: false,
        env: BTreeMap::new(),
        secrets: BTreeMap::new(),
        resources: None,
        concurrency_group: None,
        inputs: vec![],
        outputs: vec![],
        depends_on: vec![],
        cache_policy: CachePolicy::Normal,
        deployment: false,
        manual_approval: false,
        matrix: None,
        artifact_downloads,
        params,
        phase: None,
        label: None,
        priority: None,
        contributor: None,
        condition: None,
        provider_hints: None,
    }
}
/// Builds a placeholder IR task carrying the declared matrix dimensions
/// (values sorted for deterministic output) so the emitter can expand one
/// job per combination.
fn create_synthetic_matrix_task(
    task_name: &str,
    matrix_task: &cuenv_core::ci::MatrixTask,
    outputs: Vec<cuenv_ci::ir::OutputDeclaration>,
) -> cuenv_ci::ir::Task {
    use cuenv_ci::ir::{CachePolicy, MatrixConfig, Task};
    // Sort each dimension's values so generated workflows are stable across runs.
    let mut dimensions: BTreeMap<String, Vec<String>> = BTreeMap::new();
    for (key, values) in &matrix_task.matrix {
        let mut sorted_values = values.clone();
        sorted_values.sort();
        dimensions.insert(key.clone(), sorted_values);
    }
    let matrix = MatrixConfig {
        dimensions,
        exclude: vec![],
        include: vec![],
        max_parallel: 0,
        fail_fast: true,
    };
    Task {
        id: task_name.to_string(),
        runtime: None,
        command: vec![],
        shell: false,
        env: BTreeMap::new(),
        secrets: BTreeMap::new(),
        resources: None,
        concurrency_group: None,
        inputs: vec![],
        outputs,
        depends_on: vec![],
        cache_policy: CachePolicy::Normal,
        deployment: false,
        manual_approval: false,
        matrix: Some(matrix),
        artifact_downloads: vec![],
        params: BTreeMap::new(),
        phase: None,
        label: None,
        priority: None,
        contributor: None,
        condition: None,
        provider_hints: None,
    }
}
/// Emit a GitHub Actions workflow for a matrix pipeline, returning
/// `(filename, yaml)` pairs ready to be written under `.github/workflows/`.
///
/// # Errors
/// Returns a configuration error if the workflow fails to serialize to YAML.
fn emit_matrix_workflow(
    pipeline_name: &str,
    ctx: &PipelineContext,
) -> Result<Vec<(String, String)>> {
    use cuenv_github::workflow::GitHubActionsEmitter;
    use cuenv_github::workflow::schema::{Concurrency, Workflow};

    // Prefix with the project name when present so per-project pipelines
    // don't collide on workflow name.
    let workflow_name = match &ctx.project_name {
        Some(project) => format!("{project}-{pipeline_name}"),
        None => pipeline_name.to_string(),
    };
    let ir = ctx.to_ir(pipeline_name);
    let emitter = GitHubActionsEmitter::from_config(&ctx.github_config).with_nix();

    // Names explicitly referenced by the pipeline, used when expanding task
    // groups so explicit entries are not duplicated.
    let explicit_task_names: HashSet<String> = ctx
        .pipeline_tasks
        .iter()
        .map(|pt| pt.task_name().to_string())
        .collect();
    let expanded_tasks = cuenv_ci::pipeline::expand_task_groups(
        &ctx.pipeline_tasks,
        &ctx.tasks,
        &explicit_task_names,
    );

    // Bind mutably up front instead of the previous redundant rebinding
    // (`let jobs = ...; let mut jobs = jobs;`).
    let mut jobs = build_pipeline_jobs(&expanded_tasks, ctx, &ir, &emitter);
    inject_cuenv_bootstrap_jobs(&mut jobs, &ir, &emitter);

    let filename = format!("{}.yml", sanitize_workflow_name(&workflow_name));
    let workflow = Workflow {
        // `workflow_name` is not used after this point, so move it instead of
        // cloning.
        name: workflow_name,
        on: emitter.build_triggers(&ir, &filename),
        // Cancel superseded runs for the same branch/PR.
        concurrency: Some(Concurrency {
            group: "${{ github.workflow }}-${{ github.head_ref || github.ref }}".to_string(),
            cancel_in_progress: Some(true),
        }),
        permissions: Some(emitter.build_permissions(&ir)),
        env: indexmap::IndexMap::new(),
        jobs,
    };
    let yaml = workflow.to_yaml().map_err(|e| {
        cuenv_core::Error::configuration(format!("Failed to serialize workflow: {e}"))
    })?;
    Ok(vec![(filename, yaml)])
}
/// Normalize a workflow name into a safe filename stem: lowercase, spaces
/// become hyphens, and anything that is not alphanumeric, `-`, or `_` is
/// dropped.
fn sanitize_workflow_name(name: &str) -> String {
    let mut sanitized = String::with_capacity(name.len());
    for ch in name.to_lowercase().chars() {
        // Spaces map to hyphens before the keep/drop decision.
        let ch = if ch == ' ' { '-' } else { ch };
        if ch.is_alphanumeric() || ch == '-' || ch == '_' {
            sanitized.push(ch);
        }
    }
    sanitized
}
/// Translate a pipeline's optional `when` clause into the IR trigger
/// condition used for GitHub workflow generation.
fn build_github_trigger_condition(
    _pipeline_name: &str,
    pipeline: &cuenv_core::ci::Pipeline,
    _ci_config: &cuenv_core::ci::CI,
) -> cuenv_ci::ir::TriggerCondition {
    use cuenv_ci::ir::{ManualTriggerConfig, TriggerCondition, WorkflowDispatchInputDef};
    use cuenv_core::ci::ManualTrigger;

    let when = pipeline.when.as_ref();

    // List-like trigger sources default to empty when the clause is absent.
    let branches = when
        .and_then(|w| w.branch.as_ref())
        .map_or_else(Vec::new, cuenv_core::ci::StringOrVec::to_vec);
    let scheduled = when
        .and_then(|w| w.scheduled.as_ref())
        .map_or_else(Vec::new, cuenv_core::ci::StringOrVec::to_vec);
    let pull_request = when.and_then(|w| w.pull_request);
    let release = when.and_then(|w| w.release.clone()).unwrap_or_default();

    // Manual triggers come in two shapes: a bare on/off flag, or a full
    // workflow-dispatch input map (implicitly enabled).
    let manual = when
        .and_then(|w| w.manual.as_ref())
        .map(|trigger| match trigger {
            ManualTrigger::Enabled(flag) => ManualTriggerConfig {
                enabled: *flag,
                inputs: BTreeMap::new(),
            },
            ManualTrigger::WithInputs(defs) => {
                let mut inputs = BTreeMap::new();
                for (name, def) in defs {
                    inputs.insert(
                        name.clone(),
                        WorkflowDispatchInputDef {
                            description: def.description.clone(),
                            // Inputs are optional unless explicitly required.
                            required: def.required.unwrap_or(false),
                            default: def.default.clone(),
                            input_type: def.input_type.clone(),
                            options: def.options.clone().unwrap_or_default(),
                        },
                    );
                }
                ManualTriggerConfig {
                    enabled: true,
                    inputs,
                }
            }
        });

    TriggerCondition {
        branches,
        pull_request,
        scheduled,
        release,
        manual,
        paths: Vec::new(),
    }
}
/// Write (or verify) the static Buildkite bootstrap pipeline at
/// `.buildkite/pipeline.yml` under `repo_root`.
///
/// In check mode the on-disk file is only compared against the expected
/// content, returning an error when it is missing or stale. In dry-run mode
/// the intended change is reported without touching the filesystem.
///
/// # Errors
/// Returns a configuration error in check mode when the file is missing or
/// out of sync, and an I/O error when directory creation or the write fails.
#[instrument(name = "sync_buildkite", skip_all)]
fn execute_sync_buildkite(request: &BuildkiteSyncRequest<'_>) -> Result<String> {
    let BuildkiteSyncRequest { repo_root, options } = *request;
    // Bootstrap pipeline: install Nix, build cuenv, then generate and upload
    // the real pipeline via `cuenv ci --dynamic buildkite`.
    // NOTE(review): the `echo ... >> "$BUILDKITE_ENV_FILE"` step appends a
    // bare path rather than a KEY=VALUE pair — confirm this matches the
    // Buildkite agent's expected env-file format.
    let pipeline_content = r#"# Buildkite bootstrap pipeline for cuenv
# This installs Nix, builds cuenv, then generates a dynamic pipeline
steps:
  - label: ":nix: Install Nix"
    key: install-nix
    command: |
      curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install linux --no-confirm --init none
      . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh
      nix --version
  - label: ":package: Build cuenv"
    key: build-cuenv
    depends_on: install-nix
    command: |
      . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh
      nix build .#cuenv --accept-flake-config
      echo "$(pwd)/result/bin" >> "$BUILDKITE_ENV_FILE"
  - label: ":pipeline: Generate Pipeline"
    depends_on: build-cuenv
    command: cuenv ci --dynamic buildkite | buildkite-agent pipeline upload
"#;
    let buildkite_dir = repo_root.join(".buildkite");
    let pipeline_path = buildkite_dir.join("pipeline.yml");

    if options.check {
        if pipeline_path.exists() {
            // A read failure is treated as "out of sync" rather than a hard
            // I/O error, so an unreadable file still triggers a resync hint.
            let existing = std::fs::read_to_string(&pipeline_path).unwrap_or_default();
            if existing == pipeline_content {
                return Ok("Buildkite: pipeline.yml in sync".to_string());
            }
            return Err(cuenv_core::Error::configuration(
                "Buildkite pipeline.yml out of sync. Run 'cuenv sync ci --provider buildkite' to update.",
            ));
        }
        return Err(cuenv_core::Error::configuration(
            "Buildkite pipeline.yml missing. Run 'cuenv sync ci --provider buildkite' to create.",
        ));
    }

    let exists = pipeline_path.exists();
    // Compare before deciding on any action so identical content is reported
    // as unchanged. Previously this comparison was skipped during a dry run,
    // which made a dry run claim it "Would update" even when nothing
    // differed; reading the file is safe in dry-run mode.
    if exists {
        let existing = std::fs::read_to_string(&pipeline_path).unwrap_or_default();
        if existing == pipeline_content {
            return Ok("Buildkite: pipeline.yml (unchanged)".to_string());
        }
    }
    if options.dry_run.is_dry_run() {
        if exists {
            return Ok("Buildkite: Would update pipeline.yml".to_string());
        }
        return Ok("Buildkite: Would create pipeline.yml".to_string());
    }
    if !buildkite_dir.exists() {
        std::fs::create_dir_all(&buildkite_dir).map_err(|e| cuenv_core::Error::Io {
            source: e,
            path: Some(buildkite_dir.clone().into_boxed_path()),
            operation: "create directory".to_string(),
        })?;
    }
    std::fs::write(&pipeline_path, pipeline_content).map_err(|e| cuenv_core::Error::Io {
        source: e,
        path: Some(pipeline_path.clone().into_boxed_path()),
        operation: "write pipeline file".to_string(),
    })?;
    if exists {
        Ok("Buildkite: Updated pipeline.yml".to_string())
    } else {
        Ok("Buildkite: Created pipeline.yml".to_string())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use cuenv_core::ci::{MatrixTask, PipelineTask, TaskRef};
    use std::collections::BTreeMap;

    #[test]
    fn test_has_matrix_tasks_empty() {
        // No tasks at all: trivially no matrix tasks.
        let pipeline: Vec<PipelineTask> = Vec::new();
        assert!(!has_matrix_tasks(&pipeline));
    }

    #[test]
    fn test_has_matrix_tasks_simple_only() {
        // Plain task references never count as matrix tasks.
        let pipeline = vec![
            PipelineTask::Simple(TaskRef::from_name("build")),
            PipelineTask::Simple(TaskRef::from_name("test")),
        ];
        assert!(!has_matrix_tasks(&pipeline));
    }

    #[test]
    fn test_has_matrix_tasks_with_matrix() {
        // A matrix entry with at least one dimension is detected.
        let dims = BTreeMap::from([(
            "arch".to_string(),
            vec!["linux-x64".to_string(), "darwin-arm64".to_string()],
        )]);
        let pipeline = vec![PipelineTask::Matrix(MatrixTask {
            task_type: Some("matrix".to_string()),
            task: TaskRef::from_name("cargo.build"),
            matrix: dims,
            artifacts: None,
            params: None,
        })];
        assert!(has_matrix_tasks(&pipeline));
    }

    #[test]
    fn test_has_matrix_tasks_aggregation_only() {
        // An empty matrix with declared artifacts is an aggregation task,
        // not a fan-out matrix, so it must not be detected.
        let pipeline = vec![PipelineTask::Matrix(MatrixTask {
            task_type: Some("matrix".to_string()),
            task: TaskRef::from_name("publish"),
            matrix: BTreeMap::new(),
            artifacts: Some(vec![]),
            params: None,
        })];
        assert!(!has_matrix_tasks(&pipeline));
    }

    #[test]
    fn test_has_matrix_tasks_mixed() {
        // One real matrix entry among simple tasks is enough.
        let dims = BTreeMap::from([("arch".to_string(), vec!["linux-x64".to_string()])]);
        let pipeline = vec![
            PipelineTask::Simple(TaskRef::from_name("check")),
            PipelineTask::Matrix(MatrixTask {
                task_type: Some("matrix".to_string()),
                task: TaskRef::from_name("build"),
                matrix: dims,
                artifacts: None,
                params: None,
            }),
            PipelineTask::Simple(TaskRef::from_name("deploy")),
        ];
        assert!(has_matrix_tasks(&pipeline));
    }
}