1use indexmap::IndexMap;
34use serde::Deserialize;
35
36use crate::{Job, JobStatus, Workflow};
37
/// Top-level workflow definition as parsed from YAML or JSON.
///
/// Mirrors a GitHub-Actions-like schema: a `name`, an `on:` trigger,
/// optional workflow-wide `env`, and a map of job id -> job definition.
/// `IndexMap` preserves declaration order for env vars and jobs.
#[derive(Debug, Deserialize)]
pub struct WorkflowDef {
    /// Human-readable workflow name.
    pub name: String,
    /// The `on:` trigger; renamed because `on` is awkward as a Rust field name.
    #[serde(rename = "on")]
    pub trigger: TriggerDef,
    /// Workflow-level environment variables (defaults to empty).
    #[serde(default)]
    pub env: IndexMap<String, String>,
    /// Jobs keyed by job id, in declaration order.
    pub jobs: IndexMap<String, JobDef>,
}
48
/// The `on:` trigger, accepted in any of three input shapes:
/// a bare string (`on: push`), a list (`on: [push, pull_request]`),
/// or a map with per-event configuration. `untagged` makes serde try
/// each variant in order against the raw value.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum TriggerDef {
    /// `on: push`
    Simple(String),
    /// `on: [push, pull_request]`
    List(Vec<String>),
    /// `on: { push: { branches: [...] }, ... }` — values kept as raw YAML.
    Structured(IndexMap<String, serde_yaml::Value>),
}
57
58impl TriggerDef {
59 pub fn display(&self) -> String {
60 match self {
61 TriggerDef::Simple(s) => format!("on: {s}"),
62 TriggerDef::List(v) => format!("on: [{}]", v.join(", ")),
63 TriggerDef::Structured(m) => {
64 let keys: Vec<&str> = m.keys().map(|k| k.as_str()).collect();
65 format!("on: [{}]", keys.join(", "))
66 }
67 }
68 }
69}
70
/// A single job inside a workflow.
///
/// Exactly one of `run` (a single command) or `steps` (an ordered list)
/// is expected; `into_workflow` reports an error when both are absent.
#[derive(Debug, Deserialize)]
pub struct JobDef {
    /// Display name; falls back to the job id when omitted.
    pub name: Option<String>,
    /// Job ids this job depends on; accepts a string or a list of strings.
    #[serde(default)]
    pub needs: Needs,
    /// Single shell command (alternative to `steps`).
    pub run: Option<String>,
    /// Ordered steps (alternative to `run`; `steps` wins when both are set).
    #[serde(default)]
    pub steps: Vec<StepDef>,
    /// Job-level environment variables, exported after the workflow's.
    #[serde(default)]
    pub env: IndexMap<String, String>,
    // NOTE(review): parsed but not consumed by `into_workflow` in this
    // file — confirm where `timeout` is enforced.
    pub timeout: Option<u64>,
    /// Conditional expression; renamed because `if` is a Rust keyword.
    #[serde(rename = "if")]
    pub condition: Option<String>,
    /// Runner labels the job requires (defaults to none).
    #[serde(default)]
    pub labels: Vec<String>,
    /// Maximum retry count (defaults to 0).
    #[serde(default)]
    pub retries: u32,
}
99
/// A job's dependency declaration, accepted as absent, a single string,
/// or a list of strings (`needs: a` vs `needs: [a, b]`).
#[derive(Debug, Default, Deserialize)]
#[serde(untagged)]
pub enum Needs {
    /// No dependencies — the value `#[serde(default)]` supplies.
    #[default]
    None,
    /// `needs: some_job`
    Single(String),
    /// `needs: [job_a, job_b]`
    List(Vec<String>),
}
109
110impl Needs {
111 pub fn to_vec(&self) -> Vec<String> {
112 match self {
113 Needs::None => vec![],
114 Needs::Single(s) => vec![s.clone()],
115 Needs::List(v) => v.clone(),
116 }
117 }
118}
119
/// One step within a job's `steps` list.
#[derive(Debug, Deserialize)]
pub struct StepDef {
    /// Optional step id, used as the display label when `name` is absent.
    pub id: Option<String>,
    /// Optional display name, echoed as `=== name ===` before the command.
    pub name: Option<String>,
    /// Shell command; steps without `run` are skipped by `build_command`.
    pub run: Option<String>,
    // NOTE(review): parsed but not consulted by `build_command` in this
    // file — confirm where step conditions are evaluated.
    #[serde(rename = "if")]
    pub condition: Option<String>,
    /// Step-level environment variables, exported just before this step.
    #[serde(default)]
    pub env: IndexMap<String, String>,
}
131
132impl WorkflowDef {
133 pub fn from_yaml(yaml: &str) -> Result<Self, String> {
135 serde_yaml::from_str(yaml).map_err(|e| format!("YAML parse error: {e}"))
136 }
137
138 pub fn from_json(json: &str) -> Result<Self, String> {
140 serde_json::from_str(json).map_err(|e| format!("JSON parse error: {e}"))
141 }
142
143 pub fn parse(input: &str) -> Result<Self, String> {
145 let trimmed = input.trim_start();
146 if trimmed.starts_with('{') {
147 Self::from_json(input)
148 } else {
149 Self::from_yaml(input)
150 }
151 }
152
153 pub fn from_file_contents(contents: &str, filename: &str) -> Result<Self, String> {
155 if filename.ends_with(".json") {
156 Self::from_json(contents)
157 } else if filename.ends_with(".yml") || filename.ends_with(".yaml") {
158 Self::from_yaml(contents)
159 } else {
160 Self::parse(contents)
161 }
162 }
163
164 pub fn into_workflow(self, id: &str) -> Result<Workflow, String> {
169 let trigger = self.trigger.display();
170 let mut jobs = Vec::with_capacity(self.jobs.len());
171
172 for (job_id, job_def) in &self.jobs {
173 let name = job_def.name.clone().unwrap_or_else(|| job_id.clone());
174
175 let command = build_command(job_def, &self.env)?;
176 let depends_on = job_def.needs.to_vec();
177
178 for dep in &depends_on {
180 if !self.jobs.contains_key(dep) {
181 return Err(format!(
182 "Job '{job_id}' depends on '{dep}', which doesn't exist"
183 ));
184 }
185 }
186
187 jobs.push(Job {
188 id: job_id.clone(),
189 name,
190 status: JobStatus::Queued,
191 command,
192 duration_secs: None,
193 started_at: None,
194 depends_on,
195 output: None,
196 required_labels: job_def.labels.clone(),
197 max_retries: job_def.retries,
198 attempt: 0,
199 metadata: std::collections::HashMap::new(),
200 ports: vec![],
201 children: None,
202 collapsed: false,
203 });
204 }
205
206 Ok(Workflow {
207 id: id.to_string(),
208 name: self.name,
209 trigger,
210 jobs,
211 })
212 }
213}
214
215fn build_command(job: &JobDef, global_env: &IndexMap<String, String>) -> Result<String, String> {
217 let mut env_exports = Vec::new();
219 for (k, v) in global_env {
220 env_exports.push(format!("export {k}={}", shell_quote(v)));
221 }
222 for (k, v) in &job.env {
223 env_exports.push(format!("export {k}={}", shell_quote(v)));
224 }
225
226 let commands = if !job.steps.is_empty() {
227 let step_cmds: Result<Vec<String>, String> = job
229 .steps
230 .iter()
231 .enumerate()
232 .filter_map(|(i, step)| {
233 step.run.as_ref().map(|cmd| {
234 let mut parts = Vec::new();
235 for (k, v) in &step.env {
237 parts.push(format!("export {k}={}", shell_quote(v)));
238 }
239 let default_label = format!("step {}", i + 1);
240 let label = step
241 .name
242 .as_deref()
243 .or(step.id.as_deref())
244 .unwrap_or(&default_label);
245 parts.push(format!("echo '=== {label} ==='"));
246 parts.push(cmd.trim().to_string());
247 Ok(parts.join(" && "))
248 })
249 })
250 .collect();
251 step_cmds?
252 } else if let Some(run) = &job.run {
253 vec![run.trim().to_string()]
254 } else {
255 return Err("Job must have either 'run' or 'steps'".to_string());
256 };
257
258 let mut full = env_exports;
259 full.extend(commands);
260 Ok(full.join(" && "))
261}
262
/// Single-quote `s` for a POSIX shell, escaping embedded single quotes
/// with the standard close-quote / escaped-quote / reopen-quote
/// sequence (`'` becomes `'\''`).
fn shell_quote(s: &str) -> String {
    let escaped = s.replace('\'', "'\\''");
    let mut quoted = String::with_capacity(escaped.len() + 2);
    quoted.push('\'');
    quoted.push_str(&escaped);
    quoted.push('\'');
    quoted
}
266
#[cfg(test)]
mod tests {
    use super::*;

    // Happy path: three jobs, one with a list dependency. Verifies the
    // name, trigger display, job count, and dependency wiring survive
    // the YAML -> WorkflowDef -> Workflow conversion.
    #[test]
    fn parse_simple_workflow() {
        let yaml = r#"
name: CI
on: push

jobs:
  lint:
    name: Lint
    run: cargo clippy

  test:
    name: Test
    run: cargo test

  build:
    name: Build
    needs: [lint, test]
    run: cargo build --release
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("ci-1").unwrap();

        assert_eq!(wf.name, "CI");
        assert_eq!(wf.trigger, "on: push");
        assert_eq!(wf.jobs.len(), 3);
        assert_eq!(wf.jobs[2].depends_on, vec!["lint", "test"]);
    }

    // A job built from `steps` folds every step into one command string
    // containing each step's label and command.
    #[test]
    fn parse_steps_workflow() {
        let yaml = r#"
name: Deploy
on: push

jobs:
  deploy:
    name: Deploy All
    steps:
      - name: Migrate DB
        run: ./migrate.sh
      - name: Deploy App
        run: ./deploy.sh
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("deploy-1").unwrap();

        assert_eq!(wf.jobs.len(), 1);
        assert!(wf.jobs[0].command.contains("Migrate DB"));
        assert!(wf.jobs[0].command.contains("./deploy.sh"));
    }

    // Depending on an unknown job id fails, and the message names the
    // missing dependency.
    #[test]
    fn invalid_dependency_errors() {
        let yaml = r#"
name: Bad
on: push

jobs:
  build:
    needs: [nonexistent]
    run: echo hi
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let result = def.into_workflow("bad-1");
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("nonexistent"));
    }

    // A job with neither `run` nor `steps` is rejected by build_command.
    #[test]
    fn job_without_run_or_steps_errors() {
        let yaml = r#"
name: Bad
on: push

jobs:
  empty:
    name: Empty Job
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let result = def.into_workflow("bad-2");
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .contains("must have either 'run' or 'steps'")
        );
    }

    // An explicitly empty jobs map parses and converts to zero jobs.
    #[test]
    fn empty_jobs_map() {
        let yaml = r#"
name: Empty
on: push

jobs: {}
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("empty-1").unwrap();
        assert_eq!(wf.jobs.len(), 0);
    }

    // `needs: a` (bare string) deserializes via Needs::Single and
    // normalizes to a one-element dependency list.
    #[test]
    fn single_string_dependency() {
        let yaml = r#"
name: Single Dep
on: push

jobs:
  a:
    run: echo a
  b:
    needs: a
    run: echo b
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("single-1").unwrap();
        assert_eq!(wf.jobs[1].depends_on, vec!["a"]);
    }

    // Job ids with dashes/underscores and quoted display names with
    // punctuation pass through untouched.
    #[test]
    fn special_characters_in_job_names() {
        let yaml = r#"
name: Special Chars
on: push

jobs:
  build-linux_x86:
    name: "Build (Linux x86_64)"
    run: echo "building"
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("special-1").unwrap();
        assert_eq!(wf.jobs[0].id, "build-linux_x86");
        assert_eq!(wf.jobs[0].name, "Build (Linux x86_64)");
    }

    // `labels` and `retries` map onto Job::required_labels / max_retries.
    #[test]
    fn labels_and_retries_parsed() {
        let yaml = r#"
name: Config
on: push

jobs:
  deploy:
    name: Deploy
    run: ./deploy.sh
    labels: [linux, aws]
    retries: 3
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("config-1").unwrap();
        assert_eq!(wf.jobs[0].required_labels, vec!["linux", "aws"]);
        assert_eq!(wf.jobs[0].max_retries, 3);
    }

    // Both workflow-level and job-level env vars appear as exports in
    // the generated command.
    #[test]
    fn env_vars_in_command() {
        let yaml = r#"
name: Env
on: push

env:
  GLOBAL: "value"

jobs:
  test:
    run: echo test
    env:
      LOCAL: "local_value"
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("env-1").unwrap();
        assert!(wf.jobs[0].command.contains("export GLOBAL="));
        assert!(wf.jobs[0].command.contains("export LOCAL="));
    }

    // JSON input goes through the same schema via from_json.
    #[test]
    fn json_format_parsing() {
        let json = r#"{
        "name": "JSON Workflow",
        "on": "push",
        "jobs": {
            "test": {
                "run": "echo test"
            }
        }
        }"#;
        let def = WorkflowDef::from_json(json).unwrap();
        let wf = def.into_workflow("json-1").unwrap();
        assert_eq!(wf.name, "JSON Workflow");
        assert_eq!(wf.jobs.len(), 1);
    }

    // Syntactically invalid YAML surfaces as an Err, not a panic.
    #[test]
    fn malformed_yaml_returns_error() {
        let yaml = "this is not valid yaml: [[[";
        assert!(WorkflowDef::from_yaml(yaml).is_err());
    }

    // Embedded single quotes use the '\'' close/escape/reopen sequence.
    #[test]
    fn shell_quote_handles_single_quotes() {
        let result = super::shell_quote("it's a test");
        assert_eq!(result, "'it'\\''s a test'");
    }
}
476}