1use indexmap::IndexMap;
34use serde::Deserialize;
35
36use crate::{Job, JobStatus, Workflow};
37
/// Deserialized top-level workflow definition, as authored in YAML or JSON.
#[derive(Debug, Deserialize)]
pub struct WorkflowDef {
    /// Human-readable workflow name.
    pub name: String,
    /// Trigger specification; serialized under the `on` key (a Rust keyword,
    /// hence the rename).
    #[serde(rename = "on")]
    pub trigger: TriggerDef,
    /// Workflow-wide environment variables, exported ahead of every job command.
    #[serde(default)]
    pub env: IndexMap<String, String>,
    /// Jobs keyed by id; `IndexMap` preserves the author's declaration order.
    pub jobs: IndexMap<String, JobDef>,
}
48
/// The workflow trigger, accepting three YAML/JSON shapes via `untagged`:
/// `on: push`, `on: [push, pull_request]`, or a nested mapping such as
/// `on: { push: { branches: [...] } }`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum TriggerDef {
    /// A single event name.
    Simple(String),
    /// A list of event names.
    List(Vec<String>),
    /// Event names mapped to arbitrary configuration; values are kept opaque
    /// (only the keys are used by `display`).
    Structured(IndexMap<String, serde_yaml::Value>),
}
57
58impl TriggerDef {
59 pub fn display(&self) -> String {
60 match self {
61 TriggerDef::Simple(s) => format!("on: {s}"),
62 TriggerDef::List(v) => format!("on: [{}]", v.join(", ")),
63 TriggerDef::Structured(m) => {
64 let keys: Vec<&str> = m.keys().map(|k| k.as_str()).collect();
65 format!("on: [{}]", keys.join(", "))
66 }
67 }
68 }
69}
70
/// A single job definition inside the `jobs` map.
#[derive(Debug, Deserialize)]
pub struct JobDef {
    /// Display name; `into_workflow` falls back to the job's map key when absent.
    pub name: Option<String>,
    /// Upstream job ids this job depends on (absent, single string, or list).
    #[serde(default)]
    pub needs: Needs,
    /// Single shell command; ignored when `steps` is non-empty (see `build_command`).
    pub run: Option<String>,
    /// Ordered step list; when non-empty, takes precedence over `run`.
    #[serde(default)]
    pub steps: Vec<StepDef>,
    /// Job-level environment variables, exported after the workflow-level ones.
    #[serde(default)]
    pub env: IndexMap<String, String>,
    /// Optional timeout — NOTE(review): presumably seconds, but the unit is not
    /// enforced anywhere visible in this file; confirm against the executor.
    pub timeout: Option<u64>,
    /// Conditional expression; serialized under the `if` key (a Rust keyword).
    #[serde(rename = "if")]
    pub condition: Option<String>,
    /// Runner labels this job requires; copied into `Job::required_labels`.
    #[serde(default)]
    pub labels: Vec<String>,
    /// Maximum retry count; defaults to 0.
    #[serde(default)]
    pub retries: u32,
}
99
/// Job dependency spec: absent, a single job id, or a list of ids.
/// `untagged` lets authors write `needs: a` or `needs: [a, b]` interchangeably.
#[derive(Debug, Default, Deserialize)]
#[serde(untagged)]
pub enum Needs {
    /// No dependencies — the value used by `#[serde(default)]` on `JobDef::needs`.
    #[default]
    None,
    Single(String),
    List(Vec<String>),
}
109
110impl Needs {
111 pub fn to_vec(&self) -> Vec<String> {
112 match self {
113 Needs::None => vec![],
114 Needs::Single(s) => vec![s.clone()],
115 Needs::List(v) => v.clone(),
116 }
117 }
118}
119
/// A single step inside a job's `steps` list.
#[derive(Debug, Deserialize)]
pub struct StepDef {
    /// Optional step id; used as the echo label when `name` is absent.
    pub id: Option<String>,
    /// Optional display name; preferred label in the generated command.
    pub name: Option<String>,
    /// Shell command; steps without `run` are skipped when building the job command.
    pub run: Option<String>,
    /// Conditional expression under the `if` key — NOTE(review): parsed but not
    /// evaluated anywhere visible in this file; confirm the executor uses it.
    #[serde(rename = "if")]
    pub condition: Option<String>,
    /// Step-local environment variables, exported just before this step's command.
    #[serde(default)]
    pub env: IndexMap<String, String>,
}
131
132impl WorkflowDef {
133 pub fn from_yaml(yaml: &str) -> Result<Self, String> {
135 serde_yaml::from_str(yaml).map_err(|e| format!("YAML parse error: {e}"))
136 }
137
138 pub fn from_json(json: &str) -> Result<Self, String> {
140 serde_json::from_str(json).map_err(|e| format!("JSON parse error: {e}"))
141 }
142
143 pub fn parse(input: &str) -> Result<Self, String> {
145 let trimmed = input.trim_start();
146 if trimmed.starts_with('{') {
147 Self::from_json(input)
148 } else {
149 Self::from_yaml(input)
150 }
151 }
152
153 pub fn from_file_contents(contents: &str, filename: &str) -> Result<Self, String> {
155 if filename.ends_with(".json") {
156 Self::from_json(contents)
157 } else if filename.ends_with(".yml") || filename.ends_with(".yaml") {
158 Self::from_yaml(contents)
159 } else {
160 Self::parse(contents)
161 }
162 }
163
164 pub fn into_workflow(self, id: &str) -> Result<Workflow, String> {
169 let trigger = self.trigger.display();
170 let mut jobs = Vec::with_capacity(self.jobs.len());
171
172 for (job_id, job_def) in &self.jobs {
173 let name = job_def.name.clone().unwrap_or_else(|| job_id.clone());
174
175 let command = build_command(job_def, &self.env)?;
176 let depends_on = job_def.needs.to_vec();
177
178 for dep in &depends_on {
180 if !self.jobs.contains_key(dep) {
181 return Err(format!(
182 "Job '{job_id}' depends on '{dep}', which doesn't exist"
183 ));
184 }
185 }
186
187 jobs.push(Job {
188 id: job_id.clone(),
189 name,
190 status: JobStatus::Queued,
191 command,
192 duration_secs: None,
193 started_at: None,
194 depends_on,
195 output: None,
196 required_labels: job_def.labels.clone(),
197 max_retries: job_def.retries,
198 attempt: 0,
199 metadata: std::collections::HashMap::new(),
200 });
201 }
202
203 Ok(Workflow {
204 id: id.to_string(),
205 name: self.name,
206 trigger,
207 jobs,
208 })
209 }
210}
211
212fn build_command(job: &JobDef, global_env: &IndexMap<String, String>) -> Result<String, String> {
214 let mut env_exports = Vec::new();
216 for (k, v) in global_env {
217 env_exports.push(format!("export {k}={}", shell_quote(v)));
218 }
219 for (k, v) in &job.env {
220 env_exports.push(format!("export {k}={}", shell_quote(v)));
221 }
222
223 let commands = if !job.steps.is_empty() {
224 let step_cmds: Result<Vec<String>, String> = job
226 .steps
227 .iter()
228 .enumerate()
229 .filter_map(|(i, step)| {
230 step.run.as_ref().map(|cmd| {
231 let mut parts = Vec::new();
232 for (k, v) in &step.env {
234 parts.push(format!("export {k}={}", shell_quote(v)));
235 }
236 let default_label = format!("step {}", i + 1);
237 let label = step
238 .name
239 .as_deref()
240 .or(step.id.as_deref())
241 .unwrap_or(&default_label);
242 parts.push(format!("echo '=== {label} ==='"));
243 parts.push(cmd.trim().to_string());
244 Ok(parts.join(" && "))
245 })
246 })
247 .collect();
248 step_cmds?
249 } else if let Some(run) = &job.run {
250 vec![run.trim().to_string()]
251 } else {
252 return Err("Job must have either 'run' or 'steps'".to_string());
253 };
254
255 let mut full = env_exports;
256 full.extend(commands);
257 Ok(full.join(" && "))
258}
259
/// Wraps `s` in single quotes for safe use in a shell command, escaping any
/// embedded single quote as `'\''` (close quote, escaped quote, reopen quote).
fn shell_quote(s: &str) -> String {
    let mut quoted = String::with_capacity(s.len() + 2);
    quoted.push('\'');
    for ch in s.chars() {
        if ch == '\'' {
            quoted.push_str("'\\''");
        } else {
            quoted.push(ch);
        }
    }
    quoted.push('\'');
    quoted
}
263
#[cfg(test)]
mod tests {
    use super::*;

    // Happy path: three jobs with a list-form `needs`; IndexMap preserves
    // declaration order, so `build` is jobs[2].
    #[test]
    fn parse_simple_workflow() {
        let yaml = r#"
name: CI
on: push

jobs:
  lint:
    name: Lint
    run: cargo clippy

  test:
    name: Test
    run: cargo test

  build:
    name: Build
    needs: [lint, test]
    run: cargo build --release
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("ci-1").unwrap();

        assert_eq!(wf.name, "CI");
        assert_eq!(wf.trigger, "on: push");
        assert_eq!(wf.jobs.len(), 3);
        assert_eq!(wf.jobs[2].depends_on, vec!["lint", "test"]);
    }

    // Steps are flattened into a single command containing each step's
    // echo label and run command.
    #[test]
    fn parse_steps_workflow() {
        let yaml = r#"
name: Deploy
on: push

jobs:
  deploy:
    name: Deploy All
    steps:
      - name: Migrate DB
        run: ./migrate.sh
      - name: Deploy App
        run: ./deploy.sh
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("deploy-1").unwrap();

        assert_eq!(wf.jobs.len(), 1);
        assert!(wf.jobs[0].command.contains("Migrate DB"));
        assert!(wf.jobs[0].command.contains("./deploy.sh"));
    }

    // `needs` referencing a job id that isn't in the map must be rejected,
    // and the error should name the missing dependency.
    #[test]
    fn invalid_dependency_errors() {
        let yaml = r#"
name: Bad
on: push

jobs:
  build:
    needs: [nonexistent]
    run: echo hi
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let result = def.into_workflow("bad-1");
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("nonexistent"));
    }

    // A job with neither `run` nor `steps` has no command to build.
    #[test]
    fn job_without_run_or_steps_errors() {
        let yaml = r#"
name: Bad
on: push

jobs:
  empty:
    name: Empty Job
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let result = def.into_workflow("bad-2");
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .contains("must have either 'run' or 'steps'")
        );
    }

    // An explicitly empty jobs map is valid and yields an empty workflow.
    #[test]
    fn empty_jobs_map() {
        let yaml = r#"
name: Empty
on: push

jobs: {}
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("empty-1").unwrap();
        assert_eq!(wf.jobs.len(), 0);
    }

    // `needs: a` (bare string) deserializes via Needs::Single and flattens
    // to a one-element dependency list.
    #[test]
    fn single_string_dependency() {
        let yaml = r#"
name: Single Dep
on: push

jobs:
  a:
    run: echo a
  b:
    needs: a
    run: echo b
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("single-1").unwrap();
        assert_eq!(wf.jobs[1].depends_on, vec!["a"]);
    }

    // Job ids with dashes/underscores and quoted display names survive parsing.
    #[test]
    fn special_characters_in_job_names() {
        let yaml = r#"
name: Special Chars
on: push

jobs:
  build-linux_x86:
    name: "Build (Linux x86_64)"
    run: echo "building"
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("special-1").unwrap();
        assert_eq!(wf.jobs[0].id, "build-linux_x86");
        assert_eq!(wf.jobs[0].name, "Build (Linux x86_64)");
    }

    // `labels` and `retries` flow through to required_labels / max_retries.
    #[test]
    fn labels_and_retries_parsed() {
        let yaml = r#"
name: Config
on: push

jobs:
  deploy:
    name: Deploy
    run: ./deploy.sh
    labels: [linux, aws]
    retries: 3
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("config-1").unwrap();
        assert_eq!(wf.jobs[0].required_labels, vec!["linux", "aws"]);
        assert_eq!(wf.jobs[0].max_retries, 3);
    }

    // Both workflow-level and job-level env vars become `export` prefixes
    // in the generated command.
    #[test]
    fn env_vars_in_command() {
        let yaml = r#"
name: Env
on: push

env:
  GLOBAL: "value"

jobs:
  test:
    run: echo test
  env:
    LOCAL: "local_value"
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("env-1").unwrap();
        assert!(wf.jobs[0].command.contains("export GLOBAL="));
        assert!(wf.jobs[0].command.contains("export LOCAL="));
    }

    // The same definition shape parses from JSON via from_json.
    #[test]
    fn json_format_parsing() {
        let json = r#"{
        "name": "JSON Workflow",
        "on": "push",
        "jobs": {
            "test": {
                "run": "echo test"
            }
        }
    }"#;
        let def = WorkflowDef::from_json(json).unwrap();
        let wf = def.into_workflow("json-1").unwrap();
        assert_eq!(wf.name, "JSON Workflow");
        assert_eq!(wf.jobs.len(), 1);
    }

    // Parse failures surface as Err rather than panicking.
    #[test]
    fn malformed_yaml_returns_error() {
        let yaml = "this is not valid yaml: [[[";
        assert!(WorkflowDef::from_yaml(yaml).is_err());
    }

    // Embedded single quotes use the close-escape-reopen POSIX idiom.
    #[test]
    fn shell_quote_handles_single_quotes() {
        let result = super::shell_quote("it's a test");
        assert_eq!(result, "'it'\\''s a test'");
    }
}
473}