1use indexmap::IndexMap;
34use serde::Deserialize;
35
36use crate::{Job, JobStatus, Workflow};
37
/// Top-level workflow definition as parsed from YAML or JSON input.
#[derive(Debug, Deserialize)]
pub struct WorkflowDef {
    /// Human-readable workflow name.
    pub name: String,
    /// Trigger specification; serialized under the reserved key `on`.
    #[serde(rename = "on")]
    pub trigger: TriggerDef,
    /// Workflow-wide environment variables, exported before every job's command.
    #[serde(default)]
    pub env: IndexMap<String, String>,
    /// Jobs keyed by their id; `IndexMap` preserves declaration order.
    pub jobs: IndexMap<String, JobDef>,
}
48
/// The `on` trigger field; accepts a bare event string, a list of event
/// strings, or a mapping from event name to arbitrary configuration.
///
/// NOTE: `untagged` tries variants in declaration order, so `Simple` must
/// stay before `List`, which must stay before `Structured`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum TriggerDef {
    /// e.g. `on: push`
    Simple(String),
    /// e.g. `on: [push, pull_request]`
    List(Vec<String>),
    /// e.g. `on: { push: { branches: [main] } }` — values are kept opaque.
    Structured(IndexMap<String, serde_yaml::Value>),
}
57
58impl TriggerDef {
59 pub fn display(&self) -> String {
60 match self {
61 TriggerDef::Simple(s) => format!("on: {s}"),
62 TriggerDef::List(v) => format!("on: [{}]", v.join(", ")),
63 TriggerDef::Structured(m) => {
64 let keys: Vec<&str> = m.keys().map(|k| k.as_str()).collect();
65 format!("on: [{}]", keys.join(", "))
66 }
67 }
68 }
69}
70
/// A single job entry under `jobs` in a workflow definition.
#[derive(Debug, Deserialize)]
pub struct JobDef {
    /// Display name; the job's map key is used when absent.
    pub name: Option<String>,
    /// Upstream jobs that must finish first (absent, string, or list).
    #[serde(default)]
    pub needs: Needs,
    /// Single shell command. Ignored when `steps` is non-empty.
    pub run: Option<String>,
    /// Ordered list of steps; takes precedence over `run` when non-empty.
    #[serde(default)]
    pub steps: Vec<StepDef>,
    /// Job-scoped environment variables; exported after the workflow-level env.
    #[serde(default)]
    pub env: IndexMap<String, String>,
    /// Timeout value — presumably seconds; not consumed in this file (TODO confirm).
    pub timeout: Option<u64>,
    /// Conditional expression (serialized as `if`); not consumed in this file.
    #[serde(rename = "if")]
    pub condition: Option<String>,
    /// Runner labels required to pick up this job; copied into `Job::required_labels`.
    #[serde(default)]
    pub labels: Vec<String>,
    /// Maximum retry count; defaults to 0. Copied into `Job::max_retries`.
    #[serde(default)]
    pub retries: u32,
}
99
/// Dependency declaration for a job: absent, a single job id, or a list.
///
/// NOTE: `untagged` tries variants in declaration order; `#[serde(default)]`
/// on the field yields `Needs::None` when the key is missing entirely.
#[derive(Debug, Default, Deserialize)]
#[serde(untagged)]
pub enum Needs {
    /// No dependencies declared.
    #[default]
    None,
    /// `needs: other_job`
    Single(String),
    /// `needs: [a, b]`
    List(Vec<String>),
}
109
110impl Needs {
111 pub fn to_vec(&self) -> Vec<String> {
112 match self {
113 Needs::None => vec![],
114 Needs::Single(s) => vec![s.clone()],
115 Needs::List(v) => v.clone(),
116 }
117 }
118}
119
/// A single step within a job's `steps` list.
#[derive(Debug, Deserialize)]
pub struct StepDef {
    /// Optional step id; used as the echoed label when `name` is absent.
    pub id: Option<String>,
    /// Optional display name; preferred over `id` for the echoed label.
    pub name: Option<String>,
    /// Shell command; steps without `run` are skipped when assembling the job command.
    pub run: Option<String>,
    /// Conditional expression (serialized as `if`); not consumed in this file.
    #[serde(rename = "if")]
    pub condition: Option<String>,
    /// Step-scoped environment variables, exported just before the step's command.
    #[serde(default)]
    pub env: IndexMap<String, String>,
}
131
132impl WorkflowDef {
133 pub fn from_yaml(yaml: &str) -> Result<Self, String> {
135 serde_yaml::from_str(yaml).map_err(|e| format!("YAML parse error: {e}"))
136 }
137
138 pub fn from_json(json: &str) -> Result<Self, String> {
140 serde_json::from_str(json).map_err(|e| format!("JSON parse error: {e}"))
141 }
142
143 pub fn parse(input: &str) -> Result<Self, String> {
145 let trimmed = input.trim_start();
146 if trimmed.starts_with('{') {
147 Self::from_json(input)
148 } else {
149 Self::from_yaml(input)
150 }
151 }
152
153 pub fn from_file_contents(contents: &str, filename: &str) -> Result<Self, String> {
155 if filename.ends_with(".json") {
156 Self::from_json(contents)
157 } else if filename.ends_with(".yml") || filename.ends_with(".yaml") {
158 Self::from_yaml(contents)
159 } else {
160 Self::parse(contents)
161 }
162 }
163
164 pub fn into_workflow(self, id: &str) -> Result<Workflow, String> {
169 let trigger = self.trigger.display();
170 let mut jobs = Vec::with_capacity(self.jobs.len());
171
172 for (job_id, job_def) in &self.jobs {
173 let name = job_def.name.clone().unwrap_or_else(|| job_id.clone());
174
175 let command = build_command(job_def, &self.env)?;
176 let depends_on = job_def.needs.to_vec();
177
178 for dep in &depends_on {
180 if !self.jobs.contains_key(dep) {
181 return Err(format!(
182 "Job '{job_id}' depends on '{dep}', which doesn't exist"
183 ));
184 }
185 }
186
187 jobs.push(Job {
188 id: job_id.clone(),
189 name,
190 status: JobStatus::Queued,
191 command,
192 duration_secs: None,
193 started_at: None,
194 depends_on,
195 output: None,
196 required_labels: job_def.labels.clone(),
197 max_retries: job_def.retries,
198 attempt: 0,
199 metadata: std::collections::HashMap::new(),
200 ports: vec![],
201 });
202 }
203
204 Ok(Workflow {
205 id: id.to_string(),
206 name: self.name,
207 trigger,
208 jobs,
209 })
210 }
211}
212
213fn build_command(job: &JobDef, global_env: &IndexMap<String, String>) -> Result<String, String> {
215 let mut env_exports = Vec::new();
217 for (k, v) in global_env {
218 env_exports.push(format!("export {k}={}", shell_quote(v)));
219 }
220 for (k, v) in &job.env {
221 env_exports.push(format!("export {k}={}", shell_quote(v)));
222 }
223
224 let commands = if !job.steps.is_empty() {
225 let step_cmds: Result<Vec<String>, String> = job
227 .steps
228 .iter()
229 .enumerate()
230 .filter_map(|(i, step)| {
231 step.run.as_ref().map(|cmd| {
232 let mut parts = Vec::new();
233 for (k, v) in &step.env {
235 parts.push(format!("export {k}={}", shell_quote(v)));
236 }
237 let default_label = format!("step {}", i + 1);
238 let label = step
239 .name
240 .as_deref()
241 .or(step.id.as_deref())
242 .unwrap_or(&default_label);
243 parts.push(format!("echo '=== {label} ==='"));
244 parts.push(cmd.trim().to_string());
245 Ok(parts.join(" && "))
246 })
247 })
248 .collect();
249 step_cmds?
250 } else if let Some(run) = &job.run {
251 vec![run.trim().to_string()]
252 } else {
253 return Err("Job must have either 'run' or 'steps'".to_string());
254 };
255
256 let mut full = env_exports;
257 full.extend(commands);
258 Ok(full.join(" && "))
259}
260
/// Wraps `s` in single quotes for safe interpolation into a POSIX shell
/// command, escaping embedded single quotes with the standard
/// close-escape-reopen idiom (`'` becomes `'\''`).
fn shell_quote(s: &str) -> String {
    let escaped = s.replace('\'', "'\\''");
    let mut quoted = String::with_capacity(escaped.len() + 2);
    quoted.push('\'');
    quoted.push_str(&escaped);
    quoted.push('\'');
    quoted
}
264
#[cfg(test)]
mod tests {
    use super::*;

    // Happy path: three jobs, one with a list-form `needs`.
    #[test]
    fn parse_simple_workflow() {
        let yaml = r#"
name: CI
on: push

jobs:
  lint:
    name: Lint
    run: cargo clippy

  test:
    name: Test
    run: cargo test

  build:
    name: Build
    needs: [lint, test]
    run: cargo build --release
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("ci-1").unwrap();

        assert_eq!(wf.name, "CI");
        assert_eq!(wf.trigger, "on: push");
        assert_eq!(wf.jobs.len(), 3);
        // IndexMap preserves declaration order, so `build` is jobs[2].
        assert_eq!(wf.jobs[2].depends_on, vec!["lint", "test"]);
    }

    // Multi-step job: step labels and commands end up in the assembled command.
    #[test]
    fn parse_steps_workflow() {
        let yaml = r#"
name: Deploy
on: push

jobs:
  deploy:
    name: Deploy All
    steps:
      - name: Migrate DB
        run: ./migrate.sh
      - name: Deploy App
        run: ./deploy.sh
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("deploy-1").unwrap();

        assert_eq!(wf.jobs.len(), 1);
        assert!(wf.jobs[0].command.contains("Migrate DB"));
        assert!(wf.jobs[0].command.contains("./deploy.sh"));
    }

    // A `needs` entry referencing a missing job id must be rejected.
    #[test]
    fn invalid_dependency_errors() {
        let yaml = r#"
name: Bad
on: push

jobs:
  build:
    needs: [nonexistent]
    run: echo hi
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let result = def.into_workflow("bad-1");
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("nonexistent"));
    }

    // A job with neither `run` nor `steps` has no command to execute.
    #[test]
    fn job_without_run_or_steps_errors() {
        let yaml = r#"
name: Bad
on: push

jobs:
  empty:
    name: Empty Job
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let result = def.into_workflow("bad-2");
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .contains("must have either 'run' or 'steps'")
        );
    }

    // An explicitly empty jobs map is valid and yields zero jobs.
    #[test]
    fn empty_jobs_map() {
        let yaml = r#"
name: Empty
on: push

jobs: {}
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("empty-1").unwrap();
        assert_eq!(wf.jobs.len(), 0);
    }

    // `needs: a` (bare string) deserializes via the Needs::Single variant.
    #[test]
    fn single_string_dependency() {
        let yaml = r#"
name: Single Dep
on: push

jobs:
  a:
    run: echo a
  b:
    needs: a
    run: echo b
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("single-1").unwrap();
        assert_eq!(wf.jobs[1].depends_on, vec!["a"]);
    }

    // Hyphens/underscores in job ids and punctuation in names survive parsing.
    #[test]
    fn special_characters_in_job_names() {
        let yaml = r#"
name: Special Chars
on: push

jobs:
  build-linux_x86:
    name: "Build (Linux x86_64)"
    run: echo "building"
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("special-1").unwrap();
        assert_eq!(wf.jobs[0].id, "build-linux_x86");
        assert_eq!(wf.jobs[0].name, "Build (Linux x86_64)");
    }

    // `labels` and `retries` flow through to the Job's scheduling fields.
    #[test]
    fn labels_and_retries_parsed() {
        let yaml = r#"
name: Config
on: push

jobs:
  deploy:
    name: Deploy
    run: ./deploy.sh
    labels: [linux, aws]
    retries: 3
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("config-1").unwrap();
        assert_eq!(wf.jobs[0].required_labels, vec!["linux", "aws"]);
        assert_eq!(wf.jobs[0].max_retries, 3);
    }

    // Both workflow-level and job-level env vars become `export` statements.
    #[test]
    fn env_vars_in_command() {
        let yaml = r#"
name: Env
on: push

env:
  GLOBAL: "value"

jobs:
  test:
    run: echo test
    env:
      LOCAL: "local_value"
"#;
        let def = WorkflowDef::from_yaml(yaml).unwrap();
        let wf = def.into_workflow("env-1").unwrap();
        assert!(wf.jobs[0].command.contains("export GLOBAL="));
        assert!(wf.jobs[0].command.contains("export LOCAL="));
    }

    // The same definition shape also deserializes from JSON.
    #[test]
    fn json_format_parsing() {
        let json = r#"{
            "name": "JSON Workflow",
            "on": "push",
            "jobs": {
                "test": {
                    "run": "echo test"
                }
            }
        }"#;
        let def = WorkflowDef::from_json(json).unwrap();
        let wf = def.into_workflow("json-1").unwrap();
        assert_eq!(wf.name, "JSON Workflow");
        assert_eq!(wf.jobs.len(), 1);
    }

    // Parse failures surface as Err rather than panicking.
    #[test]
    fn malformed_yaml_returns_error() {
        let yaml = "this is not valid yaml: [[[";
        assert!(WorkflowDef::from_yaml(yaml).is_err());
    }

    // Embedded single quotes use the POSIX close-escape-reopen idiom.
    #[test]
    fn shell_quote_handles_single_quotes() {
        let result = super::shell_quote("it's a test");
        assert_eq!(result, "'it'\\''s a test'");
    }
}