Skip to main content

dk_runner/
runner.rs

1use std::collections::HashMap;
2use std::path::Path;
3use std::sync::Arc;
4use std::time::Duration;
5
6use anyhow::{Context, Result};
7use tokio::sync::mpsc;
8use tracing::info;
9use uuid::Uuid;
10
11use dk_engine::repo::Engine;
12
13use crate::executor::Executor;
14use crate::scheduler::{self, StepResult};
15use crate::workflow::parser::parse_yaml_workflow_file;
16use crate::workflow::types::{Stage, Step, StepType, Workflow};
17use crate::workflow::validator::validate_workflow;
18
/// The top-level runner that loads workflows and executes them.
pub struct Runner {
    // Shared handle to the engine: repo lookup, changeset store, pipeline store.
    engine: Arc<Engine>,
    // Pluggable executor that actually runs workflow steps (trait object so
    // different execution backends can be swapped in).
    executor: Box<dyn Executor>,
}
24
impl Runner {
    /// Construct a runner over the given engine handle and step executor.
    pub fn new(engine: Arc<Engine>, executor: Box<dyn Executor>) -> Self {
        Self { engine, executor }
    }

    /// Run a verification pipeline for a changeset.
    ///
    /// Builds a sandbox copy of the repo working tree in a temp dir, overlays
    /// the changeset's files on top of it, loads the workflow (repo YAML →
    /// DB pipeline → auto-detection), and executes it via the scheduler.
    ///
    /// Returns `Ok(true)` when the workflow passed (or when no pipeline is
    /// configured and the changeset is auto-approved), `Ok(false)` when a
    /// step failed or the overall workflow timed out. `Err` is reserved for
    /// setup failures (repo lookup, I/O, hostile changeset paths, workflow
    /// validation). Step-level progress is streamed through `tx`.
    pub async fn verify(
        &self,
        changeset_id: Uuid,
        repo_name: &str,
        tx: mpsc::Sender<StepResult>,
    ) -> Result<bool> {
        // Scoped block: presumably so the repo handle is released before the
        // potentially slow copy below — TODO confirm the handle holds a lock.
        let (repo_id, repo_dir) = {
            let (repo_id, git_repo) = self.engine.get_repo(repo_name).await?;
            // GitRepository::path() already returns the working tree directory
            (repo_id, git_repo.path().to_path_buf())
        };

        // Create a temp directory with the full repo content, then overlay
        // changeset files so that cargo/build tools find Cargo.toml and
        // all workspace metadata alongside the modified source files.
        let changeset_data = self.engine.changeset_store().get_files(changeset_id).await?;
        let temp_dir = tempfile::tempdir().context("failed to create temp dir for verify")?;
        let work_dir = temp_dir.path().to_path_buf();

        // Copy repo working tree into temp dir so Cargo.toml, Cargo.lock,
        // and all other workspace files are present for build tools.
        copy_dir_recursive(&repo_dir, &work_dir).await
            .context("failed to copy repo into temp dir")?;

        // Overlay changeset files on top of the repo copy.
        // NOTE(review): every file path is recorded in `changeset_paths`
        // (and later handed to the scheduler), but entries whose `content`
        // is None skip the validation below — confirm that content-less
        // entries (deletions?) cannot carry hostile paths downstream.
        let mut changeset_paths: Vec<String> = Vec::with_capacity(changeset_data.len());
        for file in &changeset_data {
            changeset_paths.push(file.file_path.clone());
            if let Some(content) = &file.content {
                // Security: reject dangerous paths BEFORE any filesystem operations.
                // 1. Reject traversal components (../)
                //    (`contains("..")` is over-strict — it also rejects benign
                //    names like `a..b` — which is the safe direction here.)
                if file.file_path.contains("..") {
                    anyhow::bail!(
                        "changeset file path contains traversal component: '{}'",
                        file.file_path
                    );
                }
                // 2. Reject absolute paths (would discard work_dir base in Path::join)
                if file.file_path.starts_with('/') || file.file_path.starts_with('\\') {
                    anyhow::bail!(
                        "changeset file path is absolute: '{}'",
                        file.file_path
                    );
                }
                let dest = work_dir.join(&file.file_path);
                // 3. Lexical prefix check: verify joined path stays under work_dir.
                //    This catches any remaining edge cases without touching the filesystem.
                if !dest.starts_with(&work_dir) {
                    anyhow::bail!(
                        "changeset file path escapes sandbox: '{}' resolves outside work_dir",
                        file.file_path
                    );
                }
                // Safe to create directories and write — path is validated
                if let Some(parent) = dest.parent() {
                    tokio::fs::create_dir_all(parent).await?;
                }
                tokio::fs::write(&dest, content).await?;
            }
        }

        info!(
            "copied repo and overlaid {} changeset files into {} for verification",
            changeset_paths.len(),
            work_dir.display()
        );

        // Intentionally load the pipeline from the canonical repo directory, not from
        // work_dir (the changeset overlay). This prevents a submitted changeset from
        // hijacking its own verification pipeline for security.
        let workflow = self.load_workflow(&repo_dir, repo_id).await?;

        // Auto-none: no pipeline configured, auto-approve with audit trail
        if workflow.stages.is_empty() {
            tracing::warn!(
                changeset_id = %changeset_id,
                repo = %repo_name,
                "auto-approving changeset: no verification pipeline and no recognized project type"
            );
            return Ok(true);
        }

        validate_workflow(&workflow).context("workflow validation failed")?;

        // Environment exposed to every step so commands can identify what
        // they are verifying.
        let mut env = HashMap::new();
        env.insert("DKOD_CHANGESET_ID".to_string(), changeset_id.to_string());
        env.insert("DKOD_REPO_ID".to_string(), repo_id.to_string());

        // Overall wall-clock cap: a timeout counts as failure, not error.
        let passed = tokio::time::timeout(
            workflow.timeout,
            scheduler::run_workflow(
                &workflow,
                self.executor.as_ref(),
                &work_dir,
                &changeset_paths,
                &env,
                &tx,
                Some(&self.engine),
                Some(repo_id),
                Some(changeset_id),
            ),
        )
        .await
        .unwrap_or_else(|_| {
            tracing::warn!("workflow '{}' timed out after {:?}", workflow.name, workflow.timeout);
            false
        });

        // temp_dir cleaned up on drop
        Ok(passed)
    }

    /// Resolve the workflow for a repo, in priority order:
    /// 1. `.dkod/pipeline.yaml` committed in the repo (must define stages),
    /// 2. the DB-stored pipeline for `repo_id`,
    /// 3. auto-detection from project marker files.
    async fn load_workflow(&self, repo_dir: &Path, repo_id: Uuid) -> Result<Workflow> {
        // Priority 1: .dkod/pipeline.yaml in repo
        let yaml_path = repo_dir.join(".dkod/pipeline.yaml");
        if yaml_path.exists() {
            info!("loading workflow from {}", yaml_path.display());
            let workflow = parse_yaml_workflow_file(&yaml_path).await?;
            // An empty stages list would otherwise trigger auto-approve in
            // verify(); an explicit-but-empty pipeline file is treated as a
            // configuration error instead.
            if workflow.stages.is_empty() {
                anyhow::bail!(
                    "pipeline.yaml exists but defines no stages — refusing to auto-approve; \
                     add at least one stage or remove the file to use auto-detection"
                );
            }
            return Ok(workflow);
        }

        // Check for legacy .dekode/pipeline.toml and warn about migration
        let legacy_toml = repo_dir.join(".dekode/pipeline.toml");
        if legacy_toml.exists() {
            tracing::warn!(
                path = %legacy_toml.display(),
                "found legacy .dekode/pipeline.toml \u{2014} this format is no longer loaded; please migrate to .dkod/pipeline.yaml"
            );
        }

        // Priority 2: DB-stored pipeline
        // NOTE(review): unwrap_or_default() treats a DB *error* the same as
        // "no pipeline", silently falling through to auto-detection — confirm
        // this best-effort behavior is intended (a log line on Err would help).
        let db_steps = self.engine
            .pipeline_store()
            .get_pipeline(repo_id)
            .await
            .unwrap_or_default();

        if !db_steps.is_empty() {
            info!(
                "loading workflow from DB pipeline ({} steps)",
                db_steps.len()
            );
            return Ok(db_pipeline_to_workflow(db_steps));
        }

        // Priority 3: Auto-detect from project files
        info!("auto-detecting verification workflow from project files");
        Ok(detect_workflow(repo_dir))
    }
}
187
188fn db_pipeline_to_workflow(steps: Vec<dk_engine::pipeline::PipelineStep>) -> Workflow {
189    let resolved_steps: Vec<Step> = steps
190        .into_iter()
191        .map(|s| {
192            let command = s
193                .config
194                .get("command")
195                .and_then(|v| v.as_str())
196                .unwrap_or("echo 'no command configured'")
197                .to_string();
198            let timeout_secs = s
199                .config
200                .get("timeout_secs")
201                .and_then(|v| v.as_u64())
202                .unwrap_or(120);
203
204            let step_type = match s.step_type.as_str() {
205                "agent-review" => StepType::AgentReview {
206                    prompt: "Review this changeset".to_string(),
207                },
208                "human-approve" => StepType::HumanApprove,
209                _ => StepType::Command { run: command },
210            };
211
212            Step {
213                name: s.step_type.clone(),
214                step_type,
215                timeout: Duration::from_secs(timeout_secs),
216                required: s.required,
217                changeset_aware: false,
218            }
219        })
220        .collect();
221
222    Workflow {
223        name: "db-pipeline".to_string(),
224        timeout: Duration::from_secs(600),
225        stages: vec![Stage {
226            name: "pipeline".to_string(),
227            parallel: false,
228            steps: resolved_steps,
229        }],
230        allowed_commands: vec![],
231    }
232}
233
234/// Auto-detect verification workflow from project files in the repo.
235/// Scans for ALL known language markers and creates steps for each.
236/// Returns a no-stage workflow (auto-approve) if no known project type found.
237pub fn detect_workflow(repo_dir: &Path) -> Workflow {
238    let mut steps: Vec<Step> = Vec::new();
239
240    // ── Rust ──
241    if repo_dir.join("Cargo.toml").exists() {
242        steps.push(Step {
243            name: "rust:check".to_string(),
244            step_type: StepType::Command { run: "cargo check".to_string() },
245            timeout: Duration::from_secs(60),
246            required: true,
247            changeset_aware: true,
248        });
249        steps.push(Step {
250            name: "rust:test".to_string(),
251            step_type: StepType::Command { run: "cargo test".to_string() },
252            timeout: Duration::from_secs(60),
253            required: true,
254            changeset_aware: true,
255        });
256    }
257
258    // ── Node / Bun ──
259    if repo_dir.join("package.json").exists() {
260        let is_bun = repo_dir.join("bun.lock").exists()
261            || repo_dir.join("bun.lockb").exists();
262        let (label, install_cmd, test_cmd) = if is_bun {
263            ("bun", "bun install --frozen-lockfile", "bun test")
264        } else {
265            ("node", "npm ci", "npm test")
266        };
267        steps.push(Step {
268            name: format!("{label}:install"),
269            step_type: StepType::Command { run: install_cmd.to_string() },
270            timeout: Duration::from_secs(120),
271            required: true,
272            changeset_aware: false,
273        });
274        steps.push(Step {
275            name: format!("{label}:test"),
276            step_type: StepType::Command { run: test_cmd.to_string() },
277            timeout: Duration::from_secs(60),
278            required: true,
279            changeset_aware: true,
280        });
281    }
282
283    // ── Python ──
284    if repo_dir.join("pyproject.toml").exists()
285        || repo_dir.join("requirements.txt").exists()
286    {
287        if repo_dir.join("pyproject.toml").exists() {
288            steps.push(Step {
289                name: "python:install".to_string(),
290                step_type: StepType::Command { run: "pip install -e .".to_string() },
291                timeout: Duration::from_secs(120),
292                required: true,
293                changeset_aware: false,
294            });
295        }
296        if repo_dir.join("requirements.txt").exists() {
297            steps.push(Step {
298                name: "python:install-deps".to_string(),
299                step_type: StepType::Command {
300                    run: "pip install -r requirements.txt".to_string(),
301                },
302                timeout: Duration::from_secs(120),
303                required: true,
304                changeset_aware: false,
305            });
306        }
307        steps.push(Step {
308            name: "python:test".to_string(),
309            step_type: StepType::Command { run: "pytest".to_string() },
310            timeout: Duration::from_secs(60),
311            required: true,
312            changeset_aware: true,
313        });
314    }
315
316    // ── Go ──
317    if repo_dir.join("go.mod").exists() {
318        steps.push(Step {
319            name: "go:build".to_string(),
320            step_type: StepType::Command { run: "go build ./...".to_string() },
321            timeout: Duration::from_secs(60),
322            required: true,
323            changeset_aware: true,
324        });
325        steps.push(Step {
326            name: "go:vet".to_string(),
327            step_type: StepType::Command { run: "go vet ./...".to_string() },
328            timeout: Duration::from_secs(60),
329            required: true,
330            changeset_aware: true,
331        });
332        steps.push(Step {
333            name: "go:test".to_string(),
334            step_type: StepType::Command { run: "go test ./...".to_string() },
335            timeout: Duration::from_secs(60),
336            required: true,
337            changeset_aware: true,
338        });
339    }
340
341    if steps.is_empty() {
342        return Workflow {
343            name: "auto-none".to_string(),
344            timeout: Duration::from_secs(30),
345            allowed_commands: vec![],
346            stages: vec![],
347        };
348    }
349
350    let name = if steps.iter().map(|s| s.name.split(':').next().unwrap_or("")).collect::<std::collections::HashSet<_>>().len() > 1 {
351        "auto-polyglot".to_string()
352    } else {
353        format!("auto-{}", steps[0].name.split(':').next().unwrap_or("unknown"))
354    };
355
356    // Derive timeout from the sum of individual step timeouts (with a floor of 60s).
357    let total_timeout_secs = steps.iter().map(|s| s.timeout.as_secs()).sum::<u64>().max(60);
358
359    Workflow {
360        name,
361        timeout: Duration::from_secs(total_timeout_secs),
362        allowed_commands: vec![],
363        stages: vec![Stage {
364            name: "checks".to_string(),
365            parallel: false,
366            steps,
367        }],
368    }
369}
370
371/// Recursively copy a directory tree, skipping the `.git` directory.
372async fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
373    tokio::fs::create_dir_all(dst).await?;
374    let mut entries = tokio::fs::read_dir(src).await?;
375    while let Some(entry) = entries.next_entry().await? {
376        let file_name = entry.file_name();
377        // Skip .git to avoid copying potentially large git objects
378        if file_name == ".git" {
379            continue;
380        }
381        let src_path = entry.path();
382        let dst_path = dst.join(&file_name);
383        let file_type = entry.file_type().await?;
384        if file_type.is_dir() {
385            Box::pin(copy_dir_recursive(&src_path, &dst_path)).await?;
386        } else if file_type.is_symlink() {
387            let target = tokio::fs::read_link(&src_path).await?;
388            // Security: only recreate relative symlinks whose resolved target
389            // stays within the destination tree. This prevents sandbox escapes
390            // via crafted symlinks (e.g., link -> /etc/passwd, link -> ../../..).
391            let target_str = target.to_string_lossy();
392            if target_str.starts_with('/') || target_str.contains("..") {
393                tracing::warn!(
394                    src = %src_path.display(),
395                    target = %target.display(),
396                    "skipping symlink that points outside sandbox"
397                );
398                continue;
399            }
400            #[cfg(unix)]
401            tokio::fs::symlink(target, &dst_path).await?;
402        } else {
403            tokio::fs::copy(&src_path, &dst_path).await?;
404        }
405    }
406    Ok(())
407}
408
#[cfg(test)]
mod tests {
    use super::*;

    // ── detect_workflow: single-ecosystem detection ──

    #[tokio::test]
    async fn test_detect_workflow_rust() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("Cargo.toml"), b"[package]\nname = \"test\"")
            .await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-rust");
        assert_eq!(wf.stages.len(), 1);
        assert_eq!(wf.stages[0].steps.len(), 2);
    }

    #[tokio::test]
    async fn test_detect_workflow_bun() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("package.json"), b"{}").await.unwrap();
        tokio::fs::write(dir.path().join("bun.lock"), b"").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-bun");
        assert_eq!(wf.stages[0].steps.len(), 2);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"bun install --frozen-lockfile"));
        assert!(cmds.contains(&"bun test"));
    }

    // Same as above but via the binary bun.lockb marker.
    #[tokio::test]
    async fn test_detect_workflow_bun_lockb() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("package.json"), b"{}").await.unwrap();
        tokio::fs::write(dir.path().join("bun.lockb"), b"\x00").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-bun");
        assert_eq!(wf.stages[0].steps.len(), 2);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"bun install --frozen-lockfile"));
        assert!(cmds.contains(&"bun test"));
    }

    // package.json without a Bun lockfile falls back to npm.
    #[tokio::test]
    async fn test_detect_workflow_npm() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("package.json"), b"{}").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-node");
        assert_eq!(wf.stages[0].steps.len(), 2);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"npm ci"));
        assert!(cmds.contains(&"npm test"));
    }

    #[tokio::test]
    async fn test_detect_workflow_python_pyproject() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("pyproject.toml"), b"[project]").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-python");
        // pyproject.toml only — install via pip install -e . plus test
        assert_eq!(wf.stages[0].steps.len(), 2);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"pip install -e ."));
        assert!(cmds.contains(&"pytest"));
    }

    // NOTE(review): this test is an exact duplicate of
    // test_detect_workflow_python_both_files below — consider removing one
    // of them or differentiating what each is meant to cover.
    #[tokio::test]
    async fn test_detect_workflow_python_dual_file() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("pyproject.toml"), b"[project]").await.unwrap();
        tokio::fs::write(dir.path().join("requirements.txt"), b"pytest\nrequests").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-python");
        // Both files present — install pyproject + requirements + test
        assert_eq!(wf.stages[0].steps.len(), 3);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"pip install -e ."));
        assert!(cmds.contains(&"pip install -r requirements.txt"));
        assert!(cmds.contains(&"pytest"));
    }

    #[tokio::test]
    async fn test_detect_workflow_python_requirements() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("requirements.txt"), b"pytest\nrequests").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-python");
        // requirements.txt only — install-deps + test
        assert_eq!(wf.stages[0].steps.len(), 2);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"pip install -r requirements.txt"));
        assert!(cmds.contains(&"pytest"));
    }

    // NOTE(review): duplicate of test_detect_workflow_python_dual_file above.
    #[tokio::test]
    async fn test_detect_workflow_python_both_files() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("pyproject.toml"), b"[project]").await.unwrap();
        tokio::fs::write(dir.path().join("requirements.txt"), b"pytest\nrequests").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-python");
        // Both files — install-package + install-deps + test
        assert_eq!(wf.stages[0].steps.len(), 3);
        let cmds: Vec<_> = wf.stages[0].steps.iter().filter_map(|s| {
            if let StepType::Command { run } = &s.step_type { Some(run.as_str()) } else { None }
        }).collect();
        assert!(cmds.contains(&"pip install -e ."));
        assert!(cmds.contains(&"pip install -r requirements.txt"));
        assert!(cmds.contains(&"pytest"));
    }

    #[tokio::test]
    async fn test_detect_workflow_go() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("go.mod"), b"module example.com/test").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-go");
        assert_eq!(wf.stages[0].steps.len(), 3);
    }

    // No marker files: the "auto-none" workflow with zero stages signals
    // auto-approve to the caller.
    #[tokio::test]
    async fn test_detect_workflow_unknown() {
        let dir = tempfile::tempdir().unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-none");
        assert!(wf.stages.is_empty());
    }

    // ── copy_dir_recursive ──

    #[tokio::test]
    async fn test_copy_dir_recursive_copies_files() {
        let src = tempfile::tempdir().unwrap();
        let dst = tempfile::tempdir().unwrap();

        tokio::fs::write(src.path().join("Cargo.toml"), b"[package]\nname = \"test\"")
            .await
            .unwrap();
        tokio::fs::create_dir_all(src.path().join("src")).await.unwrap();
        tokio::fs::write(src.path().join("src/main.rs"), b"fn main() {}")
            .await
            .unwrap();

        // .git dir should be skipped
        tokio::fs::create_dir_all(src.path().join(".git/objects")).await.unwrap();
        tokio::fs::write(src.path().join(".git/HEAD"), b"ref: refs/heads/main")
            .await
            .unwrap();

        copy_dir_recursive(src.path(), dst.path()).await.unwrap();

        assert!(dst.path().join("Cargo.toml").exists(), "Cargo.toml must be at dst root");
        assert!(dst.path().join("src/main.rs").exists(), "src/main.rs must exist");
        assert!(!dst.path().join(".git").exists(), ".git must be skipped");
    }

    // Relative, non-traversing symlinks are preserved (unix only — symlink
    // creation is cfg(unix) in copy_dir_recursive).
    #[tokio::test]
    async fn test_copy_dir_recursive_handles_symlinks() {
        let src = tempfile::tempdir().unwrap();
        let dst = tempfile::tempdir().unwrap();

        // Create a regular file and a symlink to it
        tokio::fs::write(src.path().join("real.txt"), b"hello").await.unwrap();
        #[cfg(unix)]
        tokio::fs::symlink("real.txt", src.path().join("link.txt")).await.unwrap();

        copy_dir_recursive(src.path(), dst.path()).await.unwrap();

        assert!(dst.path().join("real.txt").exists());
        #[cfg(unix)]
        {
            let meta = tokio::fs::symlink_metadata(dst.path().join("link.txt")).await.unwrap();
            assert!(meta.file_type().is_symlink(), "symlink should be preserved");
            let target = tokio::fs::read_link(dst.path().join("link.txt")).await.unwrap();
            assert_eq!(target.to_str().unwrap(), "real.txt");
        }
    }

    // Directory symlinks are kept as symlinks (not followed/expanded).
    #[tokio::test]
    async fn test_copy_dir_recursive_handles_dir_symlinks() {
        let src = tempfile::tempdir().unwrap();
        let dst = tempfile::tempdir().unwrap();

        // Create a real directory and a symlink to it
        tokio::fs::create_dir_all(src.path().join("real_dir")).await.unwrap();
        tokio::fs::write(src.path().join("real_dir/file.txt"), b"content").await.unwrap();
        #[cfg(unix)]
        tokio::fs::symlink("real_dir", src.path().join("linked_dir")).await.unwrap();

        copy_dir_recursive(src.path(), dst.path()).await.unwrap();

        assert!(dst.path().join("real_dir/file.txt").exists());
        #[cfg(unix)]
        {
            let meta = tokio::fs::symlink_metadata(dst.path().join("linked_dir")).await.unwrap();
            assert!(meta.file_type().is_symlink(), "dir symlink should be preserved");
            let target = tokio::fs::read_link(dst.path().join("linked_dir")).await.unwrap();
            assert_eq!(target.to_str().unwrap(), "real_dir");
        }
    }

    // ── db_pipeline_to_workflow ──

    #[test]
    fn test_db_pipeline_conversion() {
        let steps = vec![
            dk_engine::pipeline::PipelineStep {
                repo_id: Uuid::new_v4(),
                step_order: 1,
                step_type: "typecheck".to_string(),
                config: serde_json::json!({"command": "cargo check", "timeout_secs": 120}),
                required: true,
            },
            dk_engine::pipeline::PipelineStep {
                repo_id: Uuid::new_v4(),
                step_order: 2,
                step_type: "test".to_string(),
                config: serde_json::json!({"command": "cargo test", "timeout_secs": 300}),
                required: true,
            },
        ];
        let wf = db_pipeline_to_workflow(steps);
        assert_eq!(wf.stages.len(), 1);
        assert_eq!(wf.stages[0].steps.len(), 2);
    }

    // ── detect_workflow: polyglot repos ──

    #[tokio::test]
    async fn test_detect_workflow_polyglot_rust_and_node() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("Cargo.toml"), b"[package]\nname = \"test\"").await.unwrap();
        tokio::fs::write(dir.path().join("package.json"), b"{}").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-polyglot");
        assert_eq!(wf.stages.len(), 1);
        let step_names: Vec<&str> = wf.stages[0].steps.iter().map(|s| s.name.as_str()).collect();
        assert!(step_names.iter().any(|n| n.starts_with("rust:")), "missing rust steps");
        assert!(step_names.iter().any(|n| n.starts_with("node:")), "missing node steps");
    }

    #[tokio::test]
    async fn test_detect_workflow_polyglot_three_languages() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("Cargo.toml"), b"[package]\nname = \"test\"").await.unwrap();
        tokio::fs::write(dir.path().join("package.json"), b"{}").await.unwrap();
        tokio::fs::write(dir.path().join("pyproject.toml"), b"[project]\nname = \"test\"").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-polyglot");
        let step_names: Vec<&str> = wf.stages[0].steps.iter().map(|s| s.name.as_str()).collect();
        assert!(step_names.iter().any(|n| n.starts_with("rust:")), "missing rust");
        assert!(step_names.iter().any(|n| n.starts_with("node:")), "missing node");
        assert!(step_names.iter().any(|n| n.starts_with("python:")), "missing python");
    }

    #[tokio::test]
    async fn test_detect_workflow_polyglot_bun_and_go() {
        let dir = tempfile::tempdir().unwrap();
        tokio::fs::write(dir.path().join("package.json"), b"{}").await.unwrap();
        tokio::fs::write(dir.path().join("bun.lock"), b"").await.unwrap();
        tokio::fs::write(dir.path().join("go.mod"), b"module example.com/test").await.unwrap();
        let wf = detect_workflow(dir.path());
        assert_eq!(wf.name, "auto-polyglot");
        let step_names: Vec<&str> = wf.stages[0].steps.iter().map(|s| s.name.as_str()).collect();
        assert!(step_names.iter().any(|n| n.starts_with("bun:")), "missing bun steps");
        assert!(step_names.iter().any(|n| n.starts_with("go:")), "missing go steps");
    }
}