Skip to main content

opal/ai/
prompt.rs

1use super::AiContext;
2use crate::config::AiSettingsConfig;
3use anyhow::{Context, Result};
4use include_dir::{Dir, include_dir};
5use std::collections::HashMap;
6use std::fs;
7use std::path::{Path, PathBuf};
8
// Prompt templates bundled into the binary at build time from `prompts/ai/`.
// Used as defaults when no override file is configured in settings.
static EMBEDDED_PROMPTS: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/prompts/ai");
10
/// A fully rendered prompt pair ready to hand to the AI backend.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RenderedPrompt {
    /// Optional system message; `None` when the system template renders to an
    /// empty (all-whitespace) string.
    pub system: Option<String>,
    /// The rendered job-analysis user prompt.
    pub prompt: String,
}
15
16pub fn render_job_analysis_prompt(
17    workdir: &Path,
18    settings: &AiSettingsConfig,
19    context: &AiContext,
20) -> Result<RenderedPrompt> {
21    let vars = template_vars(context);
22    let system_template = load_template(
23        workdir,
24        settings.prompts.system_file.as_deref(),
25        "system.md",
26    )?;
27    let prompt_template = load_template(
28        workdir,
29        settings.prompts.job_analysis_file.as_deref(),
30        "job-analysis.md",
31    )?;
32
33    let system = render_template(&system_template, &vars).trim().to_string();
34    let prompt = render_template(&prompt_template, &vars);
35
36    Ok(RenderedPrompt {
37        system: (!system.is_empty()).then_some(system),
38        prompt,
39    })
40}
41
42fn load_template(
43    workdir: &Path,
44    override_path: Option<&str>,
45    embedded_name: &str,
46) -> Result<String> {
47    if let Some(path) = override_path.filter(|value| !value.trim().is_empty()) {
48        let path = resolve_prompt_path(workdir, path);
49        return fs::read_to_string(&path)
50            .with_context(|| format!("failed to read AI prompt template {}", path.display()));
51    }
52
53    let file = EMBEDDED_PROMPTS
54        .get_file(embedded_name)
55        .with_context(|| format!("embedded AI prompt {embedded_name} not found"))?;
56    file.contents_utf8()
57        .map(|text| text.to_string())
58        .with_context(|| format!("embedded AI prompt {embedded_name} is not valid utf-8"))
59}
60
/// Interprets `value` as a filesystem path, joining it onto `workdir` unless
/// it is already absolute.
fn resolve_prompt_path(workdir: &Path, value: &str) -> PathBuf {
    let candidate = Path::new(value);
    match candidate.is_absolute() {
        true => candidate.to_path_buf(),
        false => workdir.join(candidate),
    }
}
69
70fn template_vars(context: &AiContext) -> HashMap<&'static str, String> {
71    HashMap::from([
72        ("job_name", context.job_name.clone()),
73        ("source_name", context.source_name.clone()),
74        ("stage", context.stage.clone()),
75        ("job_yaml", context.job_yaml.clone()),
76        ("runner_summary", context.runner_summary.clone()),
77        ("pipeline_summary", context.pipeline_summary.clone()),
78        (
79            "runtime_summary",
80            context.runtime_summary.clone().unwrap_or_default(),
81        ),
82        ("log_excerpt", context.log_excerpt.clone()),
83        (
84            "failure_hint",
85            context.failure_hint.clone().unwrap_or_default(),
86        ),
87    ])
88}
89
/// Substitutes `{{key}}` placeholders in `template` with values from `vars`.
///
/// Scans the template in a single left-to-right pass, so substituted values
/// are never re-expanded. The previous implementation called
/// `str::replace` per key, which re-scanned already-substituted text: a
/// context value containing `{{other_key}}` (e.g. inside a log excerpt)
/// could itself be expanded, and because `HashMap` iteration order is
/// unspecified the output was nondeterministic.
///
/// Unknown placeholders and unmatched `{{` sequences are passed through
/// unchanged, matching the old behavior.
fn render_template(template: &str, vars: &HashMap<&'static str, String>) -> String {
    let mut rendered = String::with_capacity(template.len());
    let mut rest = template;
    while let Some(start) = rest.find("{{") {
        // Copy everything before the opening braces verbatim.
        rendered.push_str(&rest[..start]);
        let after = &rest[start + 2..];
        match after.find("}}") {
            Some(end) if vars.contains_key(&after[..end]) => {
                rendered.push_str(&vars[&after[..end]]);
                rest = &after[end + 2..];
            }
            _ => {
                // No closing braces or unknown key: emit the `{{` literally
                // and keep scanning right after it.
                rendered.push_str("{{");
                rest = after;
            }
        }
    }
    rendered.push_str(rest);
    rendered
}
97
#[cfg(test)]
mod tests {
    use super::{render_job_analysis_prompt, render_template};
    use crate::ai::AiContext;
    use crate::config::AiSettingsConfig;
    use std::collections::HashMap;
    use tempfile::tempdir;

    /// `render_template` substitutes `{{name}}` placeholders from the map.
    #[test]
    fn render_template_replaces_known_placeholders() {
        let substitutions = HashMap::from([("job_name", "unit-tests".to_string())]);
        let rendered = render_template("Job {{job_name}}", &substitutions);
        assert_eq!(rendered, "Job unit-tests");
    }

    /// With default settings (no override files) the embedded templates are
    /// used and context values appear in the rendered output.
    #[test]
    fn render_job_analysis_prompt_uses_embedded_defaults() {
        let workdir = tempdir().expect("tempdir");
        let context = AiContext {
            job_name: "unit-tests".into(),
            source_name: "unit-tests".into(),
            stage: "test".into(),
            job_yaml: "unit-tests:\n  script:\n    - cargo test".into(),
            runner_summary: "engine=container arch=arm64 vcpu=6 ram=3g".into(),
            pipeline_summary: "dependencies: fetch-sources".into(),
            runtime_summary: Some("container: opal-unit-tests-01".into()),
            log_excerpt: "error: linker failed".into(),
            failure_hint: Some("container command exited with status Some(101)".into()),
        };

        let settings = AiSettingsConfig::default();
        let rendered = render_job_analysis_prompt(workdir.path(), &settings, &context)
            .expect("render prompt");

        assert!(rendered.system.is_some());
        assert!(rendered.prompt.contains("unit-tests"));
        assert!(rendered.prompt.contains("error: linker failed"));
    }
}
134}