// batty_cli/team/prompt_compose.rs
1use std::path::Path;
2
3use super::config::RoleType;
4use super::hierarchy::MemberInstance;
5
// Posture layers: guidance on how a member paces and structures its work.
// Embedded at compile time from the prompt template directory.
const POSTURE_DEEP_WORKER: &str = include_str!("templates/postures/deep_worker.md");
const POSTURE_FAST_LANE: &str = include_str!("templates/postures/fast_lane.md");
const POSTURE_ORCHESTRATOR: &str = include_str!("templates/postures/orchestrator.md");

// Model-class layers: guidance keyed by model capability tier.
const MODEL_CLASS_FRONTIER: &str = include_str!("templates/model_classes/frontier.md");
const MODEL_CLASS_STANDARD: &str = include_str!("templates/model_classes/standard.md");
const MODEL_CLASS_FAST: &str = include_str!("templates/model_classes/fast.md");

// Provider overlays: short inline guidance blocks appended per agent provider.
const PROVIDER_CLAUDE: &str = "## Provider: Claude\n- Prefer explicit delegation and clear acceptance criteria when coordinating work\n- Use larger synthesis passes when the full local context is available\n";
const PROVIDER_CODEX: &str = "## Provider: Codex\n- Work in explicit implementation steps with concrete verification after each meaningful change\n- Prefer reading the directly relevant files before editing and keep progress updates factual\n";
const PROVIDER_GEMINI: &str = "## Provider: Gemini\n- Keep tool use disciplined and summarize conclusions before moving to the next step\n- When a task depends on uncertain code paths, verify them directly instead of assuming\n";
17
/// Optional prompt layers resolved for a single team member.
///
/// Each field holds a layer *key* (e.g. "deep_worker", "standard", "codex");
/// `None` means that layer is omitted when the prompt is composed.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PromptContext {
    // Posture key — see `templates/postures/` (e.g. "deep_worker").
    pub posture: Option<String>,
    // Model-class key: "frontier", "standard", or "fast".
    pub model_class: Option<String>,
    // Provider overlay key: "claude", "codex", or "gemini".
    pub provider_overlay: Option<String>,
}
24
25pub fn compose_prompt(
26    base_role: &str,
27    posture: Option<&str>,
28    model_class: Option<&str>,
29    provider_overlay: Option<&str>,
30) -> String {
31    let mut layers = vec![base_role.trim_end().to_string()];
32
33    if let Some(text) = posture.and_then(load_posture) {
34        layers.push(text.to_string());
35    }
36    if let Some(text) = model_class.and_then(load_model_class) {
37        layers.push(text.to_string());
38    }
39    if let Some(text) = provider_overlay.and_then(load_provider_overlay) {
40        layers.push(text.to_string());
41    }
42
43    layers.join("\n\n")
44}
45
46pub fn render_member_prompt(
47    member: &MemberInstance,
48    config_dir: &Path,
49    context: &PromptContext,
50) -> String {
51    let path = config_dir.join(
52        member
53            .prompt
54            .as_deref()
55            .unwrap_or(default_prompt_file(member.role_type)),
56    );
57    let content = std::fs::read_to_string(&path).unwrap_or_else(|_| {
58        format!(
59            "You are {} (role: {:?}). Work on assigned tasks.",
60            member.name, member.role_type
61        )
62    });
63    let base = content
64        .replace("{{member_name}}", &member.name)
65        .replace("{{role_name}}", &member.role_name)
66        .replace(
67            "{{reports_to}}",
68            member.reports_to.as_deref().unwrap_or("none"),
69        );
70    compose_prompt(
71        &base,
72        context.posture.as_deref(),
73        context.model_class.as_deref(),
74        context.provider_overlay.as_deref(),
75    )
76}
77
78pub fn resolve_prompt_context(member: &MemberInstance) -> PromptContext {
79    let provider_overlay = member
80        .provider_overlay
81        .clone()
82        .or_else(|| infer_provider_overlay(member.agent.as_deref()));
83    let model_class = member.model_class.clone().or_else(|| {
84        infer_model_class(member.model.as_deref(), member.agent.as_deref()).map(str::to_string)
85    });
86
87    PromptContext {
88        posture: member.posture.clone(),
89        model_class,
90        provider_overlay,
91    }
92}
93
94pub fn default_prompt_file(role_type: RoleType) -> &'static str {
95    match role_type {
96        RoleType::Architect => "architect.md",
97        RoleType::Manager => "manager.md",
98        RoleType::Engineer => "engineer.md",
99        RoleType::User => "architect.md",
100    }
101}
102
103pub fn infer_provider_overlay(agent: Option<&str>) -> Option<String> {
104    match normalize_value(agent?) {
105        value if value.contains("claude") => Some("claude".to_string()),
106        value if value.contains("codex") || value.contains("gpt") => Some("codex".to_string()),
107        value if value.contains("gemini") => Some("gemini".to_string()),
108        _ => None,
109    }
110}
111
112pub fn infer_model_class(model: Option<&str>, agent: Option<&str>) -> Option<&'static str> {
113    let source = model.or(agent)?;
114    let value = normalize_value(source);
115
116    if value.starts_with("claude-opus-") || value == "gemini-2.5-pro" {
117        return Some("frontier");
118    }
119    if value.starts_with("claude-sonnet-")
120        || value == "gpt-5.4"
121        || value == "gpt-5.3"
122        || value == "claude"
123        || value == "claude-code"
124        || value == "codex"
125        || value == "codex-cli"
126    {
127        return Some("standard");
128    }
129    if value.starts_with("claude-haiku-")
130        || value == "gemini-2.5-flash"
131        || value == "gpt-5.2-mini"
132        || value == "haiku"
133    {
134        return Some("fast");
135    }
136
137    None
138}
139
/// Canonicalize an identifier for comparison: strip surrounding whitespace
/// and lowercase ASCII letters (non-ASCII characters pass through unchanged).
fn normalize_value(value: &str) -> String {
    let trimmed = value.trim();
    let mut out = String::with_capacity(trimmed.len());
    for c in trimmed.chars() {
        out.push(c.to_ascii_lowercase());
    }
    out
}
143
144fn load_posture(name: &str) -> Option<&'static str> {
145    match normalize_value(name).as_str() {
146        "deep_worker" => Some(POSTURE_DEEP_WORKER),
147        "fast_lane" => Some(POSTURE_FAST_LANE),
148        "orchestrator" => Some(POSTURE_ORCHESTRATOR),
149        _ => None,
150    }
151}
152
153fn load_model_class(name: &str) -> Option<&'static str> {
154    match normalize_value(name).as_str() {
155        "frontier" => Some(MODEL_CLASS_FRONTIER),
156        "standard" => Some(MODEL_CLASS_STANDARD),
157        "fast" => Some(MODEL_CLASS_FAST),
158        _ => None,
159    }
160}
161
162fn load_provider_overlay(name: &str) -> Option<&'static str> {
163    match normalize_value(name).as_str() {
164        "claude" => Some(PROVIDER_CLAUDE),
165        "codex" => Some(PROVIDER_CODEX),
166        "gemini" => Some(PROVIDER_GEMINI),
167        _ => None,
168    }
169}
170
#[cfg(test)]
mod tests {
    use super::*;
    use crate::team::config::RoleType;
    use crate::team::hierarchy::MemberInstance;

    // Layers requested by key must all appear after the base text.
    // NOTE(review): the "## Posture/Model Class" headings are assumed to be
    // the first lines of the corresponding template files — confirm against
    // templates/ if these assertions ever break.
    #[test]
    fn compose_prompt_appends_requested_layers() {
        let prompt = compose_prompt("Base", Some("deep_worker"), Some("standard"), Some("codex"));
        assert!(prompt.starts_with("Base"));
        assert!(prompt.contains("## Posture: Deep Worker"));
        assert!(prompt.contains("## Model Class: Standard"));
        assert!(prompt.contains("## Provider: Codex"));
    }

    // Model name drives the class when present; the agent name is the
    // fallback (e.g. bare "codex" maps to "standard").
    #[test]
    fn infer_model_class_from_model_or_agent() {
        assert_eq!(
            infer_model_class(Some("claude-opus-4-1"), None),
            Some("frontier")
        );
        assert_eq!(infer_model_class(Some("gpt-5.4"), None), Some("standard"));
        assert_eq!(
            infer_model_class(Some("gemini-2.5-flash"), None),
            Some("fast")
        );
        assert_eq!(infer_model_class(None, Some("codex")), Some("standard"));
    }

    // With no explicit model_class/provider_overlay configured, both are
    // inferred from the member's model and agent fields.
    #[test]
    fn resolve_prompt_context_infers_model_class_and_provider_from_member() {
        let member = MemberInstance {
            name: "eng-1-1".to_string(),
            role_name: "engineer".to_string(),
            role_type: RoleType::Engineer,
            agent: Some("claude".to_string()),
            model: Some("claude-opus-4-1".to_string()),
            prompt: None,
            posture: Some("deep_worker".to_string()),
            model_class: None,
            provider_overlay: None,
            reports_to: Some("manager".to_string()),
            use_worktrees: true,
        };

        let context = resolve_prompt_context(&member);

        assert_eq!(context.posture.as_deref(), Some("deep_worker"));
        assert_eq!(context.model_class.as_deref(), Some("frontier"));
        assert_eq!(context.provider_overlay.as_deref(), Some("claude"));
    }

    // End-to-end: custom prompt file is read from the config dir, template
    // variables are substituted, and inferred layers are appended.
    #[test]
    fn render_member_prompt_composes_layers_and_substitutes_variables() {
        let tmp = tempfile::tempdir().unwrap();
        std::fs::write(
            tmp.path().join("batty_engineer.md"),
            "Hello {{member_name}} from {{role_name}} -> {{reports_to}}",
        )
        .unwrap();
        let member = MemberInstance {
            name: "eng-1-1".to_string(),
            role_name: "engineer".to_string(),
            role_type: RoleType::Engineer,
            agent: Some("codex".to_string()),
            model: Some("gpt-5.4".to_string()),
            prompt: Some("batty_engineer.md".to_string()),
            posture: Some("deep_worker".to_string()),
            model_class: None,
            provider_overlay: None,
            reports_to: Some("manager".to_string()),
            use_worktrees: true,
        };

        let prompt = render_member_prompt(&member, tmp.path(), &resolve_prompt_context(&member));

        assert!(prompt.contains("Hello eng-1-1 from engineer -> manager"));
        assert!(prompt.contains("## Posture: Deep Worker"));
        assert!(prompt.contains("## Model Class: Standard"));
        assert!(prompt.contains("## Provider: Codex"));
    }
}