// zag_agent/providers/ollama.rs — Ollama provider implementation.
1// provider-updated: 2026-04-05
2use crate::agent::{Agent, ModelSize};
3use crate::output::AgentOutput;
4use crate::providers::common::CommonAgentState;
5use crate::session_log::HistoricalLogAdapter;
6use anyhow::Result;
7use async_trait::async_trait;
8use tokio::process::Command;
9
/// Default model family used when none is configured.
pub const DEFAULT_MODEL: &str = "qwen3.5";
/// Default parameter-size tag appended to the model name (see [`AVAILABLE_SIZES`]).
pub const DEFAULT_SIZE: &str = "9b";

/// Parameter sizes exposed for validation and help output.
pub const AVAILABLE_SIZES: &[&str] = &["0.8b", "2b", "4b", "9b", "27b", "35b", "122b"];
14
/// Agent provider backed by a local `ollama` CLI invocation.
pub struct Ollama {
    // Provider-agnostic state (model name, output format, system prompt,
    // sandbox config, working root, env vars) shared via providers::common.
    pub common: CommonAgentState,
    // Parameter-size tag joined to the model as "<model>:<size>", e.g. "9b".
    pub size: String,
}
19
20pub struct OllamaHistoricalLogAdapter;
21
22impl Ollama {
23    pub fn new() -> Self {
24        Self {
25            common: CommonAgentState::new(DEFAULT_MODEL),
26            size: DEFAULT_SIZE.to_string(),
27        }
28    }
29
30    pub fn set_size(&mut self, size: String) {
31        self.size = size;
32    }
33
34    /// Get the display string for the model (e.g., "qwen3.5:9b").
35    pub fn display_model(&self) -> String {
36        self.model_tag()
37    }
38
39    /// Get the full model tag (e.g., "qwen3.5:9b").
40    fn model_tag(&self) -> String {
41        format!("{}:{}", self.common.model, self.size)
42    }
43
44    /// Build the argument list for a run invocation.
45    fn build_run_args(&self, interactive: bool, prompt: Option<&str>) -> Vec<String> {
46        let mut args = vec!["run".to_string()];
47
48        if let Some(ref format) = self.common.output_format
49            && format == "json"
50        {
51            args.extend(["--format".to_string(), "json".to_string()]);
52        }
53
54        if !interactive {
55            // --nowordwrap for clean piped output
56            args.push("--nowordwrap".to_string());
57        }
58
59        args.push("--hidethinking".to_string());
60
61        args.push(self.model_tag());
62
63        // ollama run has no --system flag; prepend system prompt to user prompt
64        let effective_prompt = match (self.common.system_prompt.is_empty(), prompt) {
65            (false, Some(p)) => Some(format!("{}\n\n{}", self.common.system_prompt, p)),
66            (false, None) => Some(self.common.system_prompt.clone()),
67            (true, p) => p.map(String::from),
68        };
69
70        if let Some(p) = effective_prompt {
71            args.push(p);
72        }
73
74        args
75    }
76
77    /// Create a `Command` either directly or wrapped in sandbox.
78    ///
79    /// Ollama uses a custom sandbox implementation with shell escaping
80    /// instead of the standard `build_sandbox_command`.
81    fn make_command(&self, agent_args: Vec<String>) -> Command {
82        if let Some(ref sb) = self.common.sandbox {
83            // For ollama in sandbox, we use the shell template:
84            // docker sandbox run shell <workspace> -- -c "ollama run ..."
85            let shell_cmd = format!(
86                "ollama {}",
87                agent_args
88                    .iter()
89                    .map(|a| shell_escape(a))
90                    .collect::<Vec<_>>()
91                    .join(" ")
92            );
93            let mut std_cmd = std::process::Command::new("docker");
94            std_cmd.args([
95                "sandbox",
96                "run",
97                "--name",
98                &sb.name,
99                &sb.template,
100                &sb.workspace,
101                "--",
102                "-c",
103                &shell_cmd,
104            ]);
105            log::debug!(
106                "Sandbox command: docker sandbox run --name {} {} {} -- -c {:?}",
107                sb.name,
108                sb.template,
109                sb.workspace,
110                shell_cmd
111            );
112            Command::from(std_cmd)
113        } else {
114            let mut cmd = Command::new("ollama");
115            if let Some(ref root) = self.common.root {
116                cmd.current_dir(root);
117            }
118            cmd.args(&agent_args);
119            for (key, value) in &self.common.env_vars {
120                cmd.env(key, value);
121            }
122            cmd
123        }
124    }
125
126    async fn execute(
127        &self,
128        interactive: bool,
129        prompt: Option<&str>,
130    ) -> Result<Option<AgentOutput>> {
131        let agent_args = self.build_run_args(interactive, prompt);
132        log::debug!("Ollama command: ollama {}", agent_args.join(" "));
133        if !self.common.system_prompt.is_empty() {
134            log::debug!("Ollama system prompt: {}", self.common.system_prompt);
135        }
136        if let Some(p) = prompt {
137            log::debug!("Ollama user prompt: {}", p);
138        }
139        let mut cmd = self.make_command(agent_args);
140
141        if interactive {
142            CommonAgentState::run_interactive_command(&mut cmd, "Ollama").await?;
143            Ok(None)
144        } else {
145            self.common
146                .run_non_interactive_simple(&mut cmd, "Ollama")
147                .await
148        }
149    }
150
151    /// Resolve a size alias to the appropriate parameter size.
152    pub fn size_for_model_size(size: ModelSize) -> &'static str {
153        match size {
154            ModelSize::Small => "2b",
155            ModelSize::Medium => "9b",
156            ModelSize::Large => "35b",
157        }
158    }
159}
160
161/// Escape a string for shell use. Wraps in single quotes if it contains special chars.
162fn shell_escape(s: &str) -> String {
163    if s.contains(' ')
164        || s.contains('\'')
165        || s.contains('"')
166        || s.contains('\\')
167        || s.contains('$')
168        || s.contains('`')
169        || s.contains('!')
170    {
171        format!("'{}'", s.replace('\'', "'\\''"))
172    } else {
173        s.to_string()
174    }
175}
176
177#[cfg(test)]
178#[path = "ollama_tests.rs"]
179mod tests;
180
// `Default` delegates to `Ollama::new` (default model + default size).
impl Default for Ollama {
    fn default() -> Self {
        Self::new()
    }
}
186
187impl HistoricalLogAdapter for OllamaHistoricalLogAdapter {
188    fn backfill(&self, _root: Option<&str>) -> Result<Vec<crate::session_log::BackfilledSession>> {
189        Ok(Vec::new())
190    }
191}
192
193#[async_trait]
194impl Agent for Ollama {
195    fn name(&self) -> &str {
196        "ollama"
197    }
198
199    fn default_model() -> &'static str
200    where
201        Self: Sized,
202    {
203        DEFAULT_MODEL
204    }
205
206    fn model_for_size(size: ModelSize) -> &'static str
207    where
208        Self: Sized,
209    {
210        // For ollama, model_for_size returns the size parameter, not the model name
211        Self::size_for_model_size(size)
212    }
213
214    fn available_models() -> &'static [&'static str]
215    where
216        Self: Sized,
217    {
218        // Ollama accepts any model — return common sizes for validation/help
219        AVAILABLE_SIZES
220    }
221
222    /// Ollama uses open model names — skip strict validation.
223    fn validate_model(_model: &str, _agent_name: &str) -> Result<()>
224    where
225        Self: Sized,
226    {
227        Ok(())
228    }
229
230    crate::providers::common::impl_common_agent_setters!();
231
232    fn set_skip_permissions(&mut self, _skip: bool) {
233        // Ollama runs locally — no permission concept
234        self.common.skip_permissions = true;
235    }
236
237    crate::providers::common::impl_as_any!();
238
239    async fn run(&self, prompt: Option<&str>) -> Result<Option<AgentOutput>> {
240        self.execute(false, prompt).await
241    }
242
243    async fn run_interactive(&self, prompt: Option<&str>) -> Result<()> {
244        self.execute(true, prompt).await?;
245        Ok(())
246    }
247
248    async fn run_resume(&self, _session_id: Option<&str>, _last: bool) -> Result<()> {
249        anyhow::bail!("Ollama does not support session resume")
250    }
251
252    async fn cleanup(&self) -> Result<()> {
253        Ok(())
254    }
255}