// zag_agent/providers/ollama.rs
use crate::agent::{Agent, ModelSize};
use crate::output::AgentOutput;
use crate::providers::common::CommonAgentState;
use crate::session_log::HistoricalLogAdapter;
use anyhow::Result;
use async_trait::async_trait;
use tokio::process::Command;
9
/// Model family used when the caller does not pick one explicitly.
pub const DEFAULT_MODEL: &str = "qwen3.5";
/// Parameter-size suffix used when the caller does not pick one explicitly.
pub const DEFAULT_SIZE: &str = "9b";

/// Size variants offered for the default model; these double as the
/// provider's "available models" list (see `available_models`).
pub const AVAILABLE_SIZES: &[&str] = &["0.8b", "2b", "4b", "9b", "27b", "35b", "122b"];
14
/// Agent provider that shells out to the local `ollama` CLI.
///
/// The full model tag passed to `ollama run` is `"{common.model}:{size}"`
/// (see `model_tag`), so model family and parameter size are tracked
/// separately.
pub struct Ollama {
    // State shared by all providers (model name, prompts, env, sandbox, …).
    pub common: CommonAgentState,
    // Parameter-size suffix, e.g. "9b"; one of AVAILABLE_SIZES by convention.
    pub size: String,
}
19
20pub struct OllamaHistoricalLogAdapter;
21
impl Ollama {
    /// Creates a provider with the default model family and size.
    pub fn new() -> Self {
        Self {
            common: CommonAgentState::new(DEFAULT_MODEL),
            size: DEFAULT_SIZE.to_string(),
        }
    }

    /// Overrides the parameter-size suffix (e.g. "27b"). Not validated here;
    /// an unknown size surfaces as an `ollama` CLI error at run time.
    pub fn set_size(&mut self, size: String) {
        self.size = size;
    }

    /// Human-readable model identifier; same as the tag passed to `ollama run`.
    pub fn display_model(&self) -> String {
        self.model_tag()
    }

    /// Full tag in Ollama's `model:size` form, e.g. "qwen3.5:9b".
    fn model_tag(&self) -> String {
        format!("{}:{}", self.common.model, self.size)
    }

    /// Builds the argument vector for `ollama run`.
    ///
    /// Order matters to the CLI: flags first, then the model tag, then an
    /// optional `--`-separated prompt. The system prompt (when set) is
    /// prepended to the user prompt because the `run` subcommand has no
    /// separate system-prompt flag.
    fn build_run_args(&self, interactive: bool, prompt: Option<&str>) -> Vec<String> {
        let mut args = vec!["run".to_string()];

        // Only "json" is forwarded; other output formats are ignored here.
        if let Some(ref format) = self.common.output_format
            && format == "json"
        {
            args.extend(["--format".to_string(), "json".to_string()]);
        }

        // Wrapping is left to the caller's pipeline in non-interactive mode.
        if !interactive {
            args.push("--nowordwrap".to_string());
        }

        // Suppress chain-of-thought output in all modes.
        args.push("--hidethinking".to_string());

        args.push(self.model_tag());

        // Merge system and user prompts:
        //   both present  -> system + blank line + user
        //   system only   -> system alone
        //   user only / neither -> user as-is / None
        let effective_prompt = match (self.common.system_prompt.is_empty(), prompt) {
            (false, Some(p)) => Some(format!("{}\n\n{}", self.common.system_prompt, p)),
            (false, None) => Some(self.common.system_prompt.clone()),
            (true, p) => p.map(String::from),
        };

        if let Some(p) = effective_prompt {
            // "--" keeps a prompt that begins with '-' from being parsed as a flag.
            args.push("--".to_string());
            args.push(p);
        }

        args
    }

    /// Wraps the argument vector in a runnable [`Command`].
    ///
    /// With a sandbox configured, the whole invocation is re-quoted into a
    /// single shell string and handed to `docker sandbox run … -- -c <cmd>`;
    /// otherwise `ollama` is spawned directly with the configured working
    /// directory and extra environment variables.
    fn make_command(&self, agent_args: Vec<String>) -> Command {
        if let Some(ref sb) = self.common.sandbox {
            // Re-assemble into one shell command line; shell_escape protects
            // each argument (notably the prompt) from word-splitting and
            // metacharacter interpretation inside the sandbox shell.
            let shell_cmd = format!(
                "ollama {}",
                agent_args
                    .iter()
                    .map(|a| shell_escape(a))
                    .collect::<Vec<_>>()
                    .join(" ")
            );
            let mut std_cmd = std::process::Command::new("docker");
            std_cmd.args([
                "sandbox",
                "run",
                "--name",
                &sb.name,
                &sb.template,
                &sb.workspace,
                "--",
                "-c",
                &shell_cmd,
            ]);
            log::debug!(
                "Sandbox command: docker sandbox run --name {} {} {} -- -c {:?}",
                sb.name,
                sb.template,
                sb.workspace,
                shell_cmd
            );
            Command::from(std_cmd)
        } else {
            let mut cmd = Command::new("ollama");
            // Working directory only applies to the direct (non-sandbox) path;
            // the sandbox template presumably fixes its own cwd.
            if let Some(ref root) = self.common.root {
                cmd.current_dir(root);
            }
            cmd.args(&agent_args);
            for (key, value) in &self.common.env_vars {
                cmd.env(key, value);
            }
            cmd
        }
    }

    /// Shared driver behind `run` / `run_interactive`.
    ///
    /// Interactive mode inherits the terminal and yields no captured output
    /// (`Ok(None)`); non-interactive mode delegates capture to the common
    /// helper and returns its [`AgentOutput`].
    async fn execute(
        &self,
        interactive: bool,
        prompt: Option<&str>,
    ) -> Result<Option<AgentOutput>> {
        let agent_args = self.build_run_args(interactive, prompt);
        log::debug!("Ollama command: ollama {}", agent_args.join(" "));
        if !self.common.system_prompt.is_empty() {
            log::debug!("Ollama system prompt: {}", self.common.system_prompt);
        }
        if let Some(p) = prompt {
            log::debug!("Ollama user prompt: {p}");
        }
        let mut cmd = self.make_command(agent_args);

        if interactive {
            CommonAgentState::run_interactive_command_with_hook(
                &mut cmd,
                "Ollama",
                self.common.on_spawn_hook.as_ref(),
            )
            .await?;
            Ok(None)
        } else {
            self.common
                .run_non_interactive_simple(&mut cmd, "Ollama")
                .await
        }
    }

    /// Maps the abstract small/medium/large request onto concrete
    /// parameter-size tags for this model family.
    pub fn size_for_model_size(size: ModelSize) -> &'static str {
        match size {
            ModelSize::Small => "2b",
            ModelSize::Medium => "9b",
            ModelSize::Large => "35b",
        }
    }
}
168
/// Quotes `s` for safe inclusion in a POSIX shell command line.
///
/// The result is fed into `sh -c`-style execution inside the sandbox (see
/// `make_command`), so under-quoting is a command-injection risk. Instead of
/// enumerating dangerous characters (the previous approach missed `;`, `|`,
/// `&`, `<`, `>`, globs, and newlines), we allow-list bytes that are safe
/// unquoted and single-quote anything else. Inside single quotes the only
/// special character is `'` itself, escaped as `'\''` (close quote, literal
/// quote, reopen quote).
fn shell_escape(s: &str) -> String {
    // An empty argument must be quoted or it disappears entirely from the
    // reconstructed command line.
    if s.is_empty() {
        return "''".to_string();
    }
    // Conservative safe set: alphanumerics plus punctuation with no meaning
    // to the shell in any position.
    let is_safe = s.bytes().all(|b| {
        b.is_ascii_alphanumeric()
            || matches!(b, b'-' | b'_' | b'.' | b'/' | b':' | b'=' | b'@' | b',' | b'+' | b'%')
    });
    if is_safe {
        s.to_string()
    } else {
        format!("'{}'", s.replace('\'', "'\\''"))
    }
}
184
#[cfg(test)]
#[path = "ollama_tests.rs"]
mod tests;
188
189impl Default for Ollama {
190 fn default() -> Self {
191 Self::new()
192 }
193}
194
195impl HistoricalLogAdapter for OllamaHistoricalLogAdapter {
196 fn backfill(&self, _root: Option<&str>) -> Result<Vec<crate::session_log::BackfilledSession>> {
197 Ok(Vec::new())
198 }
199}
200
/// Wires [`Ollama`] into the generic [`Agent`] dispatch. Most setters come
/// from the shared provider macros; only the Ollama-specific behavior is
/// written out by hand.
#[async_trait]
impl Agent for Ollama {
    /// Stable provider identifier used for registry lookup and logging.
    fn name(&self) -> &str {
        "ollama"
    }

    /// Default model family; the size suffix is tracked separately on the
    /// struct (see `Ollama::size`).
    fn default_model() -> &'static str
    where
        Self: Sized,
    {
        DEFAULT_MODEL
    }

    /// For this provider, "model for size" resolves to a parameter-size tag
    /// (e.g. "9b"), not a different model name.
    fn model_for_size(size: ModelSize) -> &'static str
    where
        Self: Sized,
    {
        Self::size_for_model_size(size)
    }

    /// The advertised "models" are really the size variants of the default
    /// model family.
    fn available_models() -> &'static [&'static str]
    where
        Self: Sized,
    {
        AVAILABLE_SIZES
    }

    /// Accepts any model string: unknown tags surface as `ollama` CLI errors
    /// at run time, so no client-side validation is done here.
    fn validate_model(_model: &str, _agent_name: &str) -> Result<()>
    where
        Self: Sized,
    {
        Ok(())
    }

    // Generates the setter methods shared by every provider.
    crate::providers::common::impl_common_agent_setters!();

    // NOTE(review): `_skip` is ignored and `skip_permissions` is hardcoded to
    // true. The underscore prefix and the explicit override of the common
    // setter suggest this is deliberate (a local model has no permission
    // prompts), but it means `set_skip_permissions(false)` still enables the
    // flag — confirm intent against the other providers.
    fn set_skip_permissions(&mut self, _skip: bool) {
        self.common.skip_permissions = true;
    }

    crate::providers::common::impl_as_any!();

    /// Non-interactive run: output is captured and returned.
    async fn run(&self, prompt: Option<&str>) -> Result<Option<AgentOutput>> {
        self.execute(false, prompt).await
    }

    /// Interactive run: the terminal is attached; nothing is captured.
    async fn run_interactive(&self, prompt: Option<&str>) -> Result<()> {
        self.execute(true, prompt).await?;
        Ok(())
    }

    /// The Ollama CLI has no session persistence, so resume always fails.
    async fn run_resume(&self, _session_id: Option<&str>, _last: bool) -> Result<()> {
        anyhow::bail!("Ollama does not support session resume")
    }

    /// No resources to release for this provider.
    async fn cleanup(&self) -> Result<()> {
        Ok(())
    }
}