// zag_agent/providers/ollama.rs
use crate::agent::{Agent, ModelSize};
use crate::output::AgentOutput;
use crate::providers::common::CommonAgentState;
use crate::session_log::HistoricalLogAdapter;
use anyhow::Result;
use async_trait::async_trait;
use tokio::process::Command;

// Model family used when the caller doesn't pick one explicitly.
pub const DEFAULT_MODEL: &str = "qwen3.5";
// Parameter-size tag combined with the model name as "<model>:<size>" (see model_tag()).
pub const DEFAULT_SIZE: &str = "9b";

// Size tags a user may select; exposed to the CLI via available_models().
pub const AVAILABLE_SIZES: &[&str] = &["0.8b", "2b", "4b", "9b", "27b", "35b", "122b"];
14
/// Agent provider that shells out to the local `ollama` CLI.
pub struct Ollama {
    // State shared across providers (model, prompts, env vars, sandbox, root, ...).
    pub common: CommonAgentState,
    // Parameter-size tag (e.g. "9b"); joined with the model name as "<model>:<size>".
    pub size: String,
}

/// Log adapter for Ollama; its backfill yields no sessions (see impl below).
pub struct OllamaHistoricalLogAdapter;
21
22impl Ollama {
23 pub fn new() -> Self {
24 Self {
25 common: CommonAgentState::new(DEFAULT_MODEL),
26 size: DEFAULT_SIZE.to_string(),
27 }
28 }
29
30 pub fn set_size(&mut self, size: String) {
31 self.size = size;
32 }
33
34 pub fn display_model(&self) -> String {
36 self.model_tag()
37 }
38
39 fn model_tag(&self) -> String {
41 format!("{}:{}", self.common.model, self.size)
42 }
43
44 fn build_run_args(&self, interactive: bool, prompt: Option<&str>) -> Vec<String> {
46 let mut args = vec!["run".to_string()];
47
48 if let Some(ref format) = self.common.output_format
49 && format == "json"
50 {
51 args.extend(["--format".to_string(), "json".to_string()]);
52 }
53
54 if !interactive {
55 args.push("--nowordwrap".to_string());
57 }
58
59 args.push("--hidethinking".to_string());
60
61 args.push(self.model_tag());
62
63 let effective_prompt = match (self.common.system_prompt.is_empty(), prompt) {
65 (false, Some(p)) => Some(format!("{}\n\n{}", self.common.system_prompt, p)),
66 (false, None) => Some(self.common.system_prompt.clone()),
67 (true, p) => p.map(String::from),
68 };
69
70 if let Some(p) = effective_prompt {
71 args.push(p);
72 }
73
74 args
75 }
76
77 fn make_command(&self, agent_args: Vec<String>) -> Command {
82 if let Some(ref sb) = self.common.sandbox {
83 let shell_cmd = format!(
86 "ollama {}",
87 agent_args
88 .iter()
89 .map(|a| shell_escape(a))
90 .collect::<Vec<_>>()
91 .join(" ")
92 );
93 let mut std_cmd = std::process::Command::new("docker");
94 std_cmd.args([
95 "sandbox",
96 "run",
97 "--name",
98 &sb.name,
99 &sb.template,
100 &sb.workspace,
101 "--",
102 "-c",
103 &shell_cmd,
104 ]);
105 log::debug!(
106 "Sandbox command: docker sandbox run --name {} {} {} -- -c {:?}",
107 sb.name,
108 sb.template,
109 sb.workspace,
110 shell_cmd
111 );
112 Command::from(std_cmd)
113 } else {
114 let mut cmd = Command::new("ollama");
115 if let Some(ref root) = self.common.root {
116 cmd.current_dir(root);
117 }
118 cmd.args(&agent_args);
119 for (key, value) in &self.common.env_vars {
120 cmd.env(key, value);
121 }
122 cmd
123 }
124 }
125
126 async fn execute(
127 &self,
128 interactive: bool,
129 prompt: Option<&str>,
130 ) -> Result<Option<AgentOutput>> {
131 let agent_args = self.build_run_args(interactive, prompt);
132 log::debug!("Ollama command: ollama {}", agent_args.join(" "));
133 if !self.common.system_prompt.is_empty() {
134 log::debug!("Ollama system prompt: {}", self.common.system_prompt);
135 }
136 if let Some(p) = prompt {
137 log::debug!("Ollama user prompt: {}", p);
138 }
139 let mut cmd = self.make_command(agent_args);
140
141 if interactive {
142 CommonAgentState::run_interactive_command(&mut cmd, "Ollama").await?;
143 Ok(None)
144 } else {
145 self.common
146 .run_non_interactive_simple(&mut cmd, "Ollama")
147 .await
148 }
149 }
150
151 pub fn size_for_model_size(size: ModelSize) -> &'static str {
153 match size {
154 ModelSize::Small => "2b",
155 ModelSize::Medium => "9b",
156 ModelSize::Large => "35b",
157 }
158 }
159}
160
/// Quote `s` for safe interpolation into a POSIX `sh -c` command line.
///
/// Uses an allowlist instead of the previous denylist: a string passes
/// through unquoted only when non-empty and composed entirely of ASCII
/// alphanumerics or punctuation no POSIX shell interprets. Everything else
/// is wrapped in single quotes, with embedded single quotes rewritten as
/// `'\''`. This closes the gaps in the old denylist, which missed
/// metacharacters such as `|`, `;`, `&`, `<`, `>`, `(`, `)`, `*`, `?`,
/// `~`, `#`, and whitespace other than the space character — and the empty
/// string, which must become `''` or the argument disappears entirely
/// after word splitting.
fn shell_escape(s: &str) -> String {
    // Characters that never need quoting in a POSIX shell word.
    fn is_safe(c: char) -> bool {
        c.is_ascii_alphanumeric()
            || matches!(c, '-' | '_' | '.' | '/' | ':' | '=' | ',' | '@' | '+' | '%')
    }

    if !s.is_empty() && s.chars().all(is_safe) {
        s.to_string()
    } else {
        format!("'{}'", s.replace('\'', "'\\''"))
    }
}
176
// Unit tests live in a sibling file; compiled only for test builds.
#[cfg(test)]
#[path = "ollama_tests.rs"]
mod tests;
180
impl Default for Ollama {
    /// Equivalent to [`Ollama::new`].
    fn default() -> Self {
        Self::new()
    }
}
186
impl HistoricalLogAdapter for OllamaHistoricalLogAdapter {
    /// Ollama has no historical session logs, so backfill yields nothing.
    fn backfill(&self, _root: Option<&str>) -> Result<Vec<crate::session_log::BackfilledSession>> {
        Ok(Vec::new())
    }
}
192
193#[async_trait]
194impl Agent for Ollama {
195 fn name(&self) -> &str {
196 "ollama"
197 }
198
199 fn default_model() -> &'static str
200 where
201 Self: Sized,
202 {
203 DEFAULT_MODEL
204 }
205
206 fn model_for_size(size: ModelSize) -> &'static str
207 where
208 Self: Sized,
209 {
210 Self::size_for_model_size(size)
212 }
213
214 fn available_models() -> &'static [&'static str]
215 where
216 Self: Sized,
217 {
218 AVAILABLE_SIZES
220 }
221
222 fn validate_model(_model: &str, _agent_name: &str) -> Result<()>
224 where
225 Self: Sized,
226 {
227 Ok(())
228 }
229
230 crate::providers::common::impl_common_agent_setters!();
231
232 fn set_skip_permissions(&mut self, _skip: bool) {
233 self.common.skip_permissions = true;
235 }
236
237 crate::providers::common::impl_as_any!();
238
239 async fn run(&self, prompt: Option<&str>) -> Result<Option<AgentOutput>> {
240 self.execute(false, prompt).await
241 }
242
243 async fn run_interactive(&self, prompt: Option<&str>) -> Result<()> {
244 self.execute(true, prompt).await?;
245 Ok(())
246 }
247
248 async fn run_resume(&self, _session_id: Option<&str>, _last: bool) -> Result<()> {
249 anyhow::bail!("Ollama does not support session resume")
250 }
251
252 async fn cleanup(&self) -> Result<()> {
253 Ok(())
254 }
255}