// embacle/copilot.rs
// ABOUTME: GitHub Copilot CLI runner implementing the `LlmProvider` trait
// ABOUTME: Wraps the `copilot` CLI with plain-text output parsing and streaming support
//
// SPDX-License-Identifier: Apache-2.0
// Copyright (c) 2026 dravr.ai

use std::any::Any;
use std::io;
use std::process::Stdio;
use std::str;
use std::time::Duration;

use async_trait::async_trait;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio_stream::wrappers::LinesStream;
use tokio_stream::StreamExt;
use tracing::{debug, instrument, warn};

use crate::config::RunnerConfig;
use crate::process::{read_stderr_capped, run_cli_command};
use crate::prompt::build_prompt;
use crate::sandbox::{apply_sandbox, build_policy};
use crate::stream::{GuardedStream, MAX_STREAMING_STDERR_BYTES};
use crate::types::{
    ChatRequest, ChatResponse, ChatStream, LlmCapabilities, LlmProvider, RunnerError, StreamChunk,
};
/// Hard cap on captured output for one Copilot CLI invocation (50 MiB).
const MAX_OUTPUT_BYTES: usize = 50 * 1024 * 1024;

/// How long a `--version` health check may run (10 seconds).
const HEALTH_CHECK_TIMEOUT: Duration = Duration::from_secs(10);

/// Output cap for the health check (4 KiB).
const HEALTH_CHECK_MAX_OUTPUT: usize = 4096;

/// Model used when the configuration does not name one.
const DEFAULT_MODEL: &str = "claude-opus-4.6";

/// Static model list used when `gh copilot models` discovery fails.
const FALLBACK_MODELS: &[&str] = &[
    "claude-sonnet-4.6",
    "claude-opus-4.6",
    "claude-opus-4.6-fast",
    "claude-sonnet-4.5",
    "claude-haiku-4.5",
    "claude-sonnet-4",
    "gpt-5.2-codex",
    "gpt-5.2",
    "gpt-5.1-codex",
    "gpt-5.1",
    "gpt-5-mini",
    "gpt-4.1",
    "gemini-3-pro-preview",
];

58/// Discover available Copilot models by running `gh copilot models`.
59///
60/// Uses a blocking subprocess call (safe at construction time before the async
61/// runtime is saturated). Returns `None` if `gh` is not found, the command
62/// fails, or the output cannot be parsed into a non-empty model list.
63fn discover_copilot_models_sync() -> Option<Vec<String>> {
64    let output = std::process::Command::new("gh")
65        .args(["copilot", "models"])
66        .stdout(std::process::Stdio::piped())
67        .stderr(std::process::Stdio::null())
68        .output()
69        .ok()?;
70
71    if !output.status.success() {
72        debug!(
73            exit_code = output.status.code().unwrap_or(-1),
74            "gh copilot models failed, falling back to static list"
75        );
76        return None;
77    }
78
79    let stdout = str::from_utf8(&output.stdout).ok()?;
80    let models: Vec<String> = stdout
81        .lines()
82        .map(str::trim)
83        .filter(|line| !line.is_empty())
84        .map(ToOwned::to_owned)
85        .collect();
86
87    if models.is_empty() {
88        debug!("gh copilot models returned empty output, falling back to static list");
89        return None;
90    }
91
92    debug!(
93        count = models.len(),
94        "Discovered available Copilot models via gh copilot models"
95    );
96    Some(models)
97}
98
99/// GitHub Copilot CLI runner
100///
101/// Implements `LlmProvider` by delegating to the `copilot` binary in
102/// non-interactive mode (`-p`). Copilot CLI outputs plain text (no JSON
103/// structure), so the raw stdout is captured as the response content.
104/// System messages are embedded into the user prompt since Copilot CLI
105/// has no `--system-prompt` flag.
106pub struct CopilotRunner {
107    config: RunnerConfig,
108    default_model: String,
109    available_models: Vec<String>,
110}
111
112impl CopilotRunner {
113    /// Create a new Copilot CLI runner with the given configuration.
114    ///
115    /// Attempts to discover available models by running `gh copilot models`.
116    /// Falls back to a static list if discovery fails.
117    #[must_use]
118    pub fn new(config: RunnerConfig) -> Self {
119        let default_model = config
120            .model
121            .clone()
122            .unwrap_or_else(|| DEFAULT_MODEL.to_owned());
123        let available_models = discover_copilot_models_sync()
124            .unwrap_or_else(|| FALLBACK_MODELS.iter().map(|s| (*s).to_owned()).collect());
125        Self {
126            config,
127            default_model,
128            available_models,
129        }
130    }
131
132    /// Build the base command with common arguments
133    fn build_command(&self, prompt: &str, silent: bool) -> Command {
134        let mut cmd = Command::new(&self.config.binary_path);
135
136        // Non-interactive prompt mode
137        cmd.args(["-p", prompt]);
138
139        let model = self
140            .config
141            .model
142            .as_deref()
143            .unwrap_or_else(|| self.default_model());
144        cmd.args(["--model", model]);
145
146        // Required for non-interactive mode
147        cmd.arg("--allow-all-tools");
148
149        // Disable MCP servers to force text-based tool catalog usage
150        cmd.arg("--disable-builtin-mcps");
151
152        // Prevent reading project AGENTS.md instructions
153        cmd.arg("--no-custom-instructions");
154
155        // Autonomous mode — no interactive prompts
156        cmd.arg("--no-ask-user");
157
158        // Clean text output
159        cmd.arg("--no-color");
160
161        if silent {
162            // Output only the agent response (no stats footer)
163            cmd.arg("-s");
164        }
165
166        for arg in &self.config.extra_args {
167            cmd.arg(arg);
168        }
169
170        if let Ok(policy) = build_policy(
171            self.config.working_directory.as_deref(),
172            &self.config.allowed_env_keys,
173        ) {
174            apply_sandbox(&mut cmd, &policy);
175        }
176
177        cmd
178    }
179
180    /// Parse plain-text output into a `ChatResponse`
181    fn parse_response(raw: &[u8]) -> Result<ChatResponse, RunnerError> {
182        let content = str::from_utf8(raw)
183            .map_err(|e| {
184                RunnerError::internal(format!("Copilot CLI output is not valid UTF-8: {e}"))
185            })?
186            .trim()
187            .to_owned();
188
189        Ok(ChatResponse {
190            content,
191            model: "copilot".to_owned(),
192            usage: None,
193            finish_reason: Some("stop".to_owned()),
194            warnings: None,
195        })
196    }
197}
198
199#[async_trait]
200impl LlmProvider for CopilotRunner {
201    fn name(&self) -> &'static str {
202        "copilot"
203    }
204
205    fn display_name(&self) -> &'static str {
206        "GitHub Copilot CLI"
207    }
208
209    fn capabilities(&self) -> LlmCapabilities {
210        // Copilot CLI has no --system-prompt flag; system messages are
211        // embedded into the prompt via build_prompt(). Streaming is
212        // supported by reading stdout line by line.
213        LlmCapabilities::STREAMING
214    }
215
216    fn default_model(&self) -> &str {
217        &self.default_model
218    }
219
220    fn available_models(&self) -> &[String] {
221        &self.available_models
222    }
223
224    #[instrument(skip_all, fields(runner = "copilot"))]
225    async fn complete(&self, request: &ChatRequest) -> Result<ChatResponse, RunnerError> {
226        let prompt = build_prompt(&request.messages);
227        let mut cmd = self.build_command(&prompt, true);
228
229        let output = run_cli_command(&mut cmd, self.config.timeout, MAX_OUTPUT_BYTES).await?;
230
231        if output.exit_code != 0 {
232            let stderr = String::from_utf8_lossy(&output.stderr);
233            let stdout = String::from_utf8_lossy(&output.stdout);
234            warn!(
235                exit_code = output.exit_code,
236                stdout_len = output.stdout.len(),
237                stderr_len = output.stderr.len(),
238                stdout_preview = %stdout.chars().take(500).collect::<String>(),
239                stderr_preview = %stderr.chars().take(500).collect::<String>(),
240                "Copilot CLI failed"
241            );
242            let detail = if stderr.is_empty() { &stdout } else { &stderr };
243            return Err(RunnerError::external_service(
244                "copilot",
245                format!("copilot exited with code {}: {detail}", output.exit_code),
246            ));
247        }
248
249        Self::parse_response(&output.stdout)
250    }
251
252    #[instrument(skip_all, fields(runner = "copilot"))]
253    async fn complete_stream(&self, request: &ChatRequest) -> Result<ChatStream, RunnerError> {
254        let prompt = build_prompt(&request.messages);
255        let mut cmd = self.build_command(&prompt, true);
256
257        // Enable streaming
258        cmd.args(["--stream", "on"]);
259
260        cmd.stdout(Stdio::piped());
261        cmd.stderr(Stdio::piped());
262
263        let mut child = cmd.spawn().map_err(|e| {
264            RunnerError::internal(format!("Failed to spawn copilot for streaming: {e}"))
265        })?;
266
267        let stdout = child.stdout.take().ok_or_else(|| {
268            RunnerError::internal("Failed to capture copilot stdout for streaming")
269        })?;
270
271        let stderr_task = tokio::spawn(read_stderr_capped(
272            child.stderr.take(),
273            MAX_STREAMING_STDERR_BYTES,
274        ));
275
276        let reader = BufReader::new(stdout);
277        let lines = LinesStream::new(reader.lines());
278
279        let stream = lines.map(move |line_result: Result<String, io::Error>| {
280            let line = line_result
281                .map_err(|e| RunnerError::internal(format!("Error reading copilot stream: {e}")))?;
282
283            Ok(StreamChunk {
284                delta: line,
285                is_final: false,
286                finish_reason: None,
287            })
288        });
289
290        Ok(Box::pin(GuardedStream::new(stream, child, stderr_task)))
291    }
292
293    async fn health_check(&self) -> Result<bool, RunnerError> {
294        let mut cmd = Command::new(&self.config.binary_path);
295        cmd.arg("--version");
296
297        let output =
298            run_cli_command(&mut cmd, HEALTH_CHECK_TIMEOUT, HEALTH_CHECK_MAX_OUTPUT).await?;
299
300        if output.exit_code == 0 {
301            debug!("Copilot CLI health check passed");
302            Ok(true)
303        } else {
304            warn!(
305                exit_code = output.exit_code,
306                "Copilot CLI health check failed"
307            );
308            Ok(false)
309        }
310    }
311
312    fn as_any(&self) -> &dyn Any {
313        self
314    }
315}