// embacle/copilot.rs
// ABOUTME: GitHub Copilot CLI runner implementing the `LlmProvider` trait
// ABOUTME: Wraps the `copilot` CLI with plain-text output parsing and streaming support
//
// SPDX-License-Identifier: Apache-2.0
// Copyright (c) 2026 dravr.ai

use std::io;
use std::process::Stdio;
use std::str;

use async_trait::async_trait;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio_stream::wrappers::LinesStream;
use tokio_stream::StreamExt;
use tracing::{debug, instrument};

use crate::cli_common::{CliRunnerBase, MAX_OUTPUT_BYTES};
use crate::config::RunnerConfig;
use crate::process::{read_stderr_capped, run_cli_command};
use crate::prompt::build_prompt;
use crate::sandbox::{apply_sandbox, build_policy};
use crate::stream::{GuardedStream, MAX_STREAMING_STDERR_BYTES};
use crate::types::{
    ChatRequest, ChatResponse, ChatStream, LlmCapabilities, LlmProvider, RunnerError, StreamChunk,
};

/// Default model for Copilot CLI, used when `RunnerConfig.model` is unset.
const DEFAULT_MODEL: &str = "claude-opus-4.6-fast";

/// Fallback model list when `gh copilot models` discovery fails.
///
/// Kept roughly in newest-first order; `DEFAULT_MODEL` must remain a member.
const FALLBACK_MODELS: &[&str] = &[
    "claude-sonnet-4.6",
    "claude-opus-4.6",
    "claude-opus-4.6-fast",
    "claude-opus-4.5",
    "claude-sonnet-4.5",
    "claude-haiku-4.5",
    "claude-sonnet-4",
    "gemini-3-pro-preview",
    "gpt-5.4",
    "gpt-5.3-codex",
    "gpt-5.2-codex",
    "gpt-5.2",
    "gpt-5.1-codex-max",
    "gpt-5.1-codex",
    "gpt-5.1",
    "gpt-5.1-codex-mini",
    "gpt-5-mini",
    "gpt-4.1",
];

53/// Discover available Copilot models by running `gh copilot models`.
54///
55/// Returns `None` if `gh` is not found, the command fails, or the output
56/// cannot be parsed into a non-empty model list.
57pub async fn discover_copilot_models() -> Option<Vec<String>> {
58    let output = Command::new("gh")
59        .args(["copilot", "models"])
60        .stdout(Stdio::piped())
61        .stderr(Stdio::null())
62        .output()
63        .await
64        .ok()?;
65
66    if !output.status.success() {
67        debug!(
68            exit_code = output.status.code().unwrap_or(-1),
69            "gh copilot models failed, falling back to static list"
70        );
71        return None;
72    }
73
74    let stdout = str::from_utf8(&output.stdout).ok()?;
75    let models: Vec<String> = stdout
76        .lines()
77        .map(str::trim)
78        .filter(|line| !line.is_empty())
79        .map(ToOwned::to_owned)
80        .collect();
81
82    if models.is_empty() {
83        debug!("gh copilot models returned empty output, falling back to static list");
84        return None;
85    }
86
87    debug!(
88        count = models.len(),
89        "Discovered available Copilot models via gh copilot models"
90    );
91    Some(models)
92}
93
94/// Fallback model list when `gh copilot models` discovery fails.
95///
96/// Used by both `CopilotRunner` and `CopilotHeadlessRunner`.
97pub fn copilot_fallback_models() -> Vec<String> {
98    FALLBACK_MODELS.iter().map(|s| (*s).to_owned()).collect()
99}
100
101/// GitHub Copilot CLI runner
102///
103/// Implements `LlmProvider` by delegating to the `copilot` binary in
104/// non-interactive mode (`-p`). Copilot CLI outputs plain text (no JSON
105/// structure), so the raw stdout is captured as the response content.
106/// System messages are embedded into the user prompt since Copilot CLI
107/// has no `--system-prompt` flag.
108pub struct CopilotRunner {
109    base: CliRunnerBase,
110}
111
112impl CopilotRunner {
113    /// Create a new Copilot CLI runner with the given configuration.
114    ///
115    /// Attempts to discover available models by running `gh copilot models`.
116    /// Falls back to a static list if discovery fails.
117    pub async fn new(config: RunnerConfig) -> Self {
118        let mut base = CliRunnerBase::new(config, DEFAULT_MODEL, FALLBACK_MODELS);
119        if let Some(models) = discover_copilot_models().await {
120            base.available_models = models;
121        }
122        Self { base }
123    }
124
125    /// Build the base command with common arguments
126    fn build_command(&self, prompt: &str, silent: bool) -> Command {
127        let mut cmd = Command::new(&self.base.config.binary_path);
128
129        // Non-interactive prompt mode
130        cmd.args(["-p", prompt]);
131
132        let model = self
133            .base
134            .config
135            .model
136            .as_deref()
137            .unwrap_or_else(|| self.base.default_model());
138        cmd.args(["--model", model]);
139
140        // Required for non-interactive mode
141        cmd.arg("--allow-all-tools");
142
143        // Disable MCP servers to force text-based tool catalog usage
144        cmd.arg("--disable-builtin-mcps");
145
146        // Prevent reading project AGENTS.md instructions
147        cmd.arg("--no-custom-instructions");
148
149        // Autonomous mode — no interactive prompts
150        cmd.arg("--no-ask-user");
151
152        // Clean text output
153        cmd.arg("--no-color");
154
155        if silent {
156            // Output only the agent response (no stats footer)
157            cmd.arg("-s");
158        }
159
160        for arg in &self.base.config.extra_args {
161            cmd.arg(arg);
162        }
163
164        if let Ok(policy) = build_policy(
165            self.base.config.working_directory.as_deref(),
166            &self.base.config.allowed_env_keys,
167        ) {
168            apply_sandbox(&mut cmd, &policy);
169        }
170
171        cmd
172    }
173
174    /// Parse plain-text output into a `ChatResponse`
175    fn parse_response(raw: &[u8]) -> Result<ChatResponse, RunnerError> {
176        let content = str::from_utf8(raw)
177            .map_err(|e| {
178                RunnerError::internal(format!("Copilot CLI output is not valid UTF-8: {e}"))
179            })?
180            .trim()
181            .to_owned();
182
183        Ok(ChatResponse {
184            content,
185            model: "copilot".to_owned(),
186            usage: None,
187            finish_reason: Some("stop".to_owned()),
188            warnings: None,
189            tool_calls: None,
190        })
191    }
192}
193
194#[async_trait]
195impl LlmProvider for CopilotRunner {
196    // Copilot CLI has no --system-prompt flag; system messages are
197    // embedded into the prompt via build_prompt(). Streaming is
198    // supported by reading stdout line by line.
199    crate::delegate_provider_base!("copilot", "GitHub Copilot CLI", LlmCapabilities::STREAMING);
200
201    #[instrument(skip_all, fields(runner = "copilot"))]
202    async fn complete(&self, request: &ChatRequest) -> Result<ChatResponse, RunnerError> {
203        let prompt = build_prompt(&request.messages);
204        let mut cmd = self.build_command(&prompt, true);
205
206        let output = run_cli_command(&mut cmd, self.base.config.timeout, MAX_OUTPUT_BYTES).await?;
207        self.base.check_exit_code(&output, "copilot")?;
208
209        Self::parse_response(&output.stdout)
210    }
211
212    #[instrument(skip_all, fields(runner = "copilot"))]
213    async fn complete_stream(&self, request: &ChatRequest) -> Result<ChatStream, RunnerError> {
214        let prompt = build_prompt(&request.messages);
215        let mut cmd = self.build_command(&prompt, true);
216
217        // Enable streaming
218        cmd.args(["--stream", "on"]);
219
220        cmd.stdout(Stdio::piped());
221        cmd.stderr(Stdio::piped());
222
223        let mut child = cmd.spawn().map_err(|e| {
224            RunnerError::internal(format!("Failed to spawn copilot for streaming: {e}"))
225        })?;
226
227        let stdout = child.stdout.take().ok_or_else(|| {
228            RunnerError::internal("Failed to capture copilot stdout for streaming")
229        })?;
230
231        let stderr_task = tokio::spawn(read_stderr_capped(
232            child.stderr.take(),
233            MAX_STREAMING_STDERR_BYTES,
234        ));
235
236        let reader = BufReader::new(stdout);
237        let lines = LinesStream::new(reader.lines());
238
239        let stream = lines.map(move |line_result: Result<String, io::Error>| {
240            let line = line_result
241                .map_err(|e| RunnerError::internal(format!("Error reading copilot stream: {e}")))?;
242
243            Ok(StreamChunk {
244                delta: line,
245                is_final: false,
246                finish_reason: None,
247            })
248        });
249
250        Ok(Box::pin(GuardedStream::new(stream, child, stderr_task)))
251    }
252}