// aico/commands/llm_shared.rs

1use crate::console::{display_cost_summary, is_stdin_terminal, is_stdout_terminal};
2use crate::exceptions::AicoError;
3use crate::models::{DerivedContent, HistoryRecord, Mode, Role};
4use crate::session::Session;
5use chrono::Utc;
6use std::io::{self, Read, Write};
7
8pub async fn run_llm_flow(
9    cli_prompt: Option<String>,
10    model: Option<String>,
11    system_prompt: String,
12    no_history: bool,
13    passthrough: bool,
14    mode: Mode,
15) -> Result<(), AicoError> {
16    let mut stdin_buffer = None;
17    if !is_stdin_terminal() {
18        let mut buffer = String::new();
19        if io::stdin().read_to_string(&mut buffer).is_ok() {
20            let trimmed = buffer.trim();
21            if !trimmed.is_empty() {
22                stdin_buffer = Some(buffer);
23            }
24        }
25    }
26
27    let (prompt_text, piped_content) = match (cli_prompt, stdin_buffer) {
28        (Some(p), Some(s)) => (p, Some(s)),
29        (Some(p), None) => (p, None),
30        (None, Some(s)) => (s, None),
31        (None, None) => {
32            if is_stdin_terminal() {
33                print!("Prompt: ");
34                io::stdout().flush()?;
35                let mut buffer = String::new();
36                io::stdin().read_line(&mut buffer)?;
37                let input = buffer.trim().to_string();
38                if input.is_empty() {
39                    return Err(AicoError::InvalidInput("Prompt cannot be empty.".into()));
40                }
41                (input, None)
42            } else {
43                return Err(AicoError::InvalidInput("Prompt is required.".into()));
44            }
45        }
46    };
47
48    let mut session = Session::load_active()?;
49    session.warn_missing_files();
50    let active_model = model.unwrap_or_else(|| session.view.model.clone());
51
52    let sys_prompt = if system_prompt.is_empty() {
53        crate::consts::DEFAULT_SYSTEM_PROMPT.to_string()
54    } else {
55        system_prompt
56    };
57
58    let config = crate::models::InteractionConfig {
59        mode: mode.clone(),
60        no_history,
61        passthrough,
62        model_override: Some(active_model.clone()),
63    };
64
65    let interaction = crate::llm::executor::execute_interaction(
66        &session,
67        &sys_prompt,
68        &prompt_text,
69        &piped_content,
70        config,
71    )
72    .await?;
73
74    if !is_stdout_terminal() {
75        if passthrough {
76            print!("{}", interaction.content);
77        } else {
78            print!(
79                "{}",
80                crate::console::format_piped_output(
81                    &interaction.unified_diff,
82                    &interaction.content,
83                    &mode
84                )
85            );
86        }
87        let _ = io::stdout().flush();
88    }
89
90    if let Some(usage) = &interaction.token_usage {
91        display_cost_summary(usage, interaction.cost, &session.store, &session.view);
92    }
93
94    let user_msg = HistoryRecord {
95        role: Role::User,
96        content: prompt_text,
97        mode: mode.clone(),
98        timestamp: Utc::now(),
99        passthrough,
100        piped_content,
101        model: None,
102        token_usage: None,
103        cost: None,
104        duration_ms: None,
105        derived: None,
106        edit_of: None,
107    };
108
109    let asst_msg = HistoryRecord {
110        role: Role::Assistant,
111        content: interaction.content,
112        mode: mode.clone(),
113        timestamp: Utc::now(),
114        passthrough,
115        piped_content: None,
116        model: Some(active_model),
117        token_usage: interaction.token_usage,
118        cost: interaction.cost,
119        duration_ms: Some(interaction.duration_ms),
120        derived: Some(DerivedContent {
121            unified_diff: interaction.unified_diff,
122            display_content: interaction.display_items,
123        }),
124        edit_of: None,
125    };
126
127    session.append_pair(user_msg, asst_msg)
128}