// tofu/tofu.rs
1//! Tofu - A command-line tool for interacting with LLMs
2//!
3//! This library provides the core functionality for the Tofu CLI tool.
4
5#![forbid(unsafe_code)]
6#![warn(missing_docs)]
7
8use colored::Colorize;
9use dialoguer::Editor;
10use dialoguer::Input;
11use home::home_dir;
12use indicatif::{ProgressBar, ProgressStyle};
13use reqwest::Client;
14use serde::{Deserialize, Serialize};
15use std::error::Error;
16use std::fs;
17use std::io::Write;
18use std::path::PathBuf;
19
20mod theme;
21use theme::TofuTheme;
22
/// Configuration loaded from config file.
///
/// One instance of this struct corresponds to a single profile inside the
/// multi-profile `config.json` (see `load_config`). Optional fields are
/// omitted from serialized output when `None`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ConfigFile {
    /// The LLM provider to use ("pollinations", "google", "openai",
    /// "anthropic", or "ollama" — see the provider match in `send_message`)
    pub provider: String,
    /// The model to use (provider-specific model identifier)
    pub model: String,
    /// Whether to JSON stream the response
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    /// The system prompt to use
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system_prompt: Option<String>,
}
37
/// Message in the conversation history.
///
/// Serialized into the OpenAI-style `{"role": ..., "content": ...}` shape
/// when building the request body in `send_message`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct Message {
    // Chat role; this file uses "user" and "assistant" (the system prompt
    // is injected separately in `send_message`).
    role: String,
    // The plain-text content of this turn.
    content: String,
}
44
/// Configuration loaded from keys file (`keys.json`).
///
/// Each key is optional; a missing or absent entry deserializes to `None`.
/// NOTE(review): `send_message` calls `.unwrap()` on these fields for the
/// matching provider, so an unset key currently panics — confirm intended.
#[derive(Debug, Serialize, Deserialize)]
pub struct KeysFile {
    /// The Google API key
    pub google: Option<String>,
    /// The OpenAI API key
    pub openai: Option<String>,
    /// The Anthropic API key
    pub anthropic: Option<String>,
}
55
/// The main configuration for the Tofu application.
///
/// Presumably assembled by the binary crate from CLI flags plus the
/// profile returned by [`load_config`] — confirm against the caller.
#[derive(Debug)]
pub struct Config {
    /// Whether to enable verbose output
    pub verbose: bool,
    /// Whether to enable interactive mode
    pub interactive: bool,
    /// Optional message string (used in one-shot, non-interactive mode)
    pub message: Option<String>,
    /// Whether to JSON stream the response
    pub stream: Option<bool>,
    /// Configuration loaded from file
    pub file: Option<ConfigFile>,
}
70
71/// Runs the Tofu application with the given configuration.
72/// # Arguments
73/// * `config` - The configuration for the application
74/// # Returns
75/// Returns `Ok(())` on success, or an error if something went wrong.
76/// Gets the path to the config file.
77fn get_config_path() -> Result<PathBuf, Box<dyn Error>> {
78    let config_dir = if cfg!(windows) {
79        dirs::config_dir()
80            .ok_or("Could not determine config directory")?
81            .join("tofu")
82    } else {
83        home_dir()
84            .ok_or("Could not determine home directory")?
85            .join(".tofu")
86    };
87
88    // Create config directory if it doesn't exist
89    if !config_dir.exists() {
90        std::fs::create_dir_all(&config_dir)?;
91    }
92
93    Ok(config_dir.join("config.json"))
94}
95
96fn get_keys_path() -> Result<PathBuf, Box<dyn Error>> {
97    let config_dir = if cfg!(windows) {
98        dirs::config_dir()
99            .ok_or("Could not determine config directory")?
100            .join("tofu")
101    } else {
102        home_dir()
103            .ok_or("Could not determine home directory")?
104            .join(".tofu")
105    };
106
107    // Create config directory if it doesn't exist
108    if !config_dir.exists() {
109        std::fs::create_dir_all(&config_dir)?;
110    }
111
112    Ok(config_dir.join("keys.json"))
113}
114
/// Loads the configuration from file.
///
/// # Arguments
/// * `profile` - Optional profile name to select; when `None`, the
///   `default` profile is used, falling back to the first profile present.
/// # Returns
/// The selected profile as a [`ConfigFile`], or an error if the file cannot
/// be parsed, the requested profile is missing, or the profile is invalid.
pub fn load_config(profile: Option<&str>) -> Result<ConfigFile, Box<dyn Error>> {
    let config_path = get_config_path()?;

    if !config_path.exists() {
        // If config file doesn't exist, create it with default values as a multi-profile map
        let default_config = ConfigFile {
            provider: String::from("pollinations"),
            model: String::from("openai"),
            stream: Some(true),
            system_prompt: Some(String::from("You are a helpful assistant named Tofu.")),
        };
        let gemini_config = ConfigFile {
            provider: String::from("google"),
            model: String::from("gemini-2.5-flash"),
            stream: None,
            system_prompt: None,
        };
        let openai_config = ConfigFile {
            provider: String::from("openai"),
            model: String::from("gpt-5-mini"),
            stream: None,
            system_prompt: None,
        };
        let anthropic_config = ConfigFile {
            provider: String::from("anthropic"),
            model: String::from("claude-sonnet-4-6"),
            stream: None,
            system_prompt: None,
        };
        let ollama_config = ConfigFile {
            provider: String::from("ollama"),
            model: String::from("llama3"),
            stream: None,
            system_prompt: None,
        };
        let profiles_json = serde_json::json!({
            "default": &default_config,
            "gemini": &gemini_config,
            "openai": &openai_config,
            "anthropic": &anthropic_config,
            "ollama": &ollama_config
        });
        let config_json = serde_json::to_string_pretty(&profiles_json)?;
        std::fs::write(&config_path, config_json)?;
        // The freshly written file's "default" profile is returned directly.
        return Ok(default_config);
    }

    let config_content = fs::read_to_string(&config_path)?;

    // Try to parse as either legacy single-profile or multi-profile config
    let root_value: serde_json::Value = serde_json::from_str(&config_content)
        .map_err(|e| format!("Failed to parse config file: {}", e))?;

    if let Some(obj) = root_value.as_object() {
        // region deprecated
        // remove: upon 1.0.0 release
        // reason: legacy config files were only used in very early beta versions
        // Heuristic: a legacy file has ConfigFile fields at the top level
        // instead of profile names.
        let looks_like_legacy = obj.contains_key("provider")
            || obj.contains_key("model")
            || obj.contains_key("stream")
            || obj.contains_key("system_prompt");

        if looks_like_legacy {
            // Legacy single-profile config
            println!("WARNING: legacy config is deprecated");
            let cfg: ConfigFile = serde_json::from_value(root_value)
                .map_err(|e| format!("Failed to parse legacy config: {}", e))?;
            if cfg.provider.is_empty() || cfg.model.is_empty() {
                return Err("Invalid config: provider and model must not be empty".into());
            }
            return Ok(cfg);
        }
        //endregion deprecated

        // Multi-profile config
        // Resolve the profile: explicit name > "default" > first entry.
        let (selected_name, selected_value) = if let Some(name) = profile {
            match obj.get(name) {
                Some(v) => (name.to_string(), v.clone()),
                None => {
                    let available = obj.keys().cloned().collect::<Vec<_>>().join(", ");
                    return Err(
                        format!("Profile '{}' not found. Available: {}", name, available).into(),
                    );
                }
            }
        } else {
            if let Some(v) = obj.get("default") {
                (String::from("default"), v.clone())
            } else {
                match obj.iter().next() {
                    Some((k, v)) => (k.clone(), v.clone()),
                    None => return Err("Config file contains no profiles".into()),
                }
            }
        };

        let mut cfg: ConfigFile = serde_json::from_value(selected_value).map_err(|e| {
            format!(
                "Failed to parse selected profile '{}': {}",
                selected_name, e
            )
        })?;

        // If selected profile is not the default, fall back to default for None values
        // (only the optional fields `stream` and `system_prompt` are inherited;
        // `provider`/`model` are required per-profile).
        if selected_name != "default" {
            if let Some(default_value) = obj.get("default") {
                let default_cfg: ConfigFile = serde_json::from_value(default_value.clone())
                    .map_err(|e| format!("Failed to parse default profile: {}", e))?;
                if cfg.stream.is_none() {
                    cfg.stream = default_cfg.stream;
                }
                if cfg.system_prompt.is_none() {
                    cfg.system_prompt = default_cfg.system_prompt;
                }
            }
        }

        if cfg.provider.is_empty() || cfg.model.is_empty() {
            return Err("Invalid config: provider and model must not be empty".into());
        }
        return Ok(cfg);
    }

    Err("Invalid config: root must be a JSON object".into())
}
241
242/// Loads the keys file from the default location.
243pub fn load_keys() -> Result<KeysFile, Box<dyn Error>> {
244    let keys_path = get_keys_path()?;
245
246    if !keys_path.exists() {
247        // If keys file doesn't exist, create it with default values as a multi-profile map
248        let default_keys = serde_json::json!({
249            "google": "",
250            "openai": "",
251            "anthropic": ""
252        });
253
254        let keys_json = serde_json::to_string_pretty(&default_keys)?;
255        std::fs::write(&keys_path, keys_json)?;
256        return Ok(KeysFile {
257            google: None,
258            openai: None,
259            anthropic: None,
260        });
261    }
262
263    let keys_content = fs::read_to_string(&keys_path)?;
264    let keys_json: serde_json::Value = serde_json::from_str(&keys_content)?;
265    let keys: KeysFile = serde_json::from_value(keys_json)?;
266
267    return Ok(keys);
268}
269
270/// Opens the config file in the default editor.
271///
272/// # Returns
273/// Returns `Ok(())` on success, or an error if something went wrong.
274pub fn open_config() -> Result<(), Box<dyn Error>> {
275    println!("Opening config file...");
276    let config_path = get_config_path()?;
277
278    // Ensure config file exists by trying to load it or create a default one
279    if let Err(e) = load_config(None) {
280        eprintln!("Warning: {}", e);
281        eprintln!("Opening editor to fix the config file...");
282    }
283
284    // Open the config file in the default editor
285    let editor = std::env::var("EDITOR").unwrap_or_else(|_| {
286        if cfg!(windows) {
287            String::from("notepad")
288        } else {
289            String::from("nano")
290        }
291    });
292
293    let status = std::process::Command::new(editor)
294        .arg(&config_path)
295        .status()?;
296
297    if !status.success() {
298        return Err(format!("Editor exited with status: {}", status).into());
299    }
300
301    // Try to load the config after editing, but don't fail if it's still invalid
302    if let Err(e) = load_config(None) {
303        eprintln!("Warning: The config file is still invalid: {}", e);
304        eprintln!("Please fix the config file and try again.");
305    }
306
307    Ok(())
308}
309
310/// Gets the currently active profile name.
311///
312/// # Arguments
313/// * `current_config` - The currently loaded configuration to compare against
314/// # Returns
315/// Returns the name of the active profile as a String, or an error if something went wrong.
316fn get_active_profile(current_config: &ConfigFile) -> Result<String, Box<dyn Error>> {
317    let config_path = get_config_path()?;
318    let config_content = fs::read_to_string(&config_path)?;
319
320    let root_value: serde_json::Value = serde_json::from_str(&config_content)
321        .map_err(|e| format!("Failed to parse config file: {}", e))?;
322
323    if let Some(obj) = root_value.as_object() {
324        // Check each profile to see which one matches the current config
325        for (profile_name, profile_value) in obj.iter() {
326            if let Ok(profile_config) = serde_json::from_value::<ConfigFile>(profile_value.clone())
327            {
328                if profile_config.provider == current_config.provider
329                    && profile_config.model == current_config.model
330                {
331                    return Ok(profile_name.clone());
332                }
333            }
334        }
335
336        // If no exact match found, return "default" as fallback
337        Ok("default".to_string())
338    } else {
339        Err("Invalid config format - expected JSON object".into())
340    }
341}
342
343/// Lists all available profiles from the config file.
344///
345/// # Returns
346/// Returns `Ok(())` on success, or an error if something went wrong.
347fn list_profiles() -> Result<(), Box<dyn Error>> {
348    let path = get_config_path()?;
349    let config = fs::read_to_string(&path)?;
350
351    // Parse the JSON config to get root keys (profile names)
352    let root_value: serde_json::Value =
353        serde_json::from_str(&config).map_err(|e| format!("Failed to parse config file: {}", e))?;
354
355    println!("{}", "Available profiles:".bold());
356    if let Some(obj) = root_value.as_object() {
357        if obj.is_empty() {
358            println!("  No profiles found");
359        } else {
360            for key in obj.keys() {
361                println!("  {}", key);
362            }
363        }
364    } else {
365        eprintln!("  Invalid config format - expected JSON object");
366    }
367    Ok(())
368}
369
370/// Opens the keys file in the default editor.
371pub fn open_keys() -> Result<(), Box<dyn Error>> {
372    println!("Opening keys file...");
373    let config_path = get_keys_path()?;
374
375    // Ensure keys file exists by trying to load it or create a default one
376    if let Err(e) = load_keys() {
377        eprintln!("Warning: {}", e);
378        eprintln!("Opening editor to fix the keys file...");
379    }
380
381    // Open the file in the default editor
382    let editor = std::env::var("EDITOR").unwrap_or_else(|_| {
383        if cfg!(windows) {
384            String::from("notepad")
385        } else {
386            String::from("nano")
387        }
388    });
389
390    let status = std::process::Command::new(editor)
391        .arg(&config_path)
392        .status()?;
393
394    if !status.success() {
395        return Err(format!("Editor exited with status: {}", status).into());
396    }
397
398    Ok(())
399}
400
401/// Runs the Tofu application with the given configuration.
402/// # Arguments
403/// * `config` - The configuration for the application
404/// # Returns
405/// Returns `Ok(())` on success, or an error if something went wrong.
406/// Runs the Tofu application with the given configuration asynchronously.
407/// # Arguments
408/// * `config` - The configuration for the application
409/// # Returns
410/// Returns `Ok(())` on success, or an error if something went wrong.
411pub async fn run(config: Config) -> Result<(), Box<dyn Error>> {
412    if config.verbose {
413        println!(
414            "Tofu v{} initialized (verbose mode)",
415            env!("CARGO_PKG_VERSION")
416        );
417        println!("{:#?}", config);
418    }
419
420    if config.interactive {
421        run_interactive(config).await
422    } else {
423        let message = config.message.as_ref().unwrap_or(&String::new()).clone();
424        send_message(&message, &config, vec![]).await?;
425        Ok(())
426    }
427}
428
/// Runs the interactive REPL loop: reads lines, dispatches `/` commands,
/// maintains the rolling conversation history, and sends each message to
/// the configured provider.
async fn run_interactive(mut config: Config) -> Result<(), Box<dyn Error>> {
    let mut conversation_history = vec![];

    println!(
        "{}",
        format!("Tofu {}", env!("CARGO_PKG_VERSION")).bold().blue()
    );
    println!(
        "{}",
        "Ctrl+C or /q to exit • /? for commands".italic().dimmed()
    );

    loop {
        let input: Result<String, _> = Input::with_theme(&TofuTheme::default()).interact_text();

        match input {
            Ok(mut line) => {
                line = line.trim().to_string();
                if line.is_empty() {
                    continue;
                }

                // Check for commands starting with /
                // ("'''" and "\"\"\"" are Ollama-style multiline triggers).
                if line.starts_with('/') || line.starts_with("'''") || line.starts_with("\"\"\"") {
                    let (should_exit, new_config, message_to_send) =
                        handle_command(line.as_str(), &mut conversation_history, &config)?;
                    // A command may swap in a new profile/model config.
                    if let Some(new_file_config) = new_config {
                        config.file = Some(new_file_config);
                    }
                    if should_exit {
                        break; // Exit the loop if command returns true
                    }
                    if let Some(message) = message_to_send {
                        // Process the multiline message like a regular input
                        line = message;
                    } else {
                        continue; // Skip sending to model for commands that don't return a message
                    }
                }

                // Add user message to conversation history
                conversation_history.push(Message {
                    role: "user".to_string(),
                    content: line.to_string(),
                });

                // If length > 100 messages, remove the oldest message (keep system prompt)
                // NOTE(review): `remove(1)` preserves index 0, but the history built
                // here starts with a *user* message (the system prompt is injected in
                // `send_message`), so this keeps the oldest user turn instead — confirm
                // whether `remove(0)` was intended.
                if conversation_history.len() > 100 {
                    conversation_history.remove(1);
                }

                // Send message and get response
                match send_message(line.as_str(), &config, conversation_history.clone()).await {
                    Ok(response_content) => {
                        // Add assistant response to conversation history
                        conversation_history.push(Message {
                            role: "assistant".to_string(),
                            content: response_content.clone(),
                        });
                    }
                    Err(e) => {
                        // Special-case the common "Ollama not running" failure.
                        if e.to_string().contains("localhost:11434") {
                            eprintln!("{}", "Error: Ollama server not running".red());
                        } else {
                            eprintln!("{}", format!("Error: {}", e).red());
                        }
                        // Remove the failed message from history
                        if !conversation_history.is_empty() {
                            conversation_history.pop();
                        }
                        continue;
                    }
                }
            }
            Err(e) => {
                eprintln!("{}", format!("Error reading input: {}", e).red());
                break;
            }
        }
    }

    Ok(())
}
512
513/// Handles special commands starting with /
514/// Returns a tuple: (should_exit, new_config_option, message_to_send)
515fn handle_command(
516    command: &str,
517    conversation_history: &mut Vec<Message>,
518    config: &Config,
519) -> Result<(bool, Option<ConfigFile>, Option<String>), Box<dyn Error>> {
520    match command {
521        "/exit" | "/quit" | "/q" | "/bye" => Ok((true, None, None)),
522        "/help" | "/h" | "/?" | "/commands" | "/cmds" => {
523            println!("{}", "Available commands:".bold());
524            println!("  /help, /            - Show this help message");
525            println!("  /exit, /quit, /q    - Exit the program");
526            println!(
527                "  /profile [name]     - Switch to a different config profile. If name not provided, lists profiles"
528            );
529            println!(
530                "  /model <name>, /m <name> - Switch to a different model (without changing profile)"
531            );
532            println!("  /listprofiles, /lsp - List all available profiles");
533            println!("  /clear              - Clear conversation history");
534            println!("  /keys               - Open the API keys file");
535            println!("  /show, /info        - Display profile & model info");
536            println!("  /multiline, /ml, // - Enter multiline input mode");
537            println!("* Most Ollama commands also work, such as \"\"\" and /bye.");
538            Ok((false, None, None))
539        }
540        "/clear" => {
541            conversation_history.clear();
542            println!("{}", "Conversation history cleared.".blue());
543            Ok((false, None, None))
544        }
545        "/keys" | "/key" | "/apikeys" | "/apikey" => {
546            open_keys()?;
547            Ok((false, None, None))
548        }
549        cmd if cmd.starts_with("/profile") || cmd.starts_with("/p") => {
550            let parts: Vec<&str> = command.split_whitespace().collect();
551            if parts.len() != 2 {
552                if let Err(e) = list_profiles() {
553                    eprintln!("{}", format!("Error listing profiles: {}", e).red());
554                } else {
555                    println!("Usage: /profile [profile_name]");
556                }
557                return Ok((false, None, None));
558            }
559
560            let profile_name = parts[1];
561            match load_config(Some(profile_name)) {
562                Ok(new_config) => {
563                    println!(
564                        "{}",
565                        format!("Switched to profile '{}'", profile_name).green()
566                    );
567                    Ok((false, Some(new_config), None))
568                }
569                Err(e) => {
570                    eprintln!(
571                        "{}",
572                        format!("Failed to switch to profile '{}': {}", profile_name, e).red()
573                    );
574                    Ok((false, None, None))
575                }
576            }
577        }
578        cmd if cmd.starts_with("/model") || cmd.starts_with("/m") => {
579            let parts: Vec<&str> = command.split_whitespace().collect();
580            if parts.len() != 2 {
581                eprintln!("{}", "Usage: /model <model_name>".red());
582                return Ok((false, None, None));
583            }
584
585            let new_model = parts[1];
586
587            // Get current config and create a new one with updated model
588            if let Some(mut current_config) = config.file.clone() {
589                current_config.model = new_model.to_string();
590                println!("{}", format!("Switched to model '{}'", new_model).green());
591                Ok((false, Some(current_config), None))
592            } else {
593                eprintln!("{}", "No configuration loaded".red());
594                Ok((false, None, None))
595            }
596        }
597        "/show" | "/info" | "/s" | "/i" => {
598            match config.file.as_ref() {
599                Some(current_config) => match get_active_profile(current_config) {
600                    Ok(profile) => println!("Profile: {}", profile),
601                    Err(_) => println!("Profile: unknown"),
602                },
603                None => println!("Profile: unknown (no config loaded)"),
604            }
605            if let Some(current_config) = config.file.as_ref() {
606                println!("Model: {}", current_config.model);
607            }
608            Ok((false, None, None))
609        }
610        "/listprofiles" | "/lsp" => {
611            if let Err(e) = list_profiles() {
612                eprintln!("{}", format!("Error listing profiles: {}", e).red());
613            }
614            Ok((false, None, None))
615        }
616        "/multiline" | "/ml" | "//" | "'''" | "\"\"\"" => {
617            if let Some(multiline_input) = Editor::new().edit("").unwrap() {
618                if !multiline_input.trim().is_empty() {
619                    println!("{}\n", multiline_input);
620                    // Return the multiline input as a message to be processed
621                    return Ok((false, None, Some(multiline_input)));
622                } else {
623                    println!("{}", "Empty input - cancelled".yellow());
624                }
625            } else {
626                eprintln!("{}", "Cancelled".red());
627            }
628            Ok((false, None, None))
629        }
630        _ => {
631            eprintln!(
632                "{}",
633                format!(
634                    "Unknown command: {}. Type /help for available commands.",
635                    command
636                )
637                .red()
638            );
639            Ok((false, None, None))
640        }
641    }
642}
643
/// Sends the conversation to the configured provider and returns the
/// assistant's reply text (also printed to stdout).
///
/// NOTE(review): `_message` is unused — only `history` is serialized into
/// the request, so callers must ensure the latest user message is already
/// the last history entry. Also, `"stream"` in the body comes from
/// `config.stream`, not `config.file.stream` — confirm which should win.
async fn send_message(
    _message: &str,
    config: &Config,
    history: Vec<Message>,
) -> Result<String, Box<dyn Error>> {
    // Spinner shown while waiting for the provider's first response.
    let spinner = ProgressBar::new_spinner();
    spinner.enable_steady_tick(std::time::Duration::from_millis(100));
    spinner.set_style(
        ProgressStyle::with_template("{spinner:.blue} {msg} {elapsed:.bold}")
            .unwrap()
            .tick_chars("⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏"),
    );
    spinner.set_message("Thinking...");

    // Build messages array with history
    let mut messages = vec![];

    // System prompt (if configured) is always the first message.
    if let Some(file) = &config.file {
        if let Some(system_prompt) = &file.system_prompt {
            messages.push(serde_json::json!({ "role": "system", "content": system_prompt }));
        }
    }

    for msg in history {
        messages.push(serde_json::json!({ "role": msg.role, "content": msg.content }));
    }

    // OpenAI-compatible chat-completions request body.
    let body = if let Some(file) = &config.file {
        serde_json::json!({
            "model": file.model,
            "messages": messages,
            "stream": config.stream,
        })
    } else {
        return Err("No configuration file found".to_string().into());
    };

    // Send the message
    let client = Client::new();

    if config.verbose {
        dbg!(&body);
    }

    // Resolve provider endpoint + auth header. All providers here expose an
    // OpenAI-compatible /chat/completions endpoint.
    let (url, auth_header) = if let Some(file) = &config.file {
        match file.provider.as_str() {
            "pollinations" => (
                "https://gen.pollinations.ai/v1/chat/completions",
                Some("Bearer pk_y1jVZsGNdFYuc12n".to_string()), // NOTE: public key; in the future, other Pollinations key options will be available.
            ),
            // NOTE(review): the three arms below panic via `.unwrap()` if the
            // keys file is unreadable or the key is unset — consider returning
            // a descriptive error instead.
            "google" => (
                "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions",
                Some(format!("Bearer {}", load_keys().unwrap().google.unwrap())),
            ),
            "openai" => (
                "https://api.openai.com/v1/chat/completions",
                Some(format!("Bearer {}", load_keys().unwrap().openai.unwrap())),
            ),
            "anthropic" => (
                "https://api.anthropic.com/v1/chat/completions",
                Some(format!(
                    "Bearer {}",
                    load_keys().unwrap().anthropic.unwrap()
                )),
            ),
            "ollama" => (
                "http://localhost:11434/v1/chat/completions",
                None, // Ollama typically doesn't require authentication for local setup
            ),
            provider => {
                return Err(format!("Unsupported provider: {}", provider).into());
            }
        }
    } else {
        return Err("No configuration file found".to_string().into());
    };

    let mut request = client
        .post(url)
        .header("Content-Type", "application/json")
        .body(serde_json::to_string(&body)?);

    if let Some(auth) = auth_header {
        request = request.header("Authorization", auth);
    }

    // Anthropic requires an explicit API version header.
    if config
        .file
        .as_ref()
        .map(|f| f.provider == "anthropic")
        .unwrap_or(false)
    {
        request = request.header("anthropic-version", "2023-06-01");
    }

    let mut response = request.send().await?;

    if !response.status().is_success() {
        let error_msg = format!("Request failed with status: {}", response.status());

        // Provide more helpful error message for Ollama connection issues
        if config
            .file
            .as_ref()
            .map(|f| f.provider == "ollama")
            .unwrap_or(false)
        {
            // NOTE(review): a connection-refused error surfaces from `send()`
            // above, not as a status code; 101/0 may never occur here — verify.
            let status = response.status().as_u16();
            if status == 101 || status == 0 {
                return Err("Ollama server is not running. Please make sure Ollama is installed and running on localhost:11434. You can install Ollama from https://ollama.ai/".into());
            }
        }

        return Err(error_msg.into());
    }

    spinner.finish_and_clear();

    if config.stream == Some(true) {
        // Server-Sent Events: chunks arrive as "data: {json}" lines,
        // terminated by "data: [DONE]". Partial lines are held in `buffer`
        // until a newline completes them.
        spinner.finish_and_clear();
        let mut buffer = String::new();
        let mut response_content = String::new();
        let mut done = false;
        while let Some(chunk) = response.chunk().await? {
            let chunk_str = String::from_utf8_lossy(&chunk);
            buffer.push_str(&chunk_str);

            loop {
                if let Some(newline_idx) = buffer.find('\n') {
                    let line = buffer[..newline_idx].trim_end_matches('\r').to_string();
                    buffer.drain(..=newline_idx);
                    if line.is_empty() {
                        continue;
                    }

                    if line.starts_with("data: ") {
                        let payload = line[6..].trim();
                        if payload == "[DONE]" {
                            done = true;
                            println!(); // Fixes issue on Linux where % sign shows at end
                            break;
                        } else {
                            // Extract incremental text from either the streaming
                            // `delta` field or a full `message` object.
                            if let Ok(v) = serde_json::from_str::<serde_json::Value>(payload) {
                                if let Some(choices) = v.get("choices").and_then(|c| c.as_array()) {
                                    for choice in choices {
                                        if let Some(delta) = choice.get("delta") {
                                            if let Some(content) =
                                                delta.get("content").and_then(|c| c.as_str())
                                            {
                                                print!("{}", content);
                                                let _ = std::io::stdout().flush();
                                                response_content.push_str(content);
                                            }
                                        } else if let Some(content) = choice
                                            .get("message")
                                            .and_then(|m| m.get("content"))
                                            .and_then(|c| c.as_str())
                                        {
                                            print!("{}", content);
                                            let _ = std::io::stdout().flush();
                                            response_content.push_str(content);
                                        }
                                    }
                                }
                            }
                        }
                    } else if config.verbose {
                        eprintln!("{}", line);
                    }
                } else {
                    // No complete line yet; wait for the next chunk.
                    break;
                }
            }

            if done {
                break;
            }
        }
        Ok(response_content)
    } else {
        // Non-streaming: parse the whole JSON response at once.
        let response_text = response.text().await?;
        let json: serde_json::Value = serde_json::from_str(&response_text)?;
        let content = json["choices"][0]["message"]["content"]
            .as_str()
            .unwrap_or("")
            .replace("\\n", "\n")
            .trim_matches('"')
            .to_string();
        spinner.finish_and_clear();
        println!("\n{}\n", content);
        Ok(content)
    }
}
837
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): this test performs a real HTTP request to the
    // pollinations provider, so it requires network access and can be flaky
    // offline or under rate limits — consider marking it `#[ignore]` and
    // running it explicitly.
    #[tokio::test]
    async fn test_run() {
        // One-shot (non-interactive, non-streaming) smoke test.
        let config = Config {
            verbose: false,
            interactive: false,
            message: Some(String::from("Hello, world!")),
            stream: Some(false),
            file: Some(ConfigFile {
                provider: String::from("pollinations"),
                model: String::from("openai"),
                stream: Some(false),
                system_prompt: Some(String::from("You are a helpful assistant named Tofu.")),
            }),
        };
        let result = run(config).await;
        assert!(result.is_ok());
    }
}