// mermaid_cli/tui/state/model.rs
//! Model state management
//!
//! Handles LLM configuration and identity.
4
5use std::sync::Arc;
6use tokio::sync::RwLock;
7
8use crate::models::Model;
9
/// Model state - LLM configuration and identity
pub struct ModelState {
    /// Shared handle to the LLM implementation. `Arc<RwLock<..>>` permits
    /// many concurrent readers with exclusive writes across tasks.
    pub model: Arc<RwLock<Box<dyn Model>>>,
    /// Identifier of the selected model, as supplied by the caller of `new`.
    pub model_id: String,
    /// Human-readable name, captured from `Model::name()` at construction.
    pub model_name: String,
    /// Thinking mode state:
    /// - Some(true) = model supports thinking, currently enabled
    /// - Some(false) = model supports thinking, currently disabled
    /// - None = model does not support thinking (or unknown)
    pub thinking_enabled: Option<bool>,
}
21
22impl ModelState {
23 pub fn new(model: Box<dyn Model>, model_id: String) -> Self {
24 let model_name = model.name().to_string();
25 Self {
26 model: Arc::new(RwLock::new(model)),
27 model_id,
28 model_name,
29 // Default: thinking enabled (will be disabled if model doesn't support it)
30 thinking_enabled: Some(true),
31 }
32 }
33
34 /// Get a reference to the model for reading
35 pub fn model_ref(&self) -> &Arc<RwLock<Box<dyn Model>>> {
36 &self.model
37 }
38
39 /// Toggle thinking mode (only if model supports it)
40 /// Returns the new state, or None if model doesn't support thinking
41 pub fn toggle_thinking(&mut self) -> Option<bool> {
42 match self.thinking_enabled {
43 Some(enabled) => {
44 self.thinking_enabled = Some(!enabled);
45 self.thinking_enabled
46 }
47 None => None, // Model doesn't support thinking, can't toggle
48 }
49 }
50
51 /// Mark model as not supporting thinking
52 /// Called when we get "does not support thinking" error from Ollama
53 pub fn disable_thinking_support(&mut self) {
54 self.thinking_enabled = None;
55 }
56
57 /// Check if thinking is currently active
58 pub fn is_thinking_active(&self) -> bool {
59 self.thinking_enabled == Some(true)
60 }
61}