1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
//! AI Assistant state management
//!
//! Manages the state of the AI assistant popup including visibility, loading state,
//! responses, and channel handles for communication with the worker thread.
use std::sync::mpsc::{Receiver, Sender};
use super::selection::SelectionState;
use tokio_util::sync::CancellationToken;
// Re-export for backward compatibility
#[allow(unused_imports)]
pub use super::suggestion::{Suggestion, SuggestionType};
// Module declarations
#[path = "ai_state/lifecycle.rs"]
pub(crate) mod lifecycle;
#[path = "ai_state/response.rs"]
mod response;
#[path = "ai_state/suggestions.rs"]
mod suggestions;
// Test module
#[cfg(test)]
#[path = "ai_state_tests.rs"]
mod ai_state_tests;
/// Request messages sent to the AI worker thread
/// Request messages sent to the AI worker thread.
///
/// Each `Query` carries its own `request_id` and cancellation token so the
/// worker can be aborted mid-flight and stale responses can be discarded.
// Derive Debug for consistency with `AiResponse` (CancellationToken implements Debug).
#[derive(Debug)]
pub enum AiRequest {
    /// Query the AI with the given context
    Query {
        /// The prompt text sent to the AI provider
        prompt: String,
        /// Unique ID for this request, used to filter stale responses
        request_id: u64,
        /// Cancellation token for aborting the request
        cancel_token: CancellationToken,
    },
}
/// Response messages received from the AI worker thread
///
/// `Chunk`, `Complete`, and `Cancelled` carry a `request_id` so the UI can
/// discard messages from superseded requests (see `AiState::request_id`).
#[derive(Debug)]
pub enum AiResponse {
/// A chunk of streaming text
Chunk {
text: String,
/// Request ID this chunk belongs to
request_id: u64,
},
/// The response is complete
Complete {
/// Request ID this completion belongs to
request_id: u64,
},
/// An error occurred
// NOTE(review): unlike the other variants, `Error` carries no request_id,
// so an error from a stale/superseded request cannot be filtered out the
// way Chunk/Complete can — confirm this is intentional.
Error(String),
/// The request was cancelled
Cancelled {
/// Request ID that was cancelled (test assertion metadata)
#[allow(dead_code)]
request_id: u64,
},
}
/// AI Assistant state
///
/// Central state for the AI popup: visibility/config flags, the streaming
/// response buffer, request tracking (IDs, cancellation), parsed suggestions,
/// and the channel endpoints used to talk to the worker thread.
pub struct AiState {
/// Whether the AI popup is visible
pub visible: bool,
/// Whether AI features are enabled (from config)
pub enabled: bool,
/// Whether the AI is properly configured (has API key)
pub configured: bool,
/// Name of the AI provider (e.g., "Anthropic", "Bedrock", "OpenAI")
pub provider_name: String,
/// Model name (e.g., "claude-3-5-sonnet-20241022", "gpt-4o-mini")
pub model_name: String,
/// Maximum character length for JSON context samples
pub max_context_length: usize,
/// Whether we're waiting for or receiving a response
pub loading: bool,
/// Current error message (if any)
pub error: Option<String>,
/// Current response text (accumulated from streaming chunks)
pub response: String,
/// Previous response (preserved when starting a new request)
pub previous_response: Option<String>,
/// Channel to send requests to the worker thread
// Presumably `None` until the worker thread is spawned — confirm in lifecycle.rs
pub request_tx: Option<Sender<AiRequest>>,
/// Channel to receive responses from the worker thread
// Presumably `None` until the worker thread is spawned — confirm in lifecycle.rs
pub response_rx: Option<Receiver<AiResponse>>,
/// Current request ID, incremented for each new request
/// Used to filter stale responses from previous requests
pub request_id: u64,
/// Hash of the last query text that triggered an AI request
/// Used to detect query changes - query change is the ONLY trigger for new AI requests
pub last_query_hash: Option<u64>,
/// ID of the currently in-flight request, if any
/// Used to track active requests for cancellation
pub in_flight_request_id: Option<u64>,
/// Current cancellation token for the in-flight request
/// Calling cancel() on this immediately aborts the HTTP request
pub current_cancel_token: Option<CancellationToken>,
/// Parsed suggestions from AI response
/// Empty if response couldn't be parsed into structured suggestions
pub suggestions: Vec<Suggestion>,
/// Selection state for suggestion navigation
/// Tracks which suggestion is selected and navigation mode
pub selection: SelectionState,
/// Previous popup height (when suggestions were last rendered)
/// Used to maintain consistent size during loading transitions
pub previous_popup_height: Option<u16>,
}
impl Default for AiState {
    /// Default state: AI disabled and unconfigured, with a placeholder
    /// provider label, no model name, and the standard context-size cap.
    fn default() -> Self {
        // Name the arguments so the call site documents itself.
        let enabled = false;
        let configured = false;
        let provider_name = "AI".to_string();
        let model_name = String::new();
        let max_context_length = 50_000;
        Self::new_with_config(enabled, configured, provider_name, model_name, max_context_length)
    }
}