use async_openai::types::ChatCompletionRequestMessage;
/// The two kinds of reply the AI can produce for a user prompt:
/// either a shell command to run, or plain natural-language text.
///
/// `PartialEq`/`Eq` are derived so responses can be compared directly
/// in match guards and tests (both variants carry only a `String`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AiResponse {
    /// A shell command the user is expected to execute.
    Command(String),
    /// Free-form conversational text (not meant to be executed).
    NaturalLanguage(String),
}
/// Accumulated chat history for one conversation, in the message format
/// expected by the `async_openai` chat-completion API.
///
/// `Debug`/`Clone` are derived so the state can be logged and snapshotted;
/// `ChatCompletionRequestMessage` implements both in `async_openai`.
#[derive(Debug, Clone)]
pub struct ConversationState {
    // Ordered request messages sent so far; crate-private so mutation
    // stays inside this module's conversation-handling code.
    pub(crate) messages: Vec<ChatCompletionRequestMessage>,
}
/// Bundle returned after one round-trip with the AI: the reply itself
/// plus the updated conversation history to carry into the next turn.
pub struct ConversationResult {
    // The AI's answer for this turn (command or natural language).
    pub response: AiResponse,
    // Conversation state including this exchange; presumably the caller
    // threads this into the next request — TODO confirm at call sites.
    pub conversation: ConversationState,
}