use crate::core::adapters::ai::KandilAI;
use crate::utils::cost_tracking::CostTracker;
use anyhow::Result;
use std::sync::Arc;
/// Wraps a shared `KandilAI` client so that every chat call is also
/// reported to a shared `CostTracker` for usage accounting.
pub struct TrackedAI {
    // Underlying AI client; public so callers can read provider/model state
    // directly (e.g. `self.ai.provider`, `self.ai.model`).
    pub ai: Arc<KandilAI>,
    // Accumulates per-provider/per-model usage; fed by `chat`/`chat_with_context`.
    cost_tracker: Arc<CostTracker>,
}
impl TrackedAI {
pub fn new(ai: Arc<KandilAI>, cost_tracker: Arc<CostTracker>) -> Self {
Self { ai, cost_tracker }
}
pub async fn chat(&self, message: &str) -> Result<String> {
let response = self.ai.chat(message).await?;
let provider_str = match self.ai.provider {
crate::core::adapters::ai::AIProvider::Ollama => "ollama",
crate::core::adapters::ai::AIProvider::Claude => "claude",
crate::core::adapters::ai::AIProvider::Qwen => "qwen",
crate::core::adapters::ai::AIProvider::OpenAI => "openai",
crate::core::adapters::ai::AIProvider::LmStudio => "lmstudio",
crate::core::adapters::ai::AIProvider::Gpt4All => "gpt4all",
crate::core::adapters::ai::AIProvider::FoundryLocal => "foundry",
};
self.cost_tracker.record_usage(
provider_str,
&self.ai.model,
message.len() as u32, response.len() as u32, );
Ok(response)
}
pub async fn chat_with_context(
&self,
message: &str,
workspace_path: Option<&str>,
) -> Result<String> {
let response = self.ai.chat_with_context(message, workspace_path).await?;
let provider_str = match self.ai.provider {
crate::core::adapters::ai::AIProvider::Ollama => "ollama",
crate::core::adapters::ai::AIProvider::Claude => "claude",
crate::core::adapters::ai::AIProvider::Qwen => "qwen",
crate::core::adapters::ai::AIProvider::OpenAI => "openai",
crate::core::adapters::ai::AIProvider::LmStudio => "lmstudio",
crate::core::adapters::ai::AIProvider::Gpt4All => "gpt4all",
crate::core::adapters::ai::AIProvider::FoundryLocal => "foundry",
};
let enhanced_message = if let Some(path) = workspace_path {
format!("Context from your project:\nFile: example.rs\nContent: example content\n\nUser Query: {}", message)
} else {
message.to_string()
};
self.cost_tracker.record_usage(
provider_str,
&self.ai.model,
enhanced_message.len() as u32, response.len() as u32, );
Ok(response)
}
pub fn get_provider(&self) -> String {
match self.ai.provider {
crate::core::adapters::ai::AIProvider::Ollama => "ollama".to_string(),
crate::core::adapters::ai::AIProvider::Claude => "claude".to_string(),
crate::core::adapters::ai::AIProvider::Qwen => "qwen".to_string(),
crate::core::adapters::ai::AIProvider::OpenAI => "openai".to_string(),
crate::core::adapters::ai::AIProvider::LmStudio => "lmstudio".to_string(),
crate::core::adapters::ai::AIProvider::Gpt4All => "gpt4all".to_string(),
crate::core::adapters::ai::AIProvider::FoundryLocal => "foundry".to_string(),
}
}
pub fn get_model(&self) -> &str {
&self.ai.model
}
}