use crate::app_paths::AppPaths;
use crate::history_protection::HistoryProtection;
use anyhow::Result;
use chrono::{DateTime, Utc};
use fuzzy_matcher::skim::SkimMatcherV2;
use fuzzy_matcher::FuzzyMatcher;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
/// Structured information mined from a SQL-ish query string, used to
/// boost history matches that reference the same tables/columns as the
/// current session.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryMetadata {
    /// Table names referenced in the FROM clause.
    #[serde(default)]
    pub tables: Vec<String>,
    /// Columns listed in the SELECT clause (empty for `SELECT *`).
    #[serde(default)]
    pub select_columns: Vec<String>,
    /// Columns referenced in the WHERE clause.
    #[serde(default)]
    pub where_columns: Vec<String>,
    /// Columns referenced in the ORDER BY clause.
    #[serde(default)]
    pub order_by_columns: Vec<String>,
    /// LINQ-style helper methods spotted in the WHERE clause.
    #[serde(default)]
    pub functions_used: Vec<String>,
    /// Coarse statement kind: "SELECT", "INSERT", "UPDATE", "DELETE" or "OTHER".
    #[serde(default)]
    pub query_type: String,
}
/// A single executed command as persisted to the history file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HistoryEntry {
    /// The exact command text as entered.
    pub command: String,
    /// When the command was recorded.
    pub timestamp: DateTime<Utc>,
    /// How many times this command text has been executed overall.
    pub execution_count: u32,
    /// Whether the recorded execution succeeded.
    pub success: bool,
    /// Wall-clock duration of the execution, when measured.
    pub duration_ms: Option<u64>,
    /// Schema columns that were available when the command ran.
    #[serde(default)]
    pub schema_columns: Vec<String>,
    /// Identifier of the data source the command ran against.
    #[serde(default)]
    pub data_source: Option<String>,
    /// Parsed query details used for schema-aware ranking.
    #[serde(default)]
    pub metadata: Option<QueryMetadata>,
    /// User-pinned favorite flag.
    #[serde(default)]
    pub is_starred: bool,
    /// Id of the application session that recorded this entry.
    #[serde(default)]
    pub session_id: Option<String>,
}
/// One search hit: the matched entry plus its combined score and the
/// matched-character indices reported by the fuzzy matcher.
#[derive(Debug, Clone)]
pub struct HistoryMatch {
    /// The matching history entry (cloned out of the store).
    pub entry: HistoryEntry,
    /// Fuzzy score plus schema-affinity bonus; higher is better.
    pub score: i64,
    /// Indices of matched characters for highlighting (empty when the
    /// query string was empty).
    pub indices: Vec<usize>,
}
/// Persistent, fuzzy-searchable command history with backup protection.
pub struct CommandHistory {
    /// All persisted entries, oldest first, one per distinct command.
    entries: Vec<HistoryEntry>,
    /// Path of the JSON history file on disk.
    history_file: PathBuf,
    /// Fuzzy matcher used by the search methods (not `Clone`; a fresh one
    /// is built when the history is cloned).
    matcher: SkimMatcherV2,
    /// Execution count per distinct command text.
    command_counts: HashMap<String, u32>,
    /// Identifier for the current application session.
    session_id: String,
    /// Entries recorded during this session only.
    session_entries: Vec<HistoryEntry>,
    /// Guards against destructive writes and manages backups.
    protection: HistoryProtection,
}
impl CommandHistory {
/// Open (or create) the persistent history, recovering from backups when
/// the on-disk file is corrupted, and deduplicate it on startup.
pub fn new() -> Result<Self> {
    let history_file = AppPaths::history_file()
        .map_err(|e| anyhow::anyhow!("Failed to get history file path: {}", e))?;
    // Backups live in a sibling directory next to the history file.
    let backup_dir = history_file
        .parent()
        .unwrap_or(std::path::Path::new("."))
        .join("history_backups");
    let protection = HistoryProtection::new(backup_dir);
    let mut history = Self {
        entries: Vec::new(),
        history_file,
        matcher: SkimMatcherV2::default(),
        command_counts: HashMap::new(),
        // Millisecond timestamp keeps session ids unique across launches.
        session_id: format!("session_{}", Utc::now().timestamp_millis()),
        session_entries: Vec::new(),
        protection,
    };
    history.load_from_file()?;
    // Rewrite the file once so duplicates removed at load time persist.
    if !history.entries.is_empty() {
        history.clean_and_save()?;
    }
    Ok(history)
}
/// Record an executed command without any schema context.
///
/// Thin wrapper over `add_entry_with_schema` with empty columns and no
/// data source.
pub fn add_entry(
    &mut self,
    command: String,
    success: bool,
    duration_ms: Option<u64>,
) -> Result<()> {
    self.add_entry_with_schema(command, success, duration_ms, Vec::new(), None)
}
/// Record an executed command together with the schema it ran against.
///
/// Blank commands and immediate repeats of the previous command are
/// ignored. The command is deduplicated in the persistent list (the new
/// entry replaces any older copy), the list is capped at 1000 entries,
/// and the file is saved after every addition.
pub fn add_entry_with_schema(
    &mut self,
    command: String,
    success: bool,
    duration_ms: Option<u64>,
    schema_columns: Vec<String>,
    data_source: Option<String>,
) -> Result<()> {
    if command.trim().is_empty() {
        return Ok(());
    }
    // Skip consecutive duplicates (same command run twice in a row).
    if let Some(last_entry) = self.entries.last() {
        if last_entry.command == command {
            return Ok(());
        }
    }
    let metadata = self.extract_query_metadata(&command);
    // Single map lookup via the entry API: bump the per-command counter
    // and reuse the updated value (the original did a `get` followed by a
    // separate `entry(..) += 1`).
    let count = self.command_counts.entry(command.clone()).or_insert(0);
    *count += 1;
    let execution_count = *count;
    let entry = HistoryEntry {
        command: command.clone(),
        timestamp: Utc::now(),
        execution_count,
        success,
        duration_ms,
        schema_columns,
        data_source,
        metadata,
        is_starred: false,
        session_id: Some(self.session_id.clone()),
    };
    self.session_entries.push(entry.clone());
    // Keep only one copy of each command in the persistent list.
    self.entries.retain(|e| e.command != command);
    self.entries.push(entry);
    // Cap history at 1000 entries, dropping the oldest first.
    if self.entries.len() > 1000 {
        let excess = self.entries.len() - 1000;
        self.entries.drain(0..excess);
    }
    self.save_to_file()
}
/// Fuzzy-search the history with no schema context; see
/// `search_with_schema` for the scoring rules.
pub fn search(&self, query: &str) -> Vec<HistoryMatch> {
    self.search_with_schema(query, &[], None)
}
/// Fuzzy-search history, boosting entries whose recorded schema overlaps
/// with the current columns / data source.
///
/// An empty query returns the 50 best-scoring of the 100 most recent
/// entries; otherwise the top 20 fuzzy matches are returned, ordered by
/// combined score, then execution count, then recency.
pub fn search_with_schema(
    &self,
    query: &str,
    current_columns: &[String],
    current_source: Option<&str>,
) -> Vec<HistoryMatch> {
    if query.is_empty() {
        // No text filter: rank recent entries purely by schema affinity.
        let mut entries: Vec<_> = self
            .entries
            .iter()
            .rev()
            .take(100)
            .map(|entry| {
                let schema_score =
                    self.calculate_schema_match_score(entry, current_columns, current_source);
                HistoryMatch {
                    entry: entry.clone(),
                    score: 100 + schema_score,
                    indices: Vec::new(),
                }
            })
            .collect();
        entries.sort_by(|a, b| b.score.cmp(&a.score));
        entries.truncate(50);
        return entries;
    }
    let mut matches: Vec<HistoryMatch> = self
        .entries
        .iter()
        .filter_map(|entry| {
            // `fuzzy_indices` yields both the match score and the matched
            // character positions (for highlighting).
            self.matcher
                .fuzzy_indices(&entry.command, query)
                .map(|(score, indices)| HistoryMatch {
                    entry: entry.clone(),
                    score: score
                        + self.calculate_schema_match_score(
                            entry,
                            current_columns,
                            current_source,
                        ),
                    indices,
                })
        })
        .collect();
    // Primary: combined score. Ties: how often the command ran, then recency.
    matches.sort_by(|a, b| {
        b.score
            .cmp(&a.score)
            .then_with(|| b.entry.execution_count.cmp(&a.entry.execution_count))
            .then_with(|| b.entry.timestamp.cmp(&a.entry.timestamp))
    });
    matches.truncate(20);
    matches
}
/// Score how well a stored entry's schema context matches the current
/// session: +50 for the same data source, up to +50 for overlapping
/// schema columns, and +5 for each column the parsed query metadata
/// shares with the current columns.
fn calculate_schema_match_score(
    &self,
    entry: &HistoryEntry,
    current_columns: &[String],
    current_source: Option<&str>,
) -> i64 {
    let mut score = 0i64;

    // Exact data-source match is the strongest signal.
    if let (Some(entry_source), Some(current)) = (&entry.data_source, current_source) {
        if entry_source == current {
            score += 50;
        }
    }

    // Overlap percentage of schema columns, halved so it caps at 50.
    if !current_columns.is_empty() && !entry.schema_columns.is_empty() {
        let overlapping = entry
            .schema_columns
            .iter()
            .filter(|col| current_columns.contains(col))
            .count();
        let denominator = entry.schema_columns.len().max(current_columns.len());
        if denominator > 0 {
            let percentage = (overlapping * 100) / denominator;
            score += (percentage as i64) / 2;
        }
    }

    // Columns mentioned anywhere in the parsed query add a small bonus each.
    if let Some(metadata) = &entry.metadata {
        let referenced: Vec<&String> = metadata
            .select_columns
            .iter()
            .chain(metadata.where_columns.iter())
            .chain(metadata.order_by_columns.iter())
            .collect();
        let hits = referenced
            .iter()
            .filter(|col| current_columns.contains(col))
            .count();
        if hits > 0 {
            score += (hits as i64) * 5;
        }
    }

    score
}
fn extract_query_metadata(&self, query: &str) -> Option<QueryMetadata> {
let query_upper = query.to_uppercase();
let query_type = if query_upper.starts_with("SELECT") {
"SELECT"
} else if query_upper.starts_with("INSERT") {
"INSERT"
} else if query_upper.starts_with("UPDATE") {
"UPDATE"
} else if query_upper.starts_with("DELETE") {
"DELETE"
} else {
"OTHER"
}
.to_string();
let mut tables = Vec::new();
if let Some(from_idx) = query_upper.find(" FROM ") {
let after_from = &query[from_idx + 6..];
if let Some(end_idx) = after_from.find([' ', '(', ';']) {
let table_name = after_from[..end_idx].trim().to_string();
if !table_name.is_empty() {
tables.push(table_name);
}
}
}
let mut select_columns = Vec::new();
if query_type == "SELECT" {
if let Some(select_idx) = query_upper.find("SELECT ") {
let after_select = &query[select_idx + 7..];
if let Some(from_idx) = after_select.to_uppercase().find(" FROM") {
let select_clause = &after_select[..from_idx];
if !select_clause.trim().eq("*") {
for col in select_clause.split(',') {
let col_name = col
.split_whitespace()
.next()
.unwrap_or("")
.trim_matches('"')
.to_string();
if !col_name.is_empty() {
select_columns.push(col_name);
}
}
}
}
}
}
let mut where_columns = Vec::new();
let mut functions_used = Vec::new();
if let Some(where_idx) = query_upper.find(" WHERE ") {
let after_where = &query[where_idx + 7..];
let linq_methods = [
"Contains",
"StartsWith",
"EndsWith",
"Length",
"ToUpper",
"ToLower",
"IsNullOrEmpty",
];
for method in &linq_methods {
if after_where.contains(method) {
functions_used.push((*method).to_string());
}
}
let words: Vec<&str> = after_where
.split(|c: char| !c.is_alphanumeric() && c != '_')
.filter(|s| !s.is_empty())
.collect();
for (i, word) in words.iter().enumerate() {
if i + 1 < words.len() {
let next = words[i + 1];
if linq_methods.contains(&next)
|| ["IS", "NOT", "LIKE", "BETWEEN"].contains(&next.to_uppercase().as_str())
{
where_columns.push((*word).to_string());
}
}
}
}
let mut order_by_columns = Vec::new();
if let Some(order_idx) = query_upper.find(" ORDER BY ") {
let after_order = &query[order_idx + 10..];
let end_idx = after_order.find([';', ')']).unwrap_or(after_order.len());
let order_clause = &after_order[..end_idx];
for col in order_clause.split(',') {
let col_name = col
.split_whitespace()
.next()
.unwrap_or("")
.trim_matches('"')
.to_string();
if !col_name.is_empty()
&& col_name.to_uppercase() != "ASC"
&& col_name.to_uppercase() != "DESC"
{
order_by_columns.push(col_name);
}
}
}
Some(QueryMetadata {
tables,
select_columns,
where_columns,
order_by_columns,
functions_used,
query_type,
})
}
/// The `limit` most recent entries, newest first.
pub fn get_recent(&self, limit: usize) -> Vec<&HistoryEntry> {
    self.entries.iter().rev().take(limit).collect()
}

/// Every persisted entry, oldest first.
pub fn get_all(&self) -> &[HistoryEntry] {
    &self.entries
}

/// The most recently added entry, if any.
pub fn get_last_entry(&self) -> Option<&HistoryEntry> {
    self.entries.last()
}

/// Entries recorded during the current session only.
pub fn get_session_entries(&self) -> &[HistoryEntry] {
    &self.session_entries
}
/// Entries offered for up/down-arrow navigation: everything from this
/// session plus starred entries from earlier sessions, newest first,
/// deduplicated by command text.
pub fn get_navigation_entries(&self) -> Vec<HistoryEntry> {
    let mut entries = self.session_entries.clone();
    // Pull in starred commands from previous sessions that this session
    // has not already run.
    let carried_over = self.entries.iter().filter(|entry| {
        entry.is_starred
            && !self
                .session_entries
                .iter()
                .any(|e| e.command == entry.command)
    });
    entries.extend(carried_over.cloned());
    entries.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
    // Keep only the first (most recent) occurrence of each command.
    let mut seen = std::collections::HashSet::new();
    entries.retain(|e| seen.insert(e.command.clone()));
    entries
}
/// Flip the starred flag on the first entry matching `command`, in both
/// the persistent list and the session list, then persist the change.
pub fn toggle_star(&mut self, command: &str) -> Result<()> {
    if let Some(entry) = self.entries.iter_mut().find(|e| e.command == command) {
        entry.is_starred = !entry.is_starred;
    }
    if let Some(entry) = self
        .session_entries
        .iter_mut()
        .find(|e| e.command == command)
    {
        entry.is_starred = !entry.is_starred;
    }
    self.save_to_file()
}
/// Remove every entry and persist the now-empty history.
///
/// A backup is taken first so an accidental clear can be undone.
pub fn clear(&mut self) -> Result<()> {
    let current_count = self.entries.len();
    if current_count > 0 {
        // Destructive operation: snapshot the current contents first.
        eprintln!("[HISTORY WARNING] Clearing {current_count} entries - creating backup");
        if let Ok(content) = serde_json::to_string_pretty(&self.entries) {
            self.protection.backup_before_write(&content, current_count);
        }
    }
    self.entries.clear();
    self.command_counts.clear();
    self.save_to_file()
}
/// Load history from disk, recovering from a backup when the file is
/// corrupted, then deduplicate by command (keeping the newest copy) and
/// rebuild the per-command execution counters.
fn load_from_file(&mut self) -> Result<()> {
    if !self.history_file.exists() {
        eprintln!("[History] No history file found at {:?}", self.history_file);
        return Ok(());
    }
    eprintln!("[History] Loading history from {:?}", self.history_file);
    let content = fs::read_to_string(&self.history_file)?;
    if content.trim().is_empty() {
        eprintln!("[History] History file is empty");
        return Ok(());
    }
    let entries: Vec<HistoryEntry> = match serde_json::from_str(&content) {
        Ok(entries) => entries,
        Err(e) => {
            eprintln!("[History] ERROR: Failed to parse history file: {e}");
            eprintln!("[History] Attempting recovery from backup...");
            self.recover_from_corruption()?
        }
    };
    eprintln!(
        "[History] Loaded {} entries from history file",
        entries.len()
    );
    // Deduplicate by command text: walk newest-to-oldest so the most
    // recent copy of each command wins, then restore chronological order.
    let original_count = entries.len();
    let mut seen_commands = std::collections::HashSet::new();
    let mut deduplicated = Vec::new();
    for entry in entries.into_iter().rev() {
        if seen_commands.insert(entry.command.clone()) {
            deduplicated.push(entry);
        }
    }
    deduplicated.reverse();
    deduplicated.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
    let removed_count = original_count - deduplicated.len();
    if removed_count > 0 {
        eprintln!("[sql-cli] Cleaned {removed_count} duplicate commands from history");
    }
    // Rebuild the execution counters from the surviving entries.
    self.command_counts.clear();
    for entry in &deduplicated {
        self.command_counts
            .insert(entry.command.clone(), entry.execution_count);
    }
    self.entries = deduplicated;
    eprintln!(
        "[History] Final history contains {} unique entries",
        self.entries.len()
    );
    Ok(())
}

/// Handle an unparseable history file: restore entries from the newest
/// backup if possible, preserving the corrupted file for post-mortem.
fn recover_from_corruption(&self) -> Result<Vec<HistoryEntry>> {
    let corrupted_path = self.history_file.with_extension("json.corrupted");
    if let Some(backup_content) = self.protection.recover_from_backup() {
        eprintln!("[History] Found backup, attempting to restore...");
        match serde_json::from_str::<Vec<HistoryEntry>>(&backup_content) {
            Ok(backup_entries) => {
                eprintln!(
                    "[History] Successfully recovered {} entries from backup",
                    backup_entries.len()
                );
                // BUG FIX: move the corrupted file aside BEFORE writing the
                // restored content. The previous order overwrote the
                // corrupted file with the backup and then renamed the
                // freshly-restored file to ".corrupted", leaving no live
                // history file and destroying the corrupted evidence.
                let _ = fs::rename(&self.history_file, &corrupted_path);
                eprintln!("[History] Corrupted file moved to {corrupted_path:?}");
                fs::write(&self.history_file, &backup_content)?;
                Ok(backup_entries)
            }
            Err(backup_err) => {
                eprintln!("[History] Backup also corrupted: {backup_err}");
                eprintln!("[History] Starting with empty history");
                Ok(Vec::new())
            }
        }
    } else {
        eprintln!("[History] No backup available, starting with empty history");
        // Keep a copy of the corrupted file for inspection.
        let _ = fs::copy(&self.history_file, &corrupted_path);
        eprintln!("[History] Corrupted file copied to {corrupted_path:?}");
        Ok(Vec::new())
    }
}
/// Persist the current entries to disk atomically (temp file + rename),
/// guarded by `HistoryProtection` against writes that would silently
/// shrink the history.
fn save_to_file(&self) -> Result<()> {
let new_content = serde_json::to_string_pretty(&self.entries)?;
let new_count = self.entries.len();
// Estimate how many entries the on-disk file currently holds by counting
// "command" keys — a cheap proxy that avoids a full JSON parse.
let old_count = if self.history_file.exists() {
if let Ok(existing) = fs::read_to_string(&self.history_file) {
existing.matches("\"command\":").count()
} else {
0
}
} else {
0
};
// Refuse suspicious writes; on rejection, try to restore from backup
// rather than persisting a possibly-truncated history.
if !self
.protection
.validate_write(old_count, new_count, &new_content)
{
eprintln!("[HISTORY PROTECTION] Write blocked! Attempting recovery from backup...");
if let Some(backup_content) = self.protection.recover_from_backup() {
fs::write(&self.history_file, backup_content)?;
return Ok(());
}
return Err(anyhow::anyhow!("History write validation failed"));
}
// Snapshot before any write that changes the entry count (or touches a
// non-trivial history) so a bad write can be rolled back.
if old_count > 0 && (old_count != new_count || old_count > 10) {
self.protection.backup_before_write(&new_content, new_count);
}
// Atomic replace: write to a temp file, then rename over the original so
// a crash mid-write never leaves a half-written history file.
let temp_file = self.history_file.with_extension("json.tmp");
fs::write(&temp_file, new_content)?;
fs::rename(temp_file, &self.history_file)?;
Ok(())
}
/// Rewrite the in-memory history (already deduplicated at load time) to
/// disk.
pub fn clean_and_save(&mut self) -> Result<()> {
    self.save_to_file()
}
/// Aggregate statistics over the loaded history.
pub fn stats(&self) -> HistoryStats {
    let total_commands = self.entries.len();
    let successful_commands = self.entries.iter().filter(|e| e.success).count();
    HistoryStats {
        total_commands,
        unique_commands: self.command_counts.len(),
        successful_commands,
        failed_commands: total_commands - successful_commands,
        // Command with the highest recorded execution count, if any.
        most_used_command: self
            .command_counts
            .iter()
            .max_by_key(|(_, &count)| count)
            .map(|(cmd, &count)| (cmd.clone(), count)),
    }
}
}
/// Aggregate counters reported by `CommandHistory::stats`.
#[derive(Debug)]
pub struct HistoryStats {
/// Number of entries currently held (after deduplication).
pub total_commands: usize,
/// Number of distinct command strings counted.
pub unique_commands: usize,
/// Entries whose execution succeeded.
pub successful_commands: usize,
/// Entries whose execution failed.
pub failed_commands: usize,
/// The command with the highest execution count, paired with that count.
pub most_used_command: Option<(String, u32)>,
}
/// Manual `Clone`: `SkimMatcherV2` does not implement `Clone`, so the
/// clone receives a fresh matcher, and `HistoryProtection` is rebuilt
/// against the same backup directory. All history data is copied.
impl Clone for CommandHistory {
    fn clone(&self) -> Self {
        let backup_dir = self
            .history_file
            .parent()
            .unwrap_or(std::path::Path::new("."))
            .join("history_backups");
        Self {
            entries: self.entries.clone(),
            history_file: self.history_file.clone(),
            matcher: SkimMatcherV2::default(),
            command_counts: self.command_counts.clone(),
            session_id: self.session_id.clone(),
            session_entries: self.session_entries.clone(),
            protection: HistoryProtection::new(backup_dir),
        }
    }
}
impl Default for CommandHistory {
    /// Like [`CommandHistory::new`], but infallible: if loading fails,
    /// fall back to an empty in-memory history backed by
    /// `.sql_cli_history.json` in the current directory.
    fn default() -> Self {
        Self::new().unwrap_or_else(|_| {
            let history_file = AppPaths::history_file()
                .unwrap_or_else(|_| PathBuf::from(".sql_cli_history.json"));
            let backup_dir = history_file
                .parent()
                .unwrap_or(std::path::Path::new("."))
                .join("history_backups");
            Self {
                entries: Vec::new(),
                history_file,
                matcher: SkimMatcherV2::default(),
                command_counts: HashMap::new(),
                // Generated lazily inside the fallback only; the original
                // computed it eagerly on every call even though `new()`
                // builds its own session id on the success path.
                session_id: format!("session_{}", Utc::now().timestamp_millis()),
                session_entries: Vec::new(),
                protection: HistoryProtection::new(backup_dir),
            }
        })
    }
}