use crate::error::{Result, SofosError};
use crate::repl::Repl;
use crate::session::{SessionMetadata, SessionTokenCounters};
use colored::Colorize;
/// Maps a model identifier to the name of the provider that serves it.
///
/// Mirrors the client routing used at startup: any model whose name
/// begins with `gpt-` is routed to OpenAI; every other name (including
/// unrecognized ones) falls back to Anthropic.
fn provider_of(model: &str) -> &'static str {
    match model.strip_prefix("gpt-") {
        Some(_) => "OpenAI",
        None => "Anthropic",
    }
}
impl Repl {
/// Returns metadata for every session the history manager has saved on disk.
pub fn list_saved_sessions(&self) -> Result<Vec<SessionMetadata>> {
self.history_manager.list_sessions()
}
/// Persists the current conversation, display history, token counters,
/// active model, and safe-mode flag under the current session id.
///
/// Returns `Ok(())` without writing anything when the conversation has
/// no messages, so an untouched REPL never creates a session file.
pub fn save_current_session(&self) -> Result<()> {
if self.session_state.conversation.messages().is_empty() {
return Ok(());
}
self.history_manager.save_session(
&self.session_state.session_id,
self.session_state.conversation.messages(),
&self.session_state.display_messages,
self.session_state.conversation.system_prompt(),
// Snapshot of the session's cumulative token accounting.
SessionTokenCounters {
total_input_tokens: self.session_state.total_input_tokens,
total_output_tokens: self.session_state.total_output_tokens,
total_cache_read_tokens: self.session_state.total_cache_read_tokens,
total_cache_creation_tokens: self.session_state.total_cache_creation_tokens,
peak_single_turn_input_tokens: self.session_state.peak_single_turn_input_tokens,
},
&self.model_config.model,
self.safe_mode,
)?;
Ok(())
}
/// Implements the interactive resume command: lists saved sessions,
/// prompts the user to pick one, then loads the selection.
///
/// Prints a notice and returns `Ok(())` when no sessions exist; does
/// nothing further when the user cancels the picker
/// (`select_session` yields `None`).
pub fn handle_resume_command(&mut self) -> Result<()> {
let sessions = self.history_manager.list_sessions()?;
if sessions.is_empty() {
println!("{}", "No saved sessions found.".yellow());
return Ok(());
}
let selected_id = crate::session::select_session(sessions)?;
if let Some(session_id) = selected_id {
self.load_session_by_id(&session_id)?;
println!(
"{} {}",
"Session loaded:".bright_green(),
"Continue your conversation below".dimmed()
);
println!();
}
Ok(())
}
/// Loads the session `session_id` from disk and replaces the current
/// REPL state with it.
///
/// All validation runs before any state is mutated:
/// - a saved model from a different provider than the active client is
///   rejected with `SofosError::Config` (the client cannot be swapped
///   at runtime, so the user must re-launch with `--model`);
/// - when the saved model differs from the current one, the configured
///   reasoning effort must be accepted by the saved model
///   (`effort_support_error`), otherwise loading fails.
///
/// On success: restores conversation messages, system prompt, display
/// history, and token counters; switches to the saved model (printing a
/// note) when it differs; restores the saved safe-mode flag, updating
/// the tool executor and available tools when it changes; then
/// re-renders the loaded transcript.
pub fn load_session_by_id(&mut self, session_id: &str) -> Result<()> {
let session = self.history_manager.load_session(session_id)?;
// Validate first: a failed check must leave `self` untouched.
if let Some(saved_model) = session.model.as_deref() {
if !saved_model.is_empty() {
let saved_provider = provider_of(saved_model);
let current_provider = self.client.provider_name();
if saved_provider != current_provider {
return Err(SofosError::Config(format!(
"Session was saved under model '{}' ({}), but the current client is {}. \
Re-launch with `--model {}` to resume.",
saved_model, saved_provider, current_provider, saved_model
)));
}
// Same provider but a different model: the saved model may not
// accept the currently configured reasoning effort.
if saved_model != self.model_config.model {
if let Some(msg) = crate::api::model_info::effort_support_error(
saved_model,
self.model_config.reasoning_effort,
) {
return Err(SofosError::Config(format!(
"{} Re-launch with `--reasoning-effort` set to a level the saved model accepts.",
msg
)));
}
}
}
}
self.session_state.session_id = session.id.clone();
// Replace the in-memory conversation with the saved one.
self.session_state.conversation.clear();
self.session_state
.conversation
.restore_messages(session.api_messages.clone());
if !session.system_prompt.is_empty() {
self.session_state
.conversation
.set_system_prompt(session.system_prompt.clone());
}
self.session_state.display_messages = session.display_messages.clone();
// Carry over token accounting so usage reporting resumes from where
// the saved session left off.
self.session_state.total_input_tokens = session.token_counters.total_input_tokens;
self.session_state.total_output_tokens = session.token_counters.total_output_tokens;
self.session_state.total_cache_read_tokens = session.token_counters.total_cache_read_tokens;
self.session_state.total_cache_creation_tokens =
session.token_counters.total_cache_creation_tokens;
self.session_state.peak_single_turn_input_tokens =
session.token_counters.peak_single_turn_input_tokens;
// Adopt the saved model; same-provider compatibility was verified above.
if let Some(saved_model) = session.model.as_deref() {
if !saved_model.is_empty() && saved_model != self.model_config.model {
println!(
"{} session was saved under model '{}'; continuing with that instead of '{}'",
"Note:".dimmed(),
saved_model,
self.model_config.model
);
self.model_config.model = saved_model.to_string();
}
}
// Restore safe mode; toggling it changes which tools are exposed.
if let Some(saved_safe_mode) = session.safe_mode {
if saved_safe_mode != self.safe_mode {
self.safe_mode = saved_safe_mode;
self.tool_executor.set_safe_mode(saved_safe_mode);
self.refresh_available_tools();
}
}
println!(
"{} {} ({} messages)",
"Loaded session:".bright_green(),
session.id,
session.api_messages.len()
);
println!();
self.ui.display_session(&session)?;
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::provider_of;

    /// Table-driven check that `provider_of` stays in lockstep with the
    /// model routing used by `build_llm_client`: `gpt-*` names map to
    /// OpenAI, everything else (including unknown names) to Anthropic.
    #[test]
    fn provider_of_matches_build_llm_client_routing() {
        let cases = [
            ("gpt-5.5", "OpenAI"),
            ("gpt-5.3-codex", "OpenAI"),
            ("claude-opus-4-7", "Anthropic"),
            ("claude-sonnet-4-6", "Anthropic"),
            ("claude-haiku-4-5", "Anthropic"),
            ("unknown-model", "Anthropic"),
        ];
        for (model, expected) in cases {
            assert_eq!(provider_of(model), expected);
        }
    }
}