use crate::cycle_manager::CycleBriefing;
use crate::models::{Message, SystemPrompt, Usage};
use crate::project_context::{ProjectContext, load_project_context_with_parents};
use crate::tui::approval::ApprovalMode;
use crate::working_set::WorkingSet;
use chrono::{DateTime, Utc};
use std::path::PathBuf;
/// All mutable state for one interactive session: model selection,
/// conversation history, accumulated token usage, approval/trust settings,
/// and per-cycle bookkeeping.
#[derive(Debug, Clone)]
pub struct Session {
    /// Identifier of the model in use.
    pub model: String,
    /// Requested reasoning effort level, if any (string form; values come from callers).
    pub reasoning_effort: Option<String>,
    /// Whether reasoning effort is chosen automatically rather than fixed.
    pub reasoning_effort_auto: bool,
    /// Whether the model itself is chosen automatically.
    pub auto_model: bool,
    /// Root directory of the workspace; project context is loaded from here
    /// (and its parents) in `Session::new`.
    pub workspace: PathBuf,
    /// Active system prompt, if one has been set.
    pub system_prompt: Option<SystemPrompt>,
    /// Hash of the last system prompt sent; presumably used to detect prompt
    /// changes between turns — confirm against caller.
    pub last_system_prompt_hash: Option<u64>,
    /// Prompt used when summarizing for compaction, if configured.
    pub compaction_summary_prompt: Option<SystemPrompt>,
    /// Full conversation history, appended to via `add_message`.
    pub messages: Vec<Message>,
    /// Token usage accumulated across the whole session.
    pub total_usage: SessionUsage,
    /// Whether shell tool use is permitted.
    pub allow_shell: bool,
    /// Whether the session runs in trust mode.
    pub trust_mode: bool,
    /// Whether tool calls are approved automatically.
    pub auto_approve: bool,
    /// Current approval policy (defaults to `ApprovalMode::Suggest`).
    pub approval_mode: ApprovalMode,
    /// Path to the session notes file.
    pub notes_path: PathBuf,
    /// Path to the MCP configuration file.
    pub mcp_config_path: PathBuf,
    /// Unique session identifier (UUID v4 string).
    pub id: String,
    /// Project context loaded from the workspace; `None` when the loaded
    /// context carried no instructions.
    pub project_context: Option<ProjectContext>,
    /// Working set rebuilt from the message history (see `rebuild_working_set`).
    pub working_set: WorkingSet,
    /// Number of completed cycles.
    pub cycle_count: u32,
    /// UTC timestamp at which the current cycle began.
    pub current_cycle_started: DateTime<Utc>,
    /// Briefings collected per cycle.
    pub cycle_briefings: Vec<CycleBriefing>,
}
/// Running token-usage totals for a session.
///
/// The cache counters are `Option` so that "the API never reported a cache
/// figure" (`None`) is distinguishable from "the API reported zero"
/// (`Some(0)`); they only become `Some` once a report arrives.
#[derive(Debug, Clone, Default)]
#[allow(clippy::struct_field_names)]
pub struct SessionUsage {
    /// Total input tokens consumed.
    pub input_tokens: u64,
    /// Total output tokens produced.
    pub output_tokens: u64,
    /// Accumulated cache-miss (cache creation) input tokens; `None` until
    /// first reported by the API.
    pub cache_creation_input_tokens: Option<u64>,
    /// Accumulated cache-hit (cache read) input tokens; `None` until first
    /// reported by the API.
    pub cache_read_input_tokens: Option<u64>,
}
impl SessionUsage {
    /// Folds a single API `Usage` report into the running totals.
    ///
    /// Input/output counts always accumulate. Cache counters stay `None`
    /// until the API actually reports a figure; the first report (even an
    /// explicit zero) switches them to `Some`, after which they accumulate.
    pub fn add(&mut self, usage: &Usage) {
        self.input_tokens += u64::from(usage.input_tokens);
        self.output_tokens += u64::from(usage.output_tokens);
        if let Some(miss) = usage.prompt_cache_miss_tokens {
            // get_or_insert(0) turns None into Some(0) on first report.
            *self.cache_creation_input_tokens.get_or_insert(0) += u64::from(miss);
        }
        if let Some(hit) = usage.prompt_cache_hit_tokens {
            *self.cache_read_input_tokens.get_or_insert(0) += u64::from(hit);
        }
    }
}
impl Session {
    /// Builds a fresh session rooted at `workspace`.
    ///
    /// Project context is loaded from the workspace and its parent
    /// directories, and kept only when it actually contains instructions.
    /// Everything else starts at its default: no prompts, empty history,
    /// zeroed usage, `Suggest` approval mode, cycle counter at 0 with the
    /// cycle clock starting now, and a freshly generated UUID id.
    pub fn new(
        model: String,
        workspace: PathBuf,
        allow_shell: bool,
        trust_mode: bool,
        notes_path: PathBuf,
        mcp_config_path: PathBuf,
    ) -> Self {
        // Discard the loaded context unless it carries real instructions.
        let loaded = load_project_context_with_parents(&workspace);
        let project_context = loaded.has_instructions().then_some(loaded);
        Self {
            id: uuid::Uuid::new_v4().to_string(),
            model,
            reasoning_effort: None,
            reasoning_effort_auto: false,
            auto_model: false,
            workspace,
            system_prompt: None,
            last_system_prompt_hash: None,
            compaction_summary_prompt: None,
            messages: Vec::new(),
            total_usage: SessionUsage::default(),
            allow_shell,
            trust_mode,
            auto_approve: false,
            approval_mode: ApprovalMode::Suggest,
            notes_path,
            mcp_config_path,
            project_context,
            working_set: WorkingSet::default(),
            cycle_count: 0,
            current_cycle_started: Utc::now(),
            cycle_briefings: Vec::new(),
        }
    }

    /// Appends one message to the conversation history.
    pub fn add_message(&mut self, message: Message) {
        self.messages.push(message);
    }

    /// Recomputes the working set from the complete message history,
    /// resolving against the session workspace.
    pub fn rebuild_working_set(&mut self) {
        self.working_set
            .rebuild_from_messages(&self.messages, &self.workspace);
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn session_usage_cache_starts_none() {
        // A brand-new record has never seen a cache report.
        let fresh = SessionUsage::default();
        assert!(fresh.cache_creation_input_tokens.is_none());
        assert!(fresh.cache_read_input_tokens.is_none());
    }

    #[test]
    fn session_usage_cache_remains_none_when_api_omits_cache() {
        // When the API omits both cache figures, the counters must not
        // spuriously flip to Some(0).
        let mut total = SessionUsage::default();
        let reported = Usage {
            input_tokens: 100,
            output_tokens: 50,
            prompt_cache_hit_tokens: None,
            prompt_cache_miss_tokens: None,
            reasoning_tokens: None,
            reasoning_replay_tokens: None,
            server_tool_use: None,
        };
        total.add(&reported);
        assert!(total.cache_creation_input_tokens.is_none());
        assert!(total.cache_read_input_tokens.is_none());
    }

    #[test]
    fn session_usage_cache_accumulates_when_reported() {
        // Reported cache figures accumulate across successive adds.
        let mut total = SessionUsage::default();
        let reported = Usage {
            input_tokens: 100,
            output_tokens: 50,
            prompt_cache_hit_tokens: Some(30),
            prompt_cache_miss_tokens: Some(70),
            reasoning_tokens: None,
            reasoning_replay_tokens: None,
            server_tool_use: None,
        };
        total.add(&reported);
        assert_eq!(total.cache_read_input_tokens, Some(30));
        assert_eq!(total.cache_creation_input_tokens, Some(70));
        // Adding the same report again doubles both counters.
        total.add(&reported);
        assert_eq!(total.cache_read_input_tokens, Some(60));
        assert_eq!(total.cache_creation_input_tokens, Some(140));
    }

    #[test]
    fn session_usage_cache_preserves_explicit_zero() {
        // An explicit zero from the API is Some(0), not None — the
        // distinction between "reported zero" and "never reported" matters.
        let mut total = SessionUsage::default();
        let reported = Usage {
            input_tokens: 100,
            output_tokens: 50,
            prompt_cache_hit_tokens: Some(0),
            prompt_cache_miss_tokens: Some(1234),
            reasoning_tokens: None,
            reasoning_replay_tokens: None,
            server_tool_use: None,
        };
        total.add(&reported);
        assert_eq!(total.cache_read_input_tokens, Some(0));
        assert_eq!(total.cache_creation_input_tokens, Some(1234));
    }
}