//! cc_token_usage/data/loader.rs
//!
//! Loads Claude Code session JSONL files into `SessionData`, merging agent
//! files into their parent sessions and tracking data-quality metrics.
1use anyhow::{Context, Result};
2use chrono::{DateTime, Utc};
3use std::collections::{HashMap, HashSet};
4use std::fs::File;
5use std::io::{BufRead, BufReader};
6use std::path::Path;
7
8use super::models::{DataQuality, GlobalDataQuality, SessionData};
9use super::parser::parse_session_file;
10use super::scanner::{resolve_agent_parents, scan_claude_home};
11
12/// Extract the Claude Code version string from the first line of a JSONL file.
13///
14/// Both `user` and `assistant` entries carry a `version` field at the top level.
15fn extract_version(path: &Path) -> Option<String> {
16    let file = File::open(path).ok()?;
17    let reader = BufReader::new(file);
18    let first_line = reader.lines().next()?.ok()?;
19    let val: serde_json::Value = serde_json::from_str(&first_line).ok()?;
20    val.get("version")
21        .and_then(|v| v.as_str())
22        .map(|s| s.to_string())
23}
24
25/// Compute the min and max timestamps from a slice of turns that have timestamps.
26fn time_range<'a, I>(timestamps: I) -> (Option<DateTime<Utc>>, Option<DateTime<Utc>>)
27where
28    I: Iterator<Item = &'a DateTime<Utc>>,
29{
30    let mut min: Option<DateTime<Utc>> = None;
31    let mut max: Option<DateTime<Utc>> = None;
32    for ts in timestamps {
33        min = Some(min.map_or(*ts, |m: DateTime<Utc>| m.min(*ts)));
34        max = Some(max.map_or(*ts, |m: DateTime<Utc>| m.max(*ts)));
35    }
36    (min, max)
37}
38
39/// Build a set of requestIds from the main session turns for cross-file dedup.
40fn request_id_set(turns: &[super::models::ValidatedTurn]) -> HashSet<String> {
41    turns
42        .iter()
43        .filter_map(|t| t.request_id.as_ref())
44        .cloned()
45        .collect()
46}
47
48/// Merge agent turns into a parent session, deduplicating by requestId.
49///
50/// Claude Code writes agent responses to both the main session file and the
51/// agent file. We keep the main session's copy and skip duplicates from agents.
52fn merge_agent_turns(parent: &mut SessionData, agent_turns: Vec<super::models::ValidatedTurn>, quality: &DataQuality) {
53    let existing_rids = request_id_set(&parent.turns);
54    let before = parent.agent_turns.len();
55
56    for turn in agent_turns {
57        let dominated = turn
58            .request_id
59            .as_ref()
60            .is_some_and(|rid| existing_rids.contains(rid));
61        if !dominated {
62            parent.agent_turns.push(turn);
63        }
64    }
65
66    let added = parent.agent_turns.len() - before;
67    let deduped = quality.valid_turns.saturating_sub(added);
68
69    // Accumulate agent quality into parent's quality
70    parent.quality.total_lines += quality.total_lines;
71    parent.quality.valid_turns += added;
72    parent.quality.skipped_synthetic += quality.skipped_synthetic;
73    parent.quality.skipped_sidechain += quality.skipped_sidechain;
74    parent.quality.skipped_invalid += quality.skipped_invalid;
75    parent.quality.skipped_parse_error += quality.skipped_parse_error;
76    parent.quality.duplicate_turns += quality.duplicate_turns + deduped;
77}
78
79/// Load all session data from a Claude home directory.
80///
81/// 1. Scans for JSONL files (main sessions + agents)
82/// 2. Resolves legacy agent parent relationships
83/// 3. Parses main sessions first, then merges agent turns into their parents
84/// 4. Computes global time range and quality metrics
85pub fn load_all(claude_home: &Path) -> Result<(Vec<SessionData>, GlobalDataQuality)> {
86    let mut files = scan_claude_home(claude_home)
87        .context("failed to scan claude home for session files")?;
88    resolve_agent_parents(&mut files)
89        .context("failed to resolve agent parent sessions")?;
90    load_from_files(files)
91}
92
93/// Shared loading logic: partition files, parse sessions, merge agents, compute time ranges.
94fn load_from_files(files: Vec<super::models::SessionFile>) -> Result<(Vec<SessionData>, GlobalDataQuality)> {
95    let (main_files, agent_files): (Vec<_>, Vec<_>) =
96        files.into_iter().partition(|f| !f.is_agent);
97
98    let mut global_quality = GlobalDataQuality {
99        total_session_files: main_files.len(),
100        total_agent_files: agent_files.len(),
101        ..Default::default()
102    };
103
104    // Process all main sessions
105    let mut sessions: HashMap<String, SessionData> = HashMap::new();
106
107    for sf in &main_files {
108        let (turns, quality) = parse_session_file(&sf.file_path, false)
109            .with_context(|| format!("failed to parse session: {}", sf.file_path.display()))?;
110
111        let version = extract_version(&sf.file_path);
112        let (first_ts, last_ts) = time_range(turns.iter().map(|t| &t.timestamp));
113
114        global_quality.total_valid_turns += quality.valid_turns;
115        global_quality.total_skipped +=
116            quality.skipped_synthetic + quality.skipped_sidechain + quality.skipped_invalid + quality.skipped_parse_error;
117
118        sessions.insert(sf.session_id.clone(), SessionData {
119            session_id: sf.session_id.clone(),
120            project: sf.project.clone(),
121            turns,
122            agent_turns: Vec::new(),
123            first_timestamp: first_ts,
124            last_timestamp: last_ts,
125            version,
126            quality,
127        });
128    }
129
130    // Process agent files and merge into parent sessions
131    for sf in &agent_files {
132        let (agent_turns, quality) = parse_session_file(&sf.file_path, true)
133            .with_context(|| format!("failed to parse agent file: {}", sf.file_path.display()))?;
134
135        global_quality.total_valid_turns += quality.valid_turns;
136        global_quality.total_skipped +=
137            quality.skipped_synthetic + quality.skipped_sidechain + quality.skipped_invalid + quality.skipped_parse_error;
138
139        let target_id = sf.parent_session_id.clone().unwrap_or_else(|| sf.session_id.clone());
140        if !sessions.contains_key(&target_id) {
141            let project = sf.project.clone().or_else(|| Some("(orphan)".to_string()));
142            sessions.insert(target_id.clone(), SessionData {
143                session_id: target_id.clone(),
144                project,
145                turns: Vec::new(),
146                agent_turns: Vec::new(),
147                first_timestamp: None,
148                last_timestamp: None,
149                version: None,
150                quality: DataQuality::default(),
151            });
152            global_quality.orphan_agents += 1;
153        }
154
155        let parent = sessions.get_mut(&target_id).unwrap();
156        merge_agent_turns(parent, agent_turns, &quality);
157    }
158
159    // Recompute time ranges to include agent turns
160    let mut result: Vec<SessionData> = sessions.into_values().collect();
161    let mut global_min: Option<DateTime<Utc>> = None;
162    let mut global_max: Option<DateTime<Utc>> = None;
163
164    for session in &mut result {
165        let all_timestamps = session.all_responses();
166        let (first_ts, last_ts) = time_range(all_timestamps.iter().map(|t| &t.timestamp));
167        session.first_timestamp = first_ts;
168        session.last_timestamp = last_ts;
169
170        if let Some(ts) = first_ts {
171            global_min = Some(global_min.map_or(ts, |m: DateTime<Utc>| m.min(ts)));
172        }
173        if let Some(ts) = last_ts {
174            global_max = Some(global_max.map_or(ts, |m: DateTime<Utc>| m.max(ts)));
175        }
176    }
177
178    global_quality.time_range = match (global_min, global_max) {
179        (Some(min), Some(max)) => Some((min, max)),
180        _ => None,
181    };
182
183    Ok((result, global_quality))
184}