Skip to main content

cc_token_usage/data/
loader.rs

use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;

use super::models::{DataQuality, GlobalDataQuality, SessionData};
use super::parser::parse_session_file;
use super::scanner::{resolve_agent_parents, scan_claude_home, scan_projects_dir};
11
12/// Extract the Claude Code version string from the first line of a JSONL file.
13///
14/// Both `user` and `assistant` entries carry a `version` field at the top level.
15fn extract_version(path: &Path) -> Option<String> {
16    let file = File::open(path).ok()?;
17    let reader = BufReader::new(file);
18    let first_line = reader.lines().next()?.ok()?;
19    let val: serde_json::Value = serde_json::from_str(&first_line).ok()?;
20    val.get("version")
21        .and_then(|v| v.as_str())
22        .map(|s| s.to_string())
23}
24
/// Compute the min and max of an iterator of borrowed, ordered values.
///
/// Used to find the first/last timestamps of a session's turns, but generic over
/// any `Ord + Copy` type (backward compatible: `DateTime<Utc>` satisfies both).
///
/// Returns `(None, None)` for an empty iterator, otherwise `(Some(min), Some(max))`.
fn time_range<'a, T, I>(timestamps: I) -> (Option<T>, Option<T>)
where
    T: Ord + Copy + 'a,
    I: Iterator<Item = &'a T>,
{
    // Single pass: fold each value into the running (min, max) pair.
    timestamps.fold((None, None), |(min, max), &ts| {
        (
            Some(min.map_or(ts, |m: T| m.min(ts))),
            Some(max.map_or(ts, |m: T| m.max(ts))),
        )
    })
}
38
39/// Build a set of requestIds from the main session turns for cross-file dedup.
40fn request_id_set(turns: &[super::models::ValidatedTurn]) -> HashSet<String> {
41    turns
42        .iter()
43        .filter_map(|t| t.request_id.as_ref())
44        .cloned()
45        .collect()
46}
47
48/// Merge agent turns into a parent session, deduplicating by requestId.
49///
50/// Claude Code writes agent responses to both the main session file and the
51/// agent file. We keep the main session's copy and skip duplicates from agents.
52fn merge_agent_turns(parent: &mut SessionData, agent_turns: Vec<super::models::ValidatedTurn>, quality: &DataQuality) {
53    let existing_rids = request_id_set(&parent.turns);
54    let before = parent.agent_turns.len();
55
56    for turn in agent_turns {
57        let dominated = turn
58            .request_id
59            .as_ref()
60            .is_some_and(|rid| existing_rids.contains(rid));
61        if !dominated {
62            parent.agent_turns.push(turn);
63        }
64    }
65
66    let added = parent.agent_turns.len() - before;
67    let deduped = quality.valid_turns.saturating_sub(added);
68
69    // Accumulate agent quality into parent's quality
70    parent.quality.total_lines += quality.total_lines;
71    parent.quality.valid_turns += added;
72    parent.quality.skipped_synthetic += quality.skipped_synthetic;
73    parent.quality.skipped_sidechain += quality.skipped_sidechain;
74    parent.quality.skipped_invalid += quality.skipped_invalid;
75    parent.quality.skipped_parse_error += quality.skipped_parse_error;
76    parent.quality.duplicate_turns += quality.duplicate_turns + deduped;
77}
78
79/// Load all session data from a Claude home directory.
80///
81/// 1. Scans for JSONL files (main sessions + agents)
82/// 2. Resolves legacy agent parent relationships
83/// 3. Parses main sessions first, then merges agent turns into their parents
84/// 4. Computes global time range and quality metrics
85pub fn load_all(claude_home: &Path) -> Result<(Vec<SessionData>, GlobalDataQuality)> {
86    let mut files = scan_claude_home(claude_home)
87        .context("failed to scan claude home for session files")?;
88    resolve_agent_parents(&mut files)
89        .context("failed to resolve agent parent sessions")?;
90    load_from_files(files)
91}
92
93/// Load all session data from a projects directory directly.
94///
95/// Unlike `load_all` which expects a Claude home directory (and appends `projects/`),
96/// this function takes the projects directory itself. Useful for loading data from
97/// archive directories like `~/.config/superpowers/conversation-archive/`.
98pub fn load_from_projects_dir(projects_dir: &Path) -> Result<(Vec<SessionData>, GlobalDataQuality)> {
99    let mut files = scan_projects_dir(projects_dir)
100        .context("failed to scan projects dir for session files")?;
101    resolve_agent_parents(&mut files)
102        .context("failed to resolve agent parent sessions")?;
103    load_from_files(files)
104}
105
106/// Shared loading logic: partition files, parse sessions, merge agents, compute time ranges.
107fn load_from_files(files: Vec<super::models::SessionFile>) -> Result<(Vec<SessionData>, GlobalDataQuality)> {
108    let (main_files, agent_files): (Vec<_>, Vec<_>) =
109        files.into_iter().partition(|f| !f.is_agent);
110
111    let mut global_quality = GlobalDataQuality {
112        total_session_files: main_files.len(),
113        total_agent_files: agent_files.len(),
114        ..Default::default()
115    };
116
117    // Process all main sessions
118    let mut sessions: HashMap<String, SessionData> = HashMap::new();
119
120    for sf in &main_files {
121        let (turns, quality) = parse_session_file(&sf.file_path, false)
122            .with_context(|| format!("failed to parse session: {}", sf.file_path.display()))?;
123
124        let version = extract_version(&sf.file_path);
125        let (first_ts, last_ts) = time_range(turns.iter().map(|t| &t.timestamp));
126
127        global_quality.total_valid_turns += quality.valid_turns;
128        global_quality.total_skipped +=
129            quality.skipped_synthetic + quality.skipped_sidechain + quality.skipped_invalid + quality.skipped_parse_error;
130
131        sessions.insert(sf.session_id.clone(), SessionData {
132            session_id: sf.session_id.clone(),
133            project: sf.project.clone(),
134            turns,
135            agent_turns: Vec::new(),
136            first_timestamp: first_ts,
137            last_timestamp: last_ts,
138            version,
139            quality,
140        });
141    }
142
143    // Process agent files and merge into parent sessions
144    for sf in &agent_files {
145        let (agent_turns, quality) = parse_session_file(&sf.file_path, true)
146            .with_context(|| format!("failed to parse agent file: {}", sf.file_path.display()))?;
147
148        global_quality.total_valid_turns += quality.valid_turns;
149        global_quality.total_skipped +=
150            quality.skipped_synthetic + quality.skipped_sidechain + quality.skipped_invalid + quality.skipped_parse_error;
151
152        let target_id = sf.parent_session_id.clone().unwrap_or_else(|| sf.session_id.clone());
153        if !sessions.contains_key(&target_id) {
154            let project = sf.project.clone().or_else(|| Some("(orphan)".to_string()));
155            sessions.insert(target_id.clone(), SessionData {
156                session_id: target_id.clone(),
157                project,
158                turns: Vec::new(),
159                agent_turns: Vec::new(),
160                first_timestamp: None,
161                last_timestamp: None,
162                version: None,
163                quality: DataQuality::default(),
164            });
165            global_quality.orphan_agents += 1;
166        }
167
168        let parent = sessions.get_mut(&target_id).unwrap();
169        merge_agent_turns(parent, agent_turns, &quality);
170    }
171
172    // Recompute time ranges to include agent turns
173    let mut result: Vec<SessionData> = sessions.into_values().collect();
174    let mut global_min: Option<DateTime<Utc>> = None;
175    let mut global_max: Option<DateTime<Utc>> = None;
176
177    for session in &mut result {
178        let all_timestamps = session.all_responses();
179        let (first_ts, last_ts) = time_range(all_timestamps.iter().map(|t| &t.timestamp));
180        session.first_timestamp = first_ts;
181        session.last_timestamp = last_ts;
182
183        if let Some(ts) = first_ts {
184            global_min = Some(global_min.map_or(ts, |m: DateTime<Utc>| m.min(ts)));
185        }
186        if let Some(ts) = last_ts {
187            global_max = Some(global_max.map_or(ts, |m: DateTime<Utc>| m.max(ts)));
188        }
189    }
190
191    global_quality.time_range = match (global_min, global_max) {
192        (Some(min), Some(max)) => Some((min, max)),
193        _ => None,
194    };
195
196    Ok((result, global_quality))
197}