//! `tracevault_cli/commands/push.rs`
//!
//! Implementation of `tracevault push`: registers unpushed commits and
//! uploads incremental session/transcript deltas to the configured server.
1use crate::api_client::{resolve_credentials, ApiClient, PushTraceRequest};
2use crate::config::TracevaultConfig;
3use serde::{Deserialize, Serialize};
4use std::collections::{HashMap, HashSet};
5use std::fs;
6use std::io::{BufRead, BufReader};
7use std::path::{Path, PathBuf};
8use std::process::Command;
9use tracevault_core::diff::parse_unified_diff;
10
/// Per-session incremental push cursor, persisted as JSON in
/// `<session_dir>/.push_state`. Each index is the number of raw lines
/// already pushed, so subsequent pushes skip everything before it.
#[derive(Debug, Serialize, Deserialize, Default)]
struct PushState {
    // Lines of events.jsonl already pushed (raw line count, including unparseable lines).
    last_event_index: usize,
    // Lines of the transcript file already pushed.
    last_transcript_index: usize,
}
16
17fn read_push_state(session_dir: &Path) -> Option<PushState> {
18    let path = session_dir.join(".push_state");
19    let content = fs::read_to_string(path).ok()?;
20    serde_json::from_str(&content).ok()
21}
22
23fn write_push_state(
24    session_dir: &Path,
25    state: &PushState,
26) -> Result<(), Box<dyn std::error::Error>> {
27    let path = session_dir.join(".push_state");
28    let json = serde_json::to_string(state)?;
29    fs::write(path, json)?;
30    Ok(())
31}
32
/// Number of lines in the file at `path`; 0 when the file cannot be opened.
fn count_lines(path: &Path) -> usize {
    fs::File::open(path)
        .map(|file| BufReader::new(file).lines().count())
        .unwrap_or(0)
}
40
/// Repository facts gathered via `git rev-parse` in the project root.
struct GitInfo {
    // Basename of the repo's top-level directory, or "unknown".
    repo_name: String,
    // Current branch name; None when detached (rev-parse prints "HEAD").
    branch: Option<String>,
    // Full SHA of HEAD, or "unknown" when git is unavailable.
    head_sha: String,
}
46
47fn git_info(project_root: &Path) -> GitInfo {
48    let run = |args: &[&str]| -> Option<String> {
49        Command::new("git")
50            .args(args)
51            .current_dir(project_root)
52            .output()
53            .ok()
54            .filter(|o| o.status.success())
55            .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
56            .filter(|s| !s.is_empty())
57    };
58
59    let repo_name = run(&["rev-parse", "--show-toplevel"])
60        .as_deref()
61        .and_then(|p| p.rsplit('/').next())
62        .map(String::from)
63        .unwrap_or_else(|| "unknown".into());
64
65    let branch = run(&["rev-parse", "--abbrev-ref", "HEAD"]).filter(|b| b != "HEAD");
66
67    let head_sha = run(&["rev-parse", "HEAD"]).unwrap_or_else(|| "unknown".into());
68
69    GitInfo {
70        repo_name,
71        branch,
72        head_sha,
73    }
74}
75
/// Author name (`git log --format=%aN`) of `commit_sha`, or "unknown"
/// on any failure (git missing, bad SHA, not a repo, empty output).
fn get_commit_author(project_root: &Path, commit_sha: &str) -> String {
    let output = Command::new("git")
        .args(["log", "-1", "--format=%aN", commit_sha])
        .current_dir(project_root)
        .output();

    match output {
        Ok(out) if out.status.success() => {
            let name = String::from_utf8_lossy(&out.stdout).trim().to_string();
            if name.is_empty() {
                "unknown".into()
            } else {
                name
            }
        }
        _ => "unknown".into(),
    }
}
87
/// Location of the cached marker that records the last commit SHA pushed:
/// `<project_root>/.tracevault/cache/.last_pushed_sha`.
fn last_pushed_sha_path(project_root: &Path) -> PathBuf {
    [".tracevault", "cache", ".last_pushed_sha"]
        .iter()
        .fold(project_root.to_path_buf(), |acc, segment| acc.join(segment))
}
94
95fn read_last_pushed_sha(project_root: &Path) -> Option<String> {
96    fs::read_to_string(last_pushed_sha_path(project_root))
97        .ok()
98        .map(|s| s.trim().to_string())
99        .filter(|s| !s.is_empty())
100}
101
102fn write_last_pushed_sha(project_root: &Path, sha: &str) -> Result<(), Box<dyn std::error::Error>> {
103    let path = last_pushed_sha_path(project_root);
104    if let Some(parent) = path.parent() {
105        fs::create_dir_all(parent)?;
106    }
107    fs::write(&path, sha)?;
108    Ok(())
109}
110
/// Returns commit SHAs in chronological order (oldest first) that haven't been pushed yet.
///
/// - No recorded SHA (first push): just HEAD.
/// - Recorded SHA == HEAD: nothing new.
/// - Recorded SHA no longer in history (rebase/force-push): fall back to HEAD.
/// - Otherwise: `git rev-list --reverse <last>..HEAD`.
fn get_unpushed_commits(
    project_root: &Path,
    last_pushed: Option<&str>,
    head_sha: &str,
) -> Vec<String> {
    let last_pushed = match last_pushed {
        Some(sha) => sha,
        None => return vec![head_sha.to_string()], // First push: just HEAD
    };

    if last_pushed == head_sha {
        return vec![]; // No new commits
    }

    // Verify last_pushed SHA still exists in history (handles rebase/force-push).
    let exists = Command::new("git")
        .args(["cat-file", "-t", last_pushed])
        .current_dir(project_root)
        .output()
        .map(|o| o.status.success())
        .unwrap_or(false);

    if !exists {
        return vec![head_sha.to_string()]; // Fallback: SHA gone after rebase
    }

    // All commits between last_pushed and HEAD, oldest first. An empty
    // rev-list simply collects to an empty Vec — no special-casing needed.
    match Command::new("git")
        .args(["rev-list", "--reverse", &format!("{last_pushed}..HEAD")])
        .current_dir(project_root)
        .output()
    {
        Ok(o) if o.status.success() => String::from_utf8_lossy(&o.stdout)
            .lines()
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .map(String::from)
            .collect(),
        _ => vec![head_sha.to_string()], // Fallback on git failure
    }
}
162
/// Aggregated view of the new (not-yet-pushed) events in one session.
struct SessionSummary {
    // Number of newly parsed events (after skipping already-pushed lines).
    event_count: usize,
    // Total raw line count of events.jsonl, including skipped and unparseable lines.
    total_event_count: usize,
    // Unique file paths seen in tool_input.file_path, in first-seen order.
    files_modified: Vec<String>,
    // Distinct tool_name values across the new events.
    tools_used: HashSet<String>,
    // Distinct model values across the new events.
    models: HashSet<String>,
    // The new events themselves, parsed as JSON values.
    events: Vec<serde_json::Value>,
}
171
172fn summarize_session(session_dir: &Path, skip_events: usize) -> Option<SessionSummary> {
173    let events_path = session_dir.join("events.jsonl");
174    if !events_path.exists() {
175        return None;
176    }
177
178    let content = fs::read_to_string(&events_path).ok()?;
179    let mut files_modified = Vec::new();
180    let mut files_seen = HashSet::new();
181    let mut tools_used = HashSet::new();
182    let mut models = HashSet::new();
183    let mut events = Vec::new();
184    let mut total_lines = 0usize;
185
186    for line in content.lines() {
187        total_lines += 1;
188        if total_lines <= skip_events {
189            continue;
190        }
191
192        let event: serde_json::Value = match serde_json::from_str(line) {
193            Ok(v) => v,
194            Err(_) => continue,
195        };
196
197        if let Some(tool) = event.get("tool_name").and_then(|v| v.as_str()) {
198            tools_used.insert(tool.to_string());
199        }
200
201        if let Some(model) = event.get("model").and_then(|v| v.as_str()) {
202            models.insert(model.to_string());
203        }
204
205        // Track unique file modifications
206        if let Some(path) = event
207            .get("tool_input")
208            .and_then(|v| v.get("file_path"))
209            .and_then(|v| v.as_str())
210        {
211            if files_seen.insert(path.to_string()) {
212                files_modified.push(path.to_string());
213            }
214        }
215
216        events.push(event);
217    }
218
219    Some(SessionSummary {
220        event_count: events.len(),
221        total_event_count: total_lines,
222        files_modified,
223        tools_used,
224        models,
225        events,
226    })
227}
228
/// Running token totals for a single model, accumulated across the
/// assistant messages of a transcript.
struct ModelTokens {
    input_tokens: i64,
    output_tokens: i64,
    // Tokens served from prompt-cache reads (cache_read_input_tokens).
    cache_read_tokens: i64,
    // Tokens spent creating prompt-cache entries (cache_creation_input_tokens).
    cache_creation_tokens: i64,
    // Number of messages attributed to this model.
    requests: i64,
}
236
/// Everything extracted from a session transcript in one pass.
/// `Option` fields are `None` when the transcript is missing/unreadable or
/// the corresponding counter stayed at zero.
struct TranscriptData {
    // New transcript entries (after skipping already-pushed lines), as a JSON array.
    transcript: Option<serde_json::Value>,
    // Total raw line count of the transcript file, including skipped lines.
    total_line_count: usize,
    // Primary model — the one with the most requests.
    model: Option<String>,
    // Input tokens; includes cache-read and cache-creation tokens.
    input_tokens: Option<i64>,
    output_tokens: Option<i64>,
    // input_tokens + output_tokens.
    total_tokens: Option<i64>,
    // Per-model breakdown as a JSON array of objects.
    model_usage: Option<serde_json::Value>,
    // Wall-clock span between first and last entry timestamps.
    duration_ms: Option<i64>,
    started_at: Option<String>,
    ended_at: Option<String>,
    user_messages: Option<i32>,
    assistant_messages: Option<i32>,
    // Map of tool name -> call count, serialized to JSON.
    tool_calls_map: Option<serde_json::Value>,
    total_tool_calls: Option<i32>,
    cache_read_tokens: Option<i64>,
    cache_write_tokens: Option<i64>,
    // Count of compaction + microcompaction events observed.
    compactions: Option<i32>,
    compaction_tokens_saved: Option<i64>,
}
257
258fn accumulate_usage(
259    model_tokens: &mut HashMap<String, ModelTokens>,
260    model: &str,
261    usage: &serde_json::Value,
262) {
263    let entry = model_tokens
264        .entry(model.to_string())
265        .or_insert(ModelTokens {
266            input_tokens: 0,
267            output_tokens: 0,
268            cache_read_tokens: 0,
269            cache_creation_tokens: 0,
270            requests: 0,
271        });
272    entry.requests += 1;
273    if let Some(n) = usage.get("input_tokens").and_then(|v| v.as_i64()) {
274        entry.input_tokens += n;
275    }
276    if let Some(n) = usage.get("output_tokens").and_then(|v| v.as_i64()) {
277        entry.output_tokens += n;
278    }
279    if let Some(n) = usage
280        .get("cache_read_input_tokens")
281        .and_then(|v| v.as_i64())
282    {
283        entry.cache_read_tokens += n;
284    }
285    if let Some(n) = usage
286        .get("cache_creation_input_tokens")
287        .and_then(|v| v.as_i64())
288    {
289        entry.cache_creation_tokens += n;
290    }
291}
292
293fn extract_usage_from_message(
294    model_tokens: &mut HashMap<String, ModelTokens>,
295    message: &serde_json::Value,
296) {
297    let model = message
298        .get("model")
299        .and_then(|v| v.as_str())
300        .unwrap_or("unknown");
301    if let Some(usage) = message.get("usage") {
302        accumulate_usage(model_tokens, model, usage);
303    }
304}
305
306fn extract_nested_usage(
307    model_tokens: &mut HashMap<String, ModelTokens>,
308    entry: &serde_json::Value,
309) {
310    // Handle subagent progress messages nested in content blocks:
311    // entry.message.content[].data.message (where type == "progress" or data.type == "agent_progress")
312    let content = match entry
313        .get("message")
314        .and_then(|m| m.get("content"))
315        .and_then(|c| c.as_array())
316    {
317        Some(c) => c,
318        None => return,
319    };
320    for block in content {
321        // Look for tool_result or progress blocks that contain nested assistant messages
322        if let Some(data) = block.get("data") {
323            let data_type = data.get("type").and_then(|v| v.as_str()).unwrap_or("");
324            if data_type == "progress" || data_type == "agent_progress" {
325                if let Some(msg) = data.get("message") {
326                    extract_usage_from_message(model_tokens, msg);
327                }
328            }
329        }
330    }
331}
332
/// Parse the session transcript (path taken from `metadata.transcript_path`),
/// skipping the first `skip_lines` raw lines (already pushed), and aggregate
/// token usage, timing, message/tool counts, and compaction stats in one pass.
/// Returns an all-`None` `TranscriptData` when no transcript is available.
fn read_transcript(metadata: &Option<serde_json::Value>, skip_lines: usize) -> TranscriptData {
    // Template for "nothing found"; reused via struct-update syntax below.
    let empty = TranscriptData {
        transcript: None,
        total_line_count: 0,
        model: None,
        input_tokens: None,
        output_tokens: None,
        total_tokens: None,
        model_usage: None,
        duration_ms: None,
        started_at: None,
        ended_at: None,
        user_messages: None,
        assistant_messages: None,
        tool_calls_map: None,
        total_tool_calls: None,
        cache_read_tokens: None,
        cache_write_tokens: None,
        compactions: None,
        compaction_tokens_saved: None,
    };

    let transcript_path = metadata
        .as_ref()
        .and_then(|m| m.get("transcript_path"))
        .and_then(|v| v.as_str());

    let path = match transcript_path {
        Some(p) => std::path::PathBuf::from(p),
        None => return empty,
    };

    let content = match fs::read_to_string(&path) {
        Ok(c) => c,
        Err(_) => return empty,
    };

    let mut lines: Vec<serde_json::Value> = Vec::new();
    let mut total_input: i64 = 0;
    let mut total_output: i64 = 0;
    let mut model_tokens: HashMap<String, ModelTokens> = HashMap::new();
    let mut first_timestamp: Option<String> = None;
    let mut last_timestamp: Option<String> = None;
    let mut user_message_count: i32 = 0;
    let mut assistant_message_count: i32 = 0;
    let mut tool_calls_map: HashMap<String, i32> = HashMap::new();
    let mut total_tool_call_count: i32 = 0;
    let mut compaction_count: i32 = 0;
    let mut compaction_tokens_saved_total: i64 = 0;
    let mut total_lines = 0usize;

    for line in content.lines() {
        // Every line advances the counter — including unparseable ones —
        // so the caller's resume cursor stays aligned with the raw file.
        total_lines += 1;
        if total_lines <= skip_lines {
            continue;
        }

        let entry: serde_json::Value = match serde_json::from_str(line) {
            Ok(v) => v,
            Err(_) => continue,
        };

        // Track timestamps
        if let Some(ts) = entry.get("timestamp").and_then(|v| v.as_str()) {
            if first_timestamp.is_none() {
                first_timestamp = Some(ts.to_string());
            }
            last_timestamp = Some(ts.to_string());
        }

        // Count messages by type and extract token usage
        let entry_type = entry.get("type").and_then(|v| v.as_str());
        if entry_type == Some("user") {
            user_message_count += 1;
        }
        if entry_type == Some("assistant") {
            assistant_message_count += 1;

            // Cache-read and cache-creation tokens are deliberately folded
            // into the input total; the per-model breakdown keeps them separate.
            if let Some(usage) = entry.get("message").and_then(|m| m.get("usage")) {
                if let Some(n) = usage.get("input_tokens").and_then(|v| v.as_i64()) {
                    total_input += n;
                }
                if let Some(n) = usage.get("output_tokens").and_then(|v| v.as_i64()) {
                    total_output += n;
                }
                if let Some(n) = usage
                    .get("cache_creation_input_tokens")
                    .and_then(|v| v.as_i64())
                {
                    total_input += n;
                }
                if let Some(n) = usage
                    .get("cache_read_input_tokens")
                    .and_then(|v| v.as_i64())
                {
                    total_input += n;
                }
            }

            // Per-model breakdown from top-level assistant message
            if let Some(message) = entry.get("message") {
                extract_usage_from_message(&mut model_tokens, message);
            }

            // Also check for nested subagent messages
            extract_nested_usage(&mut model_tokens, &entry);

            // Count tool calls in content blocks
            if let Some(content) = entry
                .get("message")
                .and_then(|m| m.get("content"))
                .and_then(|c| c.as_array())
            {
                for block in content {
                    if block.get("type").and_then(|v| v.as_str()) == Some("tool_use") {
                        if let Some(name) = block.get("name").and_then(|v| v.as_str()) {
                            *tool_calls_map.entry(name.to_string()).or_insert(0) += 1;
                            total_tool_call_count += 1;
                        }
                    }
                }
            }
        }

        // Track compactions (full compactions have no tokensSaved figure;
        // microcompactions report one).
        if entry.get("compactMetadata").is_some() {
            compaction_count += 1;
        }
        if let Some(micro) = entry.get("microcompactMetadata") {
            compaction_count += 1;
            if let Some(saved) = micro.get("tokensSaved").and_then(|v| v.as_i64()) {
                compaction_tokens_saved_total += saved;
            }
        }

        lines.push(entry);
    }

    // No new entries: still report the true line count so the caller's
    // cursor advances past lines we skipped.
    if lines.is_empty() {
        return TranscriptData {
            total_line_count: total_lines,
            ..empty
        };
    }

    // Primary model = most requests
    let model = model_tokens
        .iter()
        .max_by_key(|(_, t)| t.requests)
        .map(|(name, _)| name.clone());

    let total = total_input + total_output;

    // Compute duration
    let duration_ms = match (&first_timestamp, &last_timestamp) {
        (Some(first), Some(last)) => {
            let start = chrono::DateTime::parse_from_rfc3339(first).ok();
            let end = chrono::DateTime::parse_from_rfc3339(last).ok();
            match (start, end) {
                (Some(s), Some(e)) => Some((e - s).num_milliseconds()),
                _ => None,
            }
        }
        _ => None,
    };

    // Sum cache tokens across all models
    let total_cache_read: i64 = model_tokens.values().map(|t| t.cache_read_tokens).sum();
    let total_cache_write: i64 = model_tokens.values().map(|t| t.cache_creation_tokens).sum();

    // Build model_usage JSON array
    let model_usage = if model_tokens.is_empty() {
        None
    } else {
        let arr: Vec<serde_json::Value> = model_tokens
            .into_iter()
            .map(|(name, t)| {
                serde_json::json!({
                    "model": name,
                    "input_tokens": t.input_tokens,
                    "output_tokens": t.output_tokens,
                    "cache_read_tokens": t.cache_read_tokens,
                    "cache_creation_tokens": t.cache_creation_tokens,
                    "requests": t.requests,
                })
            })
            .collect();
        Some(serde_json::Value::Array(arr))
    };

    // Zero counters collapse to None so the server can distinguish
    // "not measured" from "measured as zero".
    TranscriptData {
        transcript: Some(serde_json::Value::Array(lines)),
        total_line_count: total_lines,
        model,
        input_tokens: if total > 0 { Some(total_input) } else { None },
        output_tokens: if total > 0 { Some(total_output) } else { None },
        total_tokens: if total > 0 { Some(total) } else { None },
        model_usage,
        duration_ms,
        started_at: first_timestamp,
        ended_at: last_timestamp,
        user_messages: if user_message_count > 0 {
            Some(user_message_count)
        } else {
            None
        },
        assistant_messages: if assistant_message_count > 0 {
            Some(assistant_message_count)
        } else {
            None
        },
        tool_calls_map: if tool_calls_map.is_empty() {
            None
        } else {
            serde_json::to_value(&tool_calls_map).ok()
        },
        total_tool_calls: if total_tool_call_count > 0 {
            Some(total_tool_call_count)
        } else {
            None
        },
        cache_read_tokens: if total_cache_read > 0 {
            Some(total_cache_read)
        } else {
            None
        },
        cache_write_tokens: if total_cache_write > 0 {
            Some(total_cache_write)
        } else {
            None
        },
        compactions: if compaction_count > 0 {
            Some(compaction_count)
        } else {
            None
        },
        compaction_tokens_saved: if compaction_tokens_saved_total > 0 {
            Some(compaction_tokens_saved_total)
        } else {
            None
        },
    }
}
576
577fn read_git_diff(
578    project_root: &Path,
579    commit_sha: &str,
580) -> Option<Vec<tracevault_core::diff::FileDiff>> {
581    let output = Command::new("git")
582        .args(["diff", &format!("{commit_sha}~1..{commit_sha}")])
583        .current_dir(project_root)
584        .output()
585        .ok()?;
586
587    let raw = if output.status.success() {
588        String::from_utf8_lossy(&output.stdout).to_string()
589    } else {
590        // May fail for initial commit — try diffing against empty tree
591        let output = Command::new("git")
592            .args([
593                "diff",
594                "4b825dc642cb6eb9a060e54bf899d69f245df2c1",
595                commit_sha,
596            ])
597            .current_dir(project_root)
598            .output()
599            .ok()?;
600        if !output.status.success() {
601            return None;
602        }
603        String::from_utf8_lossy(&output.stdout).to_string()
604    };
605
606    if raw.is_empty() {
607        return None;
608    }
609    Some(parse_unified_diff(&raw))
610}
611
/// Entry point for `tracevault push`.
///
/// Two phases:
/// 1. Register every commit since the last recorded push (one trace per
///    commit, carrying only the diff).
/// 2. Push incremental session/transcript deltas, attached to HEAD.
///
/// State-update ordering is deliberate: each session's `.push_state` is
/// written only after its upload succeeds, and the last-pushed SHA marker
/// is written only after everything succeeds, so a failed run retries
/// cleanly next time.
///
/// Errors on missing credentials/config, on any commit-registration
/// failure, or when one or more sessions fail to push.
pub async fn push_traces(project_root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let (server_url, token) = resolve_credentials(project_root);

    let server_url = match server_url {
        Some(url) => url,
        None => {
            return Err("No server URL configured. Run 'tracevault login' first.".into());
        }
    };

    if token.is_none() {
        return Err("Not logged in. Run 'tracevault login' to push traces.".into());
    }

    let org_slug = TracevaultConfig::load(project_root)
        .and_then(|c| c.org_slug)
        .ok_or("No org_slug in config. Run 'tracevault init' first.")?;

    let client = ApiClient::new(&server_url, token.as_deref());

    let sessions_dir = project_root.join(".tracevault").join("sessions");

    let git = git_info(project_root);

    // Step 1: Discover and register all unpushed commits
    let last_pushed = read_last_pushed_sha(project_root);
    let unpushed = get_unpushed_commits(project_root, last_pushed.as_deref(), &git.head_sha);

    let mut commits_registered = 0;
    for sha in &unpushed {
        let author = get_commit_author(project_root, sha);
        let diff_files = read_git_diff(project_root, sha);
        let diff_data = diff_files
            .as_ref()
            .and_then(|f| serde_json::to_value(f).ok());

        // Commit-level trace: only identity + diff; all session metrics None.
        let commit_req = PushTraceRequest {
            repo_name: git.repo_name.clone(),
            commit_sha: sha.clone(),
            branch: git.branch.clone(),
            author,
            model: None,
            tool: None,
            session_id: None,
            total_tokens: None,
            input_tokens: None,
            output_tokens: None,
            estimated_cost_usd: None,
            api_calls: None,
            session_data: None,
            transcript: None,
            diff_data,
            model_usage: None,
            duration_ms: None,
            started_at: None,
            ended_at: None,
            user_messages: None,
            assistant_messages: None,
            tool_calls: None,
            total_tool_calls: None,
            cache_read_tokens: None,
            cache_write_tokens: None,
            compactions: None,
            compaction_tokens_saved: None,
        };

        // Any registration failure aborts the whole push (the `?` below),
        // so last_pushed_sha is never advanced past a failed commit.
        let commit_resp = client
            .push_trace(&org_slug, commit_req)
            .await
            .map_err(|e| {
                format!(
                    "Failed to register commit {}: {e}",
                    &sha[..8.min(sha.len())]
                )
            })?;
        println!(
            "Registered commit {} -> {}",
            &sha[..8.min(sha.len())],
            commit_resp.commit_id
        );
        commits_registered += 1;
    }

    if unpushed.is_empty() {
        println!("No new commits to register.");
    }

    // Step 2: Push session deltas (attached to HEAD)
    let mut pushed = 0;
    let mut failed = 0;

    if sessions_dir.exists() {
        for entry in fs::read_dir(&sessions_dir)? {
            let entry = entry?;
            if !entry.file_type()?.is_dir() {
                continue;
            }

            let session_dir = entry.path();

            // Determine push state: .push_state > .pushed migration > fresh start
            let push_state = if let Some(state) = read_push_state(&session_dir) {
                state
            } else if session_dir.join(".pushed").exists() {
                // Migrate from old .pushed marker: treat everything as already pushed
                let events_path = session_dir.join("events.jsonl");
                let event_count = count_lines(&events_path);

                let meta_path = session_dir.join("metadata.json");
                let metadata: Option<serde_json::Value> = meta_path
                    .exists()
                    .then(|| fs::read_to_string(&meta_path).ok())
                    .flatten()
                    .and_then(|c| serde_json::from_str(&c).ok());
                let transcript_count = metadata
                    .as_ref()
                    .and_then(|m| m.get("transcript_path"))
                    .and_then(|v| v.as_str())
                    .map(|p| count_lines(Path::new(p)))
                    .unwrap_or(0);

                let state = PushState {
                    last_event_index: event_count,
                    last_transcript_index: transcript_count,
                };
                // Persist migrated state and remove old marker
                let _ = write_push_state(&session_dir, &state);
                let _ = fs::remove_file(session_dir.join(".pushed"));
                state
            } else {
                PushState::default()
            };

            let summary = match summarize_session(&session_dir, push_state.last_event_index) {
                Some(s) => s,
                None => continue,
            };

            let meta_path = session_dir.join("metadata.json");
            let metadata: Option<serde_json::Value> = meta_path
                .exists()
                .then(|| fs::read_to_string(&meta_path).ok())
                .flatten()
                .and_then(|c| serde_json::from_str(&c).ok());

            let transcript_data = read_transcript(&metadata, push_state.last_transcript_index);

            // Skip if no new events AND no new transcript lines
            if summary.event_count == 0 && transcript_data.transcript.is_none() {
                continue;
            }

            let session_data = serde_json::json!({
                "session_id": entry.file_name().to_string_lossy(),
                "metadata": metadata,
                "event_count": summary.event_count,
                "files_modified": summary.files_modified,
                "tools_used": summary.tools_used.iter().collect::<Vec<_>>(),
                "events": summary.events,
            });

            // Prefer model from transcript, fall back to events
            let model = transcript_data
                .model
                .or_else(|| summary.models.iter().next().cloned());

            let session_name = entry.file_name().to_string_lossy().to_string();
            let author = get_commit_author(project_root, &git.head_sha);

            let req = PushTraceRequest {
                repo_name: git.repo_name.clone(),
                commit_sha: git.head_sha.clone(),
                branch: git.branch.clone(),
                author,
                model,
                tool: Some("claude-code".into()),
                session_id: Some(session_name.clone()),
                total_tokens: transcript_data.total_tokens,
                input_tokens: transcript_data.input_tokens,
                output_tokens: transcript_data.output_tokens,
                estimated_cost_usd: None,
                api_calls: Some(summary.event_count as i32),
                session_data: Some(session_data),
                transcript: transcript_data.transcript,
                diff_data: None, // commit-level only
                model_usage: transcript_data.model_usage,
                duration_ms: transcript_data.duration_ms,
                started_at: transcript_data.started_at.clone(),
                ended_at: transcript_data.ended_at.clone(),
                user_messages: transcript_data.user_messages,
                assistant_messages: transcript_data.assistant_messages,
                tool_calls: transcript_data.tool_calls_map.clone(),
                total_tool_calls: transcript_data.total_tool_calls,
                cache_read_tokens: transcript_data.cache_read_tokens,
                cache_write_tokens: transcript_data.cache_write_tokens,
                compactions: transcript_data.compactions,
                compaction_tokens_saved: transcript_data.compaction_tokens_saved,
            };

            match client.push_trace(&org_slug, req).await {
                Ok(resp) => {
                    println!(
                        "Pushed session {} ({} new events, {} files) -> {}",
                        session_name,
                        summary.event_count,
                        summary.files_modified.len(),
                        resp.commit_id,
                    );
                    // Update push state with new total counts
                    let new_state = PushState {
                        last_event_index: summary.total_event_count,
                        last_transcript_index: transcript_data.total_line_count,
                    };
                    write_push_state(&session_dir, &new_state)?;
                    pushed += 1;
                }
                Err(e) => {
                    // One failed session doesn't stop the others; the error
                    // is surfaced after the loop.
                    eprintln!("Failed to push {session_name}: {e}");
                    failed += 1;
                }
            }
        }
    }

    if pushed > 0 || failed > 0 {
        println!("\nPushed {pushed} session(s), {failed} failed.");
    } else if sessions_dir.exists() {
        println!("No new sessions to push.");
    }

    if failed > 0 {
        return Err(format!("{failed} session(s) failed to push").into());
    }

    // Only update last_pushed_sha after everything succeeds
    if commits_registered > 0 || pushed > 0 {
        write_last_pushed_sha(project_root, &git.head_sha)?;
    }

    Ok(())
}