1use crate::api_client::{resolve_credentials, ApiClient, PushTraceRequest};
2use crate::config::TracevaultConfig;
3use serde::{Deserialize, Serialize};
4use std::collections::{HashMap, HashSet};
5use std::fs;
6use std::io::{BufRead, BufReader};
7use std::path::{Path, PathBuf};
8use std::process::Command;
9use tracevault_core::diff::parse_unified_diff;
10use tracevault_core::gitai::{gitai_to_attribution, parse_gitai_note};
11
/// Incremental push cursor persisted per session directory (as `.push_state`).
///
/// Records how many lines of `events.jsonl` and of the transcript file have
/// already been uploaded, so later pushes send only the new lines.
#[derive(Debug, Serialize, Deserialize, Default)]
struct PushState {
    /// Number of event lines of `events.jsonl` already pushed.
    last_event_index: usize,
    /// Number of transcript lines already pushed.
    last_transcript_index: usize,
}
17
18fn read_push_state(session_dir: &Path) -> Option<PushState> {
19 let path = session_dir.join(".push_state");
20 let content = fs::read_to_string(path).ok()?;
21 serde_json::from_str(&content).ok()
22}
23
24fn write_push_state(
25 session_dir: &Path,
26 state: &PushState,
27) -> Result<(), Box<dyn std::error::Error>> {
28 let path = session_dir.join(".push_state");
29 let json = serde_json::to_string(state)?;
30 fs::write(path, json)?;
31 Ok(())
32}
33
/// Counts newline-delimited lines in the file at `path`.
///
/// Returns 0 when the file cannot be opened (missing, permissions, ...),
/// treating an unreadable file the same as an empty one.
fn count_lines(path: &Path) -> usize {
    fs::File::open(path)
        .map(|file| BufReader::new(file).lines().count())
        .unwrap_or(0)
}
41
/// Snapshot of repository identity gathered by shelling out to `git`.
struct GitInfo {
    /// Basename of the repository's top-level directory ("unknown" on failure).
    repo_name: String,
    /// Current branch name; `None` when HEAD is detached.
    branch: Option<String>,
    /// Full SHA of HEAD ("unknown" on failure).
    head_sha: String,
}
47
48fn git_info(project_root: &Path) -> GitInfo {
49 let run = |args: &[&str]| -> Option<String> {
50 Command::new("git")
51 .args(args)
52 .current_dir(project_root)
53 .output()
54 .ok()
55 .filter(|o| o.status.success())
56 .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
57 .filter(|s| !s.is_empty())
58 };
59
60 let repo_name = run(&["rev-parse", "--show-toplevel"])
61 .as_deref()
62 .and_then(|p| p.rsplit('/').next())
63 .map(String::from)
64 .unwrap_or_else(|| "unknown".into());
65
66 let branch = run(&["rev-parse", "--abbrev-ref", "HEAD"]).filter(|b| b != "HEAD");
67
68 let head_sha = run(&["rev-parse", "HEAD"]).unwrap_or_else(|| "unknown".into());
69
70 GitInfo {
71 repo_name,
72 branch,
73 head_sha,
74 }
75}
76
/// Returns the author name (`git log --format=%aN`) of `commit_sha`.
///
/// Falls back to "unknown" when the command cannot be spawned, exits
/// non-zero, or prints nothing.
fn get_commit_author(project_root: &Path, commit_sha: &str) -> String {
    let result = Command::new("git")
        .args(["log", "-1", "--format=%aN", commit_sha])
        .current_dir(project_root)
        .output();

    match result {
        Ok(output) if output.status.success() => {
            let name = String::from_utf8_lossy(&output.stdout).trim().to_string();
            if name.is_empty() {
                "unknown".into()
            } else {
                name
            }
        }
        _ => "unknown".into(),
    }
}
88
/// Path of the cache file that records the last pushed HEAD SHA:
/// `<project_root>/.tracevault/cache/.last_pushed_sha`.
fn last_pushed_sha_path(project_root: &Path) -> PathBuf {
    [".tracevault", "cache", ".last_pushed_sha"]
        .iter()
        .fold(project_root.to_path_buf(), |path, part| path.join(part))
}
95
96fn read_last_pushed_sha(project_root: &Path) -> Option<String> {
97 fs::read_to_string(last_pushed_sha_path(project_root))
98 .ok()
99 .map(|s| s.trim().to_string())
100 .filter(|s| !s.is_empty())
101}
102
103fn write_last_pushed_sha(project_root: &Path, sha: &str) -> Result<(), Box<dyn std::error::Error>> {
104 let path = last_pushed_sha_path(project_root);
105 if let Some(parent) = path.parent() {
106 fs::create_dir_all(parent)?;
107 }
108 fs::write(&path, sha)?;
109 Ok(())
110}
111
/// Determines which commits still need to be registered with the server,
/// oldest first.
///
/// Strategy:
/// - no recorded SHA -> fall back to just HEAD;
/// - recorded SHA == HEAD -> nothing to do;
/// - recorded SHA no longer resolves (e.g. history was rewritten) -> HEAD;
/// - otherwise `git rev-list --reverse <last>..HEAD`, falling back to HEAD
///   alone if the command fails.
fn get_unpushed_commits(
    project_root: &Path,
    last_pushed: Option<&str>,
    head_sha: &str,
) -> Vec<String> {
    let head_only = || vec![head_sha.to_string()];

    let Some(last_pushed) = last_pushed else {
        return head_only();
    };

    if last_pushed == head_sha {
        return Vec::new();
    }

    // Verify the recorded SHA still names an object; a rebase or fresh clone
    // can invalidate the cached value.
    let known = Command::new("git")
        .args(["cat-file", "-t", last_pushed])
        .current_dir(project_root)
        .output()
        .map(|o| o.status.success())
        .unwrap_or(false);
    if !known {
        return head_only();
    }

    let range = format!("{last_pushed}..HEAD");
    let output = Command::new("git")
        .args(["rev-list", "--reverse", &range])
        .current_dir(project_root)
        .output();

    match output {
        Ok(o) if o.status.success() => String::from_utf8_lossy(&o.stdout)
            .lines()
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .map(String::from)
            .collect(),
        _ => head_only(),
    }
}
163
/// Aggregated view of one session's new (not-yet-pushed) events.
struct SessionSummary {
    /// Number of events parsed after the skip offset (new this push).
    event_count: usize,
    /// Total lines seen in events.jsonl, including skipped and unparsable
    /// ones; becomes the next push cursor.
    total_event_count: usize,
    /// Unique `file_path` values from tool inputs, in first-seen order.
    files_modified: Vec<String>,
    /// Distinct `tool_name` values observed.
    tools_used: HashSet<String>,
    /// Distinct `model` values observed.
    models: HashSet<String>,
    /// The raw parsed event objects (new events only).
    events: Vec<serde_json::Value>,
}
172
173fn summarize_session(session_dir: &Path, skip_events: usize) -> Option<SessionSummary> {
174 let events_path = session_dir.join("events.jsonl");
175 if !events_path.exists() {
176 return None;
177 }
178
179 let content = fs::read_to_string(&events_path).ok()?;
180 let mut files_modified = Vec::new();
181 let mut files_seen = HashSet::new();
182 let mut tools_used = HashSet::new();
183 let mut models = HashSet::new();
184 let mut events = Vec::new();
185 let mut total_lines = 0usize;
186
187 for line in content.lines() {
188 total_lines += 1;
189 if total_lines <= skip_events {
190 continue;
191 }
192
193 let event: serde_json::Value = match serde_json::from_str(line) {
194 Ok(v) => v,
195 Err(_) => continue,
196 };
197
198 if let Some(tool) = event.get("tool_name").and_then(|v| v.as_str()) {
199 tools_used.insert(tool.to_string());
200 }
201
202 if let Some(model) = event.get("model").and_then(|v| v.as_str()) {
203 models.insert(model.to_string());
204 }
205
206 if let Some(path) = event
208 .get("tool_input")
209 .and_then(|v| v.get("file_path"))
210 .and_then(|v| v.as_str())
211 {
212 if files_seen.insert(path.to_string()) {
213 files_modified.push(path.to_string());
214 }
215 }
216
217 events.push(event);
218 }
219
220 Some(SessionSummary {
221 event_count: events.len(),
222 total_event_count: total_lines,
223 files_modified,
224 tools_used,
225 models,
226 events,
227 })
228}
229
/// Per-model token counters accumulated while scanning a transcript.
struct ModelTokens {
    input_tokens: i64,
    output_tokens: i64,
    /// Tokens served from prompt-cache reads (`cache_read_input_tokens`).
    cache_read_tokens: i64,
    /// Tokens spent creating prompt-cache entries
    /// (`cache_creation_input_tokens`).
    cache_creation_tokens: i64,
    /// Number of usage records attributed to this model.
    requests: i64,
}
237
/// Everything extracted from a Claude Code transcript file in one pass.
///
/// Optional fields are `None` when the quantity was zero or could not be
/// determined from the scanned entries.
struct TranscriptData {
    /// New transcript entries as a JSON array; `None` when nothing was read.
    transcript: Option<serde_json::Value>,
    /// Total lines in the transcript file, including skipped and unparsable
    /// ones; becomes the next push cursor.
    total_line_count: usize,
    /// Model with the highest request count across scanned usage records.
    model: Option<String>,
    /// Input tokens; cache read/creation tokens are folded in as well.
    input_tokens: Option<i64>,
    output_tokens: Option<i64>,
    /// input_tokens + output_tokens.
    total_tokens: Option<i64>,
    /// Per-model usage breakdown, serialized as a JSON array.
    model_usage: Option<serde_json::Value>,
    /// Wall-clock span between the first and last entry timestamps.
    duration_ms: Option<i64>,
    /// Timestamp of the first scanned entry (RFC 3339).
    started_at: Option<String>,
    /// Timestamp of the last scanned entry (RFC 3339).
    ended_at: Option<String>,
    user_messages: Option<i32>,
    assistant_messages: Option<i32>,
    /// Tool name -> invocation count, as a JSON object.
    tool_calls_map: Option<serde_json::Value>,
    total_tool_calls: Option<i32>,
    cache_read_tokens: Option<i64>,
    cache_write_tokens: Option<i64>,
    /// Count of compaction events (both full and micro compactions).
    compactions: Option<i32>,
    /// Tokens saved by microcompaction, when the transcript reports it.
    compaction_tokens_saved: Option<i64>,
}
258
259fn accumulate_usage(
260 model_tokens: &mut HashMap<String, ModelTokens>,
261 model: &str,
262 usage: &serde_json::Value,
263) {
264 let entry = model_tokens
265 .entry(model.to_string())
266 .or_insert(ModelTokens {
267 input_tokens: 0,
268 output_tokens: 0,
269 cache_read_tokens: 0,
270 cache_creation_tokens: 0,
271 requests: 0,
272 });
273 entry.requests += 1;
274 if let Some(n) = usage.get("input_tokens").and_then(|v| v.as_i64()) {
275 entry.input_tokens += n;
276 }
277 if let Some(n) = usage.get("output_tokens").and_then(|v| v.as_i64()) {
278 entry.output_tokens += n;
279 }
280 if let Some(n) = usage
281 .get("cache_read_input_tokens")
282 .and_then(|v| v.as_i64())
283 {
284 entry.cache_read_tokens += n;
285 }
286 if let Some(n) = usage
287 .get("cache_creation_input_tokens")
288 .and_then(|v| v.as_i64())
289 {
290 entry.cache_creation_tokens += n;
291 }
292}
293
294fn extract_usage_from_message(
295 model_tokens: &mut HashMap<String, ModelTokens>,
296 message: &serde_json::Value,
297) {
298 let model = message
299 .get("model")
300 .and_then(|v| v.as_str())
301 .unwrap_or("unknown");
302 if let Some(usage) = message.get("usage") {
303 accumulate_usage(model_tokens, model, usage);
304 }
305}
306
307fn extract_nested_usage(
308 model_tokens: &mut HashMap<String, ModelTokens>,
309 entry: &serde_json::Value,
310) {
311 let content = match entry
314 .get("message")
315 .and_then(|m| m.get("content"))
316 .and_then(|c| c.as_array())
317 {
318 Some(c) => c,
319 None => return,
320 };
321 for block in content {
322 if let Some(data) = block.get("data") {
324 let data_type = data.get("type").and_then(|v| v.as_str()).unwrap_or("");
325 if data_type == "progress" || data_type == "agent_progress" {
326 if let Some(msg) = data.get("message") {
327 extract_usage_from_message(model_tokens, msg);
328 }
329 }
330 }
331 }
332}
333
/// Reads and aggregates the session transcript referenced by
/// `metadata.transcript_path`, skipping the first `skip_lines` lines
/// (already pushed in a previous run).
///
/// Returns an all-`None` `TranscriptData` when the metadata has no
/// transcript path or the file cannot be read. Unparsable lines advance the
/// cursor (`total_line_count`) but are otherwise ignored.
fn read_transcript(metadata: &Option<serde_json::Value>, skip_lines: usize) -> TranscriptData {
    // Template for the "nothing found" result; also reused via struct-update
    // syntax when only the line count is known.
    let empty = TranscriptData {
        transcript: None,
        total_line_count: 0,
        model: None,
        input_tokens: None,
        output_tokens: None,
        total_tokens: None,
        model_usage: None,
        duration_ms: None,
        started_at: None,
        ended_at: None,
        user_messages: None,
        assistant_messages: None,
        tool_calls_map: None,
        total_tool_calls: None,
        cache_read_tokens: None,
        cache_write_tokens: None,
        compactions: None,
        compaction_tokens_saved: None,
    };

    let transcript_path = metadata
        .as_ref()
        .and_then(|m| m.get("transcript_path"))
        .and_then(|v| v.as_str());

    let path = match transcript_path {
        Some(p) => std::path::PathBuf::from(p),
        None => return empty,
    };

    let content = match fs::read_to_string(&path) {
        Ok(c) => c,
        Err(_) => return empty,
    };

    // Accumulators for the single pass over the transcript.
    let mut lines: Vec<serde_json::Value> = Vec::new();
    let mut total_input: i64 = 0;
    let mut total_output: i64 = 0;
    let mut model_tokens: HashMap<String, ModelTokens> = HashMap::new();
    let mut first_timestamp: Option<String> = None;
    let mut last_timestamp: Option<String> = None;
    let mut user_message_count: i32 = 0;
    let mut assistant_message_count: i32 = 0;
    let mut tool_calls_map: HashMap<String, i32> = HashMap::new();
    let mut total_tool_call_count: i32 = 0;
    let mut compaction_count: i32 = 0;
    let mut compaction_tokens_saved_total: i64 = 0;
    let mut total_lines = 0usize;

    for line in content.lines() {
        // Count every line (even skipped/unparsable) so the cursor advances.
        total_lines += 1;
        if total_lines <= skip_lines {
            continue;
        }

        let entry: serde_json::Value = match serde_json::from_str(line) {
            Ok(v) => v,
            Err(_) => continue,
        };

        // Track the first and last timestamps seen for duration/start/end.
        if let Some(ts) = entry.get("timestamp").and_then(|v| v.as_str()) {
            if first_timestamp.is_none() {
                first_timestamp = Some(ts.to_string());
            }
            last_timestamp = Some(ts.to_string());
        }

        let entry_type = entry.get("type").and_then(|v| v.as_str());
        if entry_type == Some("user") {
            user_message_count += 1;
        }
        if entry_type == Some("assistant") {
            assistant_message_count += 1;

            // Token accounting: cache read/creation tokens are folded into
            // the input total alongside plain input tokens.
            if let Some(usage) = entry.get("message").and_then(|m| m.get("usage")) {
                if let Some(n) = usage.get("input_tokens").and_then(|v| v.as_i64()) {
                    total_input += n;
                }
                if let Some(n) = usage.get("output_tokens").and_then(|v| v.as_i64()) {
                    total_output += n;
                }
                if let Some(n) = usage
                    .get("cache_creation_input_tokens")
                    .and_then(|v| v.as_i64())
                {
                    total_input += n;
                }
                if let Some(n) = usage
                    .get("cache_read_input_tokens")
                    .and_then(|v| v.as_i64())
                {
                    total_input += n;
                }
            }

            // Per-model tally: direct message usage plus any usage embedded
            // in sub-agent progress blocks.
            if let Some(message) = entry.get("message") {
                extract_usage_from_message(&mut model_tokens, message);
            }

            extract_nested_usage(&mut model_tokens, &entry);

            // Count tool_use blocks per tool name.
            if let Some(content) = entry
                .get("message")
                .and_then(|m| m.get("content"))
                .and_then(|c| c.as_array())
            {
                for block in content {
                    if block.get("type").and_then(|v| v.as_str()) == Some("tool_use") {
                        if let Some(name) = block.get("name").and_then(|v| v.as_str()) {
                            *tool_calls_map.entry(name.to_string()).or_insert(0) += 1;
                            total_tool_call_count += 1;
                        }
                    }
                }
            }
        }

        // Compaction events: full compactions carry compactMetadata;
        // microcompactions additionally report tokens saved.
        if entry.get("compactMetadata").is_some() {
            compaction_count += 1;
        }
        if let Some(micro) = entry.get("microcompactMetadata") {
            compaction_count += 1;
            if let Some(saved) = micro.get("tokensSaved").and_then(|v| v.as_i64()) {
                compaction_tokens_saved_total += saved;
            }
        }

        lines.push(entry);
    }

    // No new entries: report only how many lines exist so the cursor can
    // still be updated by the caller.
    if lines.is_empty() {
        return TranscriptData {
            total_line_count: total_lines,
            ..empty
        };
    }

    // "The" model is the one with the most usage records.
    let model = model_tokens
        .iter()
        .max_by_key(|(_, t)| t.requests)
        .map(|(name, _)| name.clone());

    let total = total_input + total_output;

    // Duration is the span between first and last RFC 3339 timestamps; if
    // either fails to parse, no duration is reported.
    let duration_ms = match (&first_timestamp, &last_timestamp) {
        (Some(first), Some(last)) => {
            let start = chrono::DateTime::parse_from_rfc3339(first).ok();
            let end = chrono::DateTime::parse_from_rfc3339(last).ok();
            match (start, end) {
                (Some(s), Some(e)) => Some((e - s).num_milliseconds()),
                _ => None,
            }
        }
        _ => None,
    };

    let total_cache_read: i64 = model_tokens.values().map(|t| t.cache_read_tokens).sum();
    let total_cache_write: i64 = model_tokens.values().map(|t| t.cache_creation_tokens).sum();

    // Serialize the per-model breakdown (consumes model_tokens).
    let model_usage = if model_tokens.is_empty() {
        None
    } else {
        let arr: Vec<serde_json::Value> = model_tokens
            .into_iter()
            .map(|(name, t)| {
                serde_json::json!({
                    "model": name,
                    "input_tokens": t.input_tokens,
                    "output_tokens": t.output_tokens,
                    "cache_read_tokens": t.cache_read_tokens,
                    "cache_creation_tokens": t.cache_creation_tokens,
                    "requests": t.requests,
                })
            })
            .collect();
        Some(serde_json::Value::Array(arr))
    };

    // Zero-valued counters are reported as None so they serialize as absent.
    TranscriptData {
        transcript: Some(serde_json::Value::Array(lines)),
        total_line_count: total_lines,
        model,
        input_tokens: if total > 0 { Some(total_input) } else { None },
        output_tokens: if total > 0 { Some(total_output) } else { None },
        total_tokens: if total > 0 { Some(total) } else { None },
        model_usage,
        duration_ms,
        started_at: first_timestamp,
        ended_at: last_timestamp,
        user_messages: if user_message_count > 0 {
            Some(user_message_count)
        } else {
            None
        },
        assistant_messages: if assistant_message_count > 0 {
            Some(assistant_message_count)
        } else {
            None
        },
        tool_calls_map: if tool_calls_map.is_empty() {
            None
        } else {
            serde_json::to_value(&tool_calls_map).ok()
        },
        total_tool_calls: if total_tool_call_count > 0 {
            Some(total_tool_call_count)
        } else {
            None
        },
        cache_read_tokens: if total_cache_read > 0 {
            Some(total_cache_read)
        } else {
            None
        },
        cache_write_tokens: if total_cache_write > 0 {
            Some(total_cache_write)
        } else {
            None
        },
        compactions: if compaction_count > 0 {
            Some(compaction_count)
        } else {
            None
        },
        compaction_tokens_saved: if compaction_tokens_saved_total > 0 {
            Some(compaction_tokens_saved_total)
        } else {
            None
        },
    }
}
577
578fn read_git_diff(
579 project_root: &Path,
580 commit_sha: &str,
581) -> Option<Vec<tracevault_core::diff::FileDiff>> {
582 let output = Command::new("git")
583 .args(["diff", &format!("{commit_sha}~1..{commit_sha}")])
584 .current_dir(project_root)
585 .output()
586 .ok()?;
587
588 let raw = if output.status.success() {
589 String::from_utf8_lossy(&output.stdout).to_string()
590 } else {
591 let output = Command::new("git")
593 .args([
594 "diff",
595 "4b825dc642cb6eb9a060e54bf899d69f245df2c1",
596 commit_sha,
597 ])
598 .current_dir(project_root)
599 .output()
600 .ok()?;
601 if !output.status.success() {
602 return None;
603 }
604 String::from_utf8_lossy(&output.stdout).to_string()
605 };
606
607 if raw.is_empty() {
608 return None;
609 }
610 Some(parse_unified_diff(&raw))
611}
612
613fn read_gitai_attribution(
614 project_root: &Path,
615 commit_sha: &str,
616 diff_files: &[tracevault_core::diff::FileDiff],
617) -> Option<serde_json::Value> {
618 let output = Command::new("git")
619 .args(["notes", "--ref", "refs/notes/ai", "show", commit_sha])
620 .current_dir(project_root)
621 .output()
622 .ok()?;
623
624 if !output.status.success() {
625 return None; }
627
628 let note = String::from_utf8_lossy(&output.stdout);
629 let log = parse_gitai_note(¬e)?;
630 let attribution = gitai_to_attribution(&log, diff_files);
631 serde_json::to_value(&attribution).ok()
632}
633
/// Reports whether the `git ai` subcommand is available on this machine.
///
/// Runs `git ai --version` with stdout/stderr silenced; a spawn failure or
/// non-zero exit is treated as "not installed".
fn is_gitai_installed() -> bool {
    let status = Command::new("git")
        .args(["ai", "--version"])
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status();
    matches!(status, Ok(s) if s.success())
}
643
/// Pushes local trace data to the configured tracevault server.
///
/// Flow:
/// 1. Resolve server URL + auth token and the org slug; error out if any
///    is missing.
/// 2. Register every commit made since the last pushed SHA, attaching diff
///    data and (when a git-ai note exists) attribution.
/// 3. Push each session under `.tracevault/sessions`, sending only events
///    and transcript lines added since the previous push (tracked via a
///    per-session `.push_state` cursor).
/// 4. Record HEAD as the new last-pushed SHA when anything was sent.
///
/// # Errors
/// Returns an error when credentials/config are missing, a commit fails to
/// register, iterating the sessions directory fails, writing a push-state
/// file fails, or one or more session pushes fail.
pub async fn push_traces(project_root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let (server_url, token) = resolve_credentials(project_root);

    let server_url = match server_url {
        Some(url) => url,
        None => {
            return Err("No server URL configured. Run 'tracevault login' first.".into());
        }
    };

    if token.is_none() {
        return Err("Not logged in. Run 'tracevault login' to push traces.".into());
    }

    let org_slug = TracevaultConfig::load(project_root)
        .and_then(|c| c.org_slug)
        .ok_or("No org_slug in config. Run 'tracevault init' first.")?;

    // git-ai is optional: warn and continue without attribution data.
    if !is_gitai_installed() {
        eprintln!("Warning: git-ai is not installed. AI attribution data will not be available.");
        eprintln!(" Install it with: npm install -g @anthropic-ai/git-ai");
        eprintln!(" See: https://github.com/anthropics/git-ai");
        eprintln!();
    }

    let client = ApiClient::new(&server_url, token.as_deref());

    let sessions_dir = project_root.join(".tracevault").join("sessions");

    let git = git_info(project_root);

    // Phase 1: register commits added since the last push.
    let last_pushed = read_last_pushed_sha(project_root);
    let unpushed = get_unpushed_commits(project_root, last_pushed.as_deref(), &git.head_sha);

    let mut commits_registered = 0;
    for sha in &unpushed {
        let author = get_commit_author(project_root, sha);
        let diff_files = read_git_diff(project_root, sha);
        let diff_data = diff_files
            .as_ref()
            .and_then(|f| serde_json::to_value(f).ok());
        let attribution =
            read_gitai_attribution(project_root, sha, diff_files.as_deref().unwrap_or(&[]));

        // Commit-level record carries identity + diff + attribution only;
        // session metrics are pushed separately in phase 2.
        let commit_req = PushTraceRequest {
            repo_name: git.repo_name.clone(),
            commit_sha: sha.clone(),
            branch: git.branch.clone(),
            author,
            model: None,
            tool: None,
            session_id: None,
            total_tokens: None,
            input_tokens: None,
            output_tokens: None,
            estimated_cost_usd: None,
            api_calls: None,
            session_data: None,
            attribution,
            transcript: None,
            diff_data,
            model_usage: None,
            duration_ms: None,
            started_at: None,
            ended_at: None,
            user_messages: None,
            assistant_messages: None,
            tool_calls: None,
            total_tool_calls: None,
            cache_read_tokens: None,
            cache_write_tokens: None,
            compactions: None,
            compaction_tokens_saved: None,
        };

        // 8.min(sha.len()) guards the display truncation against short SHAs.
        let commit_resp = client
            .push_trace(&org_slug, commit_req)
            .await
            .map_err(|e| {
                format!(
                    "Failed to register commit {}: {e}",
                    &sha[..8.min(sha.len())]
                )
            })?;
        println!(
            "Registered commit {} -> {}",
            &sha[..8.min(sha.len())],
            commit_resp.commit_id
        );
        commits_registered += 1;
    }

    if unpushed.is_empty() {
        println!("No new commits to register.");
    }

    // Phase 2: push session data incrementally.
    let mut pushed = 0;
    let mut failed = 0;

    if sessions_dir.exists() {
        for entry in fs::read_dir(&sessions_dir)? {
            let entry = entry?;
            if !entry.file_type()?.is_dir() {
                continue;
            }

            let session_dir = entry.path();

            // Resolve the push cursor for this session:
            // - normal case: read the saved .push_state;
            // - legacy case: a .pushed marker from an older version means
            //   "everything pushed" — convert current file sizes into a
            //   cursor, persist it, and drop the marker;
            // - otherwise start from zero (nothing pushed yet).
            let push_state = if let Some(state) = read_push_state(&session_dir) {
                state
            } else if session_dir.join(".pushed").exists() {
                let events_path = session_dir.join("events.jsonl");
                let event_count = count_lines(&events_path);

                let meta_path = session_dir.join("metadata.json");
                let metadata: Option<serde_json::Value> = meta_path
                    .exists()
                    .then(|| fs::read_to_string(&meta_path).ok())
                    .flatten()
                    .and_then(|c| serde_json::from_str(&c).ok());
                let transcript_count = metadata
                    .as_ref()
                    .and_then(|m| m.get("transcript_path"))
                    .and_then(|v| v.as_str())
                    .map(|p| count_lines(Path::new(p)))
                    .unwrap_or(0);

                let state = PushState {
                    last_event_index: event_count,
                    last_transcript_index: transcript_count,
                };
                // Best-effort migration; failures here must not block the push.
                let _ = write_push_state(&session_dir, &state);
                let _ = fs::remove_file(session_dir.join(".pushed"));
                state
            } else {
                PushState::default()
            };

            let summary = match summarize_session(&session_dir, push_state.last_event_index) {
                Some(s) => s,
                None => continue,
            };

            let meta_path = session_dir.join("metadata.json");
            let metadata: Option<serde_json::Value> = meta_path
                .exists()
                .then(|| fs::read_to_string(&meta_path).ok())
                .flatten()
                .and_then(|c| serde_json::from_str(&c).ok());

            let transcript_data = read_transcript(&metadata, push_state.last_transcript_index);

            // Nothing new in either the event log or the transcript: skip.
            if summary.event_count == 0 && transcript_data.transcript.is_none() {
                continue;
            }

            let session_data = serde_json::json!({
                "session_id": entry.file_name().to_string_lossy(),
                "metadata": metadata,
                "event_count": summary.event_count,
                "files_modified": summary.files_modified,
                "tools_used": summary.tools_used.iter().collect::<Vec<_>>(),
                "events": summary.events,
            });

            // Prefer the transcript's dominant model; fall back to any model
            // seen in the event log.
            let model = transcript_data
                .model
                .or_else(|| summary.models.iter().next().cloned());

            let session_name = entry.file_name().to_string_lossy().to_string();
            let author = get_commit_author(project_root, &git.head_sha);

            let req = PushTraceRequest {
                repo_name: git.repo_name.clone(),
                commit_sha: git.head_sha.clone(),
                branch: git.branch.clone(),
                author,
                model,
                tool: Some("claude-code".into()),
                session_id: Some(session_name.clone()),
                total_tokens: transcript_data.total_tokens,
                input_tokens: transcript_data.input_tokens,
                output_tokens: transcript_data.output_tokens,
                estimated_cost_usd: None,
                api_calls: Some(summary.event_count as i32),
                session_data: Some(session_data),
                attribution: None,
                transcript: transcript_data.transcript,
                diff_data: None,
                model_usage: transcript_data.model_usage,
                duration_ms: transcript_data.duration_ms,
                started_at: transcript_data.started_at.clone(),
                ended_at: transcript_data.ended_at.clone(),
                user_messages: transcript_data.user_messages,
                assistant_messages: transcript_data.assistant_messages,
                tool_calls: transcript_data.tool_calls_map.clone(),
                total_tool_calls: transcript_data.total_tool_calls,
                cache_read_tokens: transcript_data.cache_read_tokens,
                cache_write_tokens: transcript_data.cache_write_tokens,
                compactions: transcript_data.compactions,
                compaction_tokens_saved: transcript_data.compaction_tokens_saved,
            };

            match client.push_trace(&org_slug, req).await {
                Ok(resp) => {
                    println!(
                        "Pushed session {} ({} new events, {} files) -> {}",
                        session_name,
                        summary.event_count,
                        summary.files_modified.len(),
                        resp.commit_id,
                    );
                    // Advance the cursor only after a confirmed push so a
                    // failure retries the same range next time.
                    let new_state = PushState {
                        last_event_index: summary.total_event_count,
                        last_transcript_index: transcript_data.total_line_count,
                    };
                    write_push_state(&session_dir, &new_state)?;
                    pushed += 1;
                }
                Err(e) => {
                    eprintln!("Failed to push {session_name}: {e}");
                    failed += 1;
                }
            }
        }
    }

    if pushed > 0 || failed > 0 {
        println!("\nPushed {pushed} session(s), {failed} failed.");
    } else if sessions_dir.exists() {
        println!("No new sessions to push.");
    }

    if failed > 0 {
        return Err(format!("{failed} session(s) failed to push").into());
    }

    // Only advance the global SHA cursor when something was actually sent.
    if commits_registered > 0 || pushed > 0 {
        write_last_pushed_sha(project_root, &git.head_sha)?;
    }

    Ok(())
}