1use std::path::Path;
9
10use colored::Colorize;
11
12use crate::storage::sqlite::SqliteStorage;
13use crate::sync::file::{count_lines, file_size};
14use crate::sync::types::{ExportFileInfo, SyncError, SyncResult, SyncStatus};
15
/// JSONL files a sync export may produce; `get_sync_status` reports them
/// (when present in the export directory) in exactly this order.
const EXPORT_FILES: [&str; 7] = [
    "sessions.jsonl",
    "issues.jsonl",
    "context_items.jsonl",
    "memories.jsonl",
    "checkpoints.jsonl",
    "plans.jsonl",
    "deletions.jsonl",
];
26
27pub fn get_sync_status(
42 storage: &SqliteStorage,
43 export_dir: &Path,
44 project_path: &str,
45) -> SyncResult<SyncStatus> {
46 let dirty_sessions = storage
48 .get_dirty_sessions_by_project(project_path)
49 .map_err(|e| SyncError::Database(e.to_string()))?
50 .len();
51 let dirty_issues = storage
52 .get_dirty_issues_by_project(project_path)
53 .map_err(|e| SyncError::Database(e.to_string()))?
54 .len();
55 let dirty_context_items = storage
56 .get_dirty_context_items_by_project(project_path)
57 .map_err(|e| SyncError::Database(e.to_string()))?
58 .len();
59 let pending_deletions = storage
60 .count_pending_deletions(project_path)
61 .map_err(|e| SyncError::Database(e.to_string()))?;
62
63 let counts = storage
65 .get_project_counts(project_path)
66 .map_err(|e| SyncError::Database(e.to_string()))?;
67
68 let mut export_files = Vec::new();
70 for filename in EXPORT_FILES {
71 let path = export_dir.join(filename);
72 if path.exists() {
73 let size = file_size(&path);
74 let line_count = count_lines(&path).unwrap_or(0);
75 export_files.push(ExportFileInfo {
76 name: filename.to_string(),
77 size,
78 line_count,
79 });
80 }
81 }
82
83 let has_export_files = !export_files.is_empty();
84
85 let total_dirty = dirty_sessions + dirty_issues + dirty_context_items;
87 let needs_backfill = total_dirty == 0 && counts.total() > 0 && !has_export_files;
88
89 Ok(SyncStatus {
90 dirty_sessions,
91 dirty_issues,
92 dirty_context_items,
93 pending_deletions,
94 total_sessions: counts.sessions,
95 total_issues: counts.issues,
96 total_context_items: counts.context_items,
97 needs_backfill,
98 has_export_files,
99 export_files,
100 })
101}
102
103pub fn print_status(status: &SyncStatus) {
105 println!("{}", "Sync Status".bold().underline());
106 println!();
107
108 let total_data =
110 status.total_sessions + status.total_issues + status.total_context_items;
111 if total_data > 0 {
112 println!("{}", "Project Data:".blue().bold());
113 if status.total_sessions > 0 {
114 println!(" Sessions: {}", status.total_sessions);
115 }
116 if status.total_issues > 0 {
117 println!(" Issues: {}", status.total_issues);
118 }
119 if status.total_context_items > 0 {
120 println!(" Context Items: {}", status.total_context_items);
121 }
122 println!();
123 }
124
125 let total_dirty =
127 status.dirty_sessions + status.dirty_issues + status.dirty_context_items + status.pending_deletions;
128 if total_dirty > 0 {
129 println!("{}", "Pending Export:".yellow().bold());
130 if status.dirty_sessions > 0 {
131 println!(" Sessions: {}", status.dirty_sessions);
132 }
133 if status.dirty_issues > 0 {
134 println!(" Issues: {}", status.dirty_issues);
135 }
136 if status.dirty_context_items > 0 {
137 println!(" Context Items: {}", status.dirty_context_items);
138 }
139 if status.pending_deletions > 0 {
140 println!(" Deletions: {}", status.pending_deletions);
141 }
142 println!(" {}: {}", "Total".bold(), total_dirty);
143 println!();
144 println!(
145 "{}",
146 "Run 'sc sync export' to export pending changes.".dimmed()
147 );
148 } else if status.needs_backfill {
149 println!(
150 "{}",
151 "First Export Required:".yellow().bold()
152 );
153 println!(
154 " {} records exist but haven't been exported yet.",
155 total_data
156 );
157 println!();
158 println!(
159 "{}",
160 "Run 'sc sync export' to perform initial export (backfill will run automatically).".dimmed()
161 );
162 } else if total_data == 0 {
163 println!("{}", "No data for this project.".dimmed());
164 } else {
165 println!("{}", "No pending changes to export.".green());
166 }
167
168 println!();
169
170 if status.has_export_files {
172 println!("{}", "Export Files:".blue().bold());
173 for file in &status.export_files {
174 let size_str = format_size(file.size);
175 println!(
176 " {} ({}, {} records)",
177 file.name, size_str, file.line_count
178 );
179 }
180 } else {
181 println!("{}", "No export files found.".dimmed());
182 if total_data > 0 {
183 println!(
184 "{}",
185 "Run 'sc sync export' to create initial export.".dimmed()
186 );
187 }
188 }
189}
190
/// Formats a byte count as a human-readable string using binary
/// (1024-based) units.
///
/// Values below 1 KiB print as whole bytes (`"512 B"`); larger values
/// print with one decimal place in the largest fitting unit up to GB
/// (`"1.5 KB"`, `"1.0 MB"`, `"2.0 GB"`). Previously anything at or
/// above 1 GiB rendered as an oversized MB figure (e.g. "2048.0 MB").
fn format_size(bytes: u64) -> String {
    const KB: u64 = 1024;
    const MB: u64 = 1024 * KB;
    const GB: u64 = 1024 * MB;

    if bytes >= GB {
        format!("{:.1} GB", bytes as f64 / GB as f64)
    } else if bytes >= MB {
        format!("{:.1} MB", bytes as f64 / MB as f64)
    } else if bytes >= KB {
        format!("{:.1} KB", bytes as f64 / KB as f64)
    } else {
        format!("{} B", bytes)
    }
}
204
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Opens a fresh SQLite store inside `dir`.
    fn open_storage(dir: &TempDir) -> SqliteStorage {
        SqliteStorage::open(&dir.path().join("test.db")).unwrap()
    }

    #[test]
    fn test_format_size() {
        // Boundary values across the B / KB / MB tiers.
        let cases: [(u64, &str); 6] = [
            (0, "0 B"),
            (512, "512 B"),
            (1024, "1.0 KB"),
            (1536, "1.5 KB"),
            (1024 * 1024, "1.0 MB"),
            (1024 * 1024 + 512 * 1024, "1.5 MB"),
        ];
        for (bytes, expected) in cases {
            assert_eq!(format_size(bytes), expected);
        }
    }

    #[test]
    fn test_get_sync_status_empty() {
        let tmp = TempDir::new().unwrap();
        let storage = open_storage(&tmp);

        let status = get_sync_status(&storage, tmp.path(), "/test/project").unwrap();

        // A brand-new database has no records, no dirty rows, no exports.
        assert_eq!(status.dirty_sessions, 0);
        assert_eq!(status.dirty_issues, 0);
        assert_eq!(status.dirty_context_items, 0);
        assert_eq!(status.pending_deletions, 0);
        assert_eq!(status.total_sessions, 0);
        assert!(!status.has_export_files);
        assert!(status.export_files.is_empty());
        assert!(!status.needs_backfill);
    }

    #[test]
    fn test_get_sync_status_with_dirty_records() {
        let tmp = TempDir::new().unwrap();
        let mut storage = open_storage(&tmp);
        let project = "/test/project";

        // A freshly created session starts out dirty.
        storage
            .create_session("sess_1", "Test Session", None, Some(project), None, "test")
            .unwrap();

        let status = get_sync_status(&storage, tmp.path(), project).unwrap();

        assert_eq!(status.dirty_sessions, 1);
        assert_eq!(status.total_sessions, 1);
        assert_eq!(status.dirty_issues, 0);
        assert!(!status.needs_backfill);
    }

    #[test]
    fn test_get_sync_status_with_export_files() {
        let tmp = TempDir::new().unwrap();
        let storage = open_storage(&tmp);

        // Two JSONL records in a recognized export file.
        std::fs::write(
            tmp.path().join("sessions.jsonl"),
            "{\"type\":\"session\"}\n{\"type\":\"session\"}\n",
        )
        .unwrap();

        let status = get_sync_status(&storage, tmp.path(), "/test/project").unwrap();

        assert!(status.has_export_files);
        assert_eq!(status.export_files.len(), 1);
        assert_eq!(status.export_files[0].name, "sessions.jsonl");
        assert_eq!(status.export_files[0].line_count, 2);
    }

    #[test]
    fn test_needs_backfill_detection() {
        let tmp = TempDir::new().unwrap();
        let mut storage = open_storage(&tmp);
        let project = "/test/project";

        storage
            .create_session("sess_1", "Test Session", None, Some(project), None, "test")
            .unwrap();
        // Clearing the dirty flag without writing an export file leaves
        // data that was never exported — exactly the backfill condition.
        storage.clear_dirty_sessions(&["sess_1".to_string()]).unwrap();

        let status = get_sync_status(&storage, tmp.path(), project).unwrap();

        assert_eq!(status.total_sessions, 1);
        assert_eq!(status.dirty_sessions, 0);
        assert!(status.needs_backfill);
    }
}