1use crate::args::{Cli, InsightsAction};
7use crate::output::OutputStreams;
8use anyhow::{Context, Result};
9use sqry_core::uses::{DiagnosticsAggregator, UsesConfig, UsesStorage};
10
// Binary (1024-based) unit thresholds for human-readable size formatting.
const KB_BYTES: u64 = 1024;
const MB_BYTES: u64 = KB_BYTES * 1024;
const GB_BYTES: u64 = MB_BYTES * 1024;
// f64 twins of the thresholds above, used as division factors in
// `format_size` so no runtime cast of the integer constants is needed.
const KB_BYTES_F64: f64 = 1024.0;
const MB_BYTES_F64: f64 = 1024.0 * 1024.0;
const GB_BYTES_F64: f64 = 1024.0 * 1024.0 * 1024.0;
17
/// Entry point for the `insights` subcommand: dispatches each
/// `InsightsAction` variant to its dedicated handler, forwarding
/// borrowed string options via `as_deref`.
pub fn run_insights(cli: &Cli, action: &InsightsAction) -> Result<()> {
    match action {
        InsightsAction::Show { week } => run_show(cli, week.as_deref()),
        InsightsAction::Config {
            enable,
            disable,
            retention,
        } => run_config(cli, *enable, *disable, *retention),
        InsightsAction::Status => run_status(cli),
        InsightsAction::Prune { older, dry_run } => run_prune(cli, older.as_deref(), *dry_run),
        // The share capability is optional; this variant (and its handler)
        // only exist when the `share` feature is compiled in.
        #[cfg(feature = "share")]
        InsightsAction::Share {
            week,
            from,
            to,
            output,
            dry_run,
        } => run_share(
            cli,
            week.as_deref(),
            from.as_deref(),
            to.as_deref(),
            output.as_deref(),
            *dry_run,
        ),
    }
}
49
50fn run_show(cli: &Cli, week: Option<&str>) -> Result<()> {
52 let mut streams = OutputStreams::new();
53
54 let uses_dir = UsesConfig::uses_dir()
56 .context("Could not determine uses directory (home directory unavailable)")?;
57
58 let config = UsesConfig::load();
60 if !config.enabled {
61 streams.write_diagnostic(
62 "Uses capture is currently disabled. Enable with: sqry insights config --enable",
63 )?;
64 return Ok(());
65 }
66
67 let aggregator = DiagnosticsAggregator::new(&uses_dir);
69
70 let summary = if let Some(week_str) = week {
72 aggregator
73 .get_or_generate_summary(week_str)
74 .with_context(|| format!("Failed to get summary for week {week_str}"))?
75 } else {
76 aggregator
77 .summarize_current_week()
78 .context("Failed to generate summary for current week")?
79 };
80
81 if cli.json {
83 let json = serde_json::to_string_pretty(&summary)
84 .context("Failed to serialize summary to JSON")?;
85 streams.write_result(&json)?;
86 } else {
87 let output = format_summary_text(&summary);
89 streams.write_result(&output)?;
90 }
91
92 Ok(())
93}
94
95fn format_summary_text(summary: &sqry_core::uses::DiagnosticsSummary) -> String {
97 let mut lines = Vec::new();
98
99 lines.push(format!("Usage Summary for {}", summary.period));
100 lines.push(String::new());
101
102 lines.push(format!("Total uses: {}", summary.total_uses));
104 if summary.dropped_events > 0 {
105 lines.push(format!("Dropped events: {}", summary.dropped_events));
106 }
107 lines.push(String::new());
108
109 if !summary.top_workflows.is_empty() {
111 lines.push("Top Workflows:".to_string());
112 for workflow in &summary.top_workflows {
113 lines.push(format!(" {:?}: {}", workflow.kind, workflow.count));
114 }
115 lines.push(String::new());
116 }
117
118 lines.push("Timing Metrics:".to_string());
120 lines.push(format!(
121 " Average time to result: {:.2}s",
122 summary.avg_time_to_result_sec
123 ));
124 lines.push(format!(
125 " Median time to result: {:.2}s",
126 summary.median_time_to_result_sec
127 ));
128 lines.push(String::new());
129
130 lines.push(format!(
132 "Abandonment rate: {:.1}%",
133 summary.abandon_rate * 100.0
134 ));
135 lines.push(format!(
136 "AI requery rate: {:.1}%",
137 summary.ai_requery_rate * 100.0
138 ));
139
140 if !summary.abandonment.is_empty() {
142 lines.push(String::new());
143 lines.push("Abandonment by graph type:".to_string());
144 for abandon in &summary.abandonment {
145 lines.push(format!(
146 " {:?}: {:.1}%",
147 abandon.kind,
148 abandon.rate * 100.0
149 ));
150 }
151 }
152
153 lines.join("\n")
154}
155
156fn run_config(cli: &Cli, enable: bool, disable: bool, retention: Option<u32>) -> Result<()> {
158 let mut streams = OutputStreams::new();
159 let mut config = UsesConfig::load();
160 let mut modified = false;
161
162 if enable {
164 config.enabled = true;
165 modified = true;
166 }
167 if disable {
168 config.enabled = false;
169 modified = true;
170 }
171 if let Some(days) = retention {
172 config.retention_days = days;
173 modified = true;
174 }
175
176 if modified {
178 config.save().context("Failed to save configuration")?;
179 streams.write_diagnostic("Configuration updated successfully.")?;
180 }
181
182 if cli.json {
184 let json =
185 serde_json::to_string_pretty(&config).context("Failed to serialize config to JSON")?;
186 streams.write_result(&json)?;
187 } else {
188 let uses_dir = UsesConfig::uses_dir()
189 .map_or_else(|| "(unavailable)".to_string(), |p| p.display().to_string());
190
191 let output = format!(
192 "Uses Configuration:\n\
193 \n\
194 Enabled: {}\n\
195 Retention: {} days\n\
196 Storage: {}\n\
197 \n\
198 Contextual Feedback:\n\
199 - Enabled: {}\n\
200 - Frequency: {:?}\n\
201 \n\
202 Auto-summarize: {}",
203 if config.enabled { "yes" } else { "no" },
204 config.retention_days,
205 uses_dir,
206 if config.contextual_feedback.enabled {
207 "yes"
208 } else {
209 "no"
210 },
211 config.contextual_feedback.prompt_frequency,
212 if config.auto_summarize.enabled {
213 "yes"
214 } else {
215 "no"
216 },
217 );
218 streams.write_result(&output)?;
219 }
220
221 Ok(())
222}
223
224fn run_status(cli: &Cli) -> Result<()> {
226 let mut streams = OutputStreams::new();
227
228 let config = UsesConfig::load();
229 let uses_dir = UsesConfig::uses_dir()
230 .context("Could not determine uses directory (home directory unavailable)")?;
231
232 let storage = UsesStorage::new(uses_dir.clone());
234 let stats = calculate_storage_stats(&storage)?;
235
236 if cli.json {
237 let json_output = serde_json::json!({
238 "enabled": config.enabled,
239 "uses_dir": uses_dir.display().to_string(),
240 "total_files": stats.total_files,
241 "total_size_bytes": stats.total_size_bytes,
242 "oldest_date": stats.oldest_date,
243 "newest_date": stats.newest_date,
244 "retention_days": config.retention_days,
245 });
246 let json = serde_json::to_string_pretty(&json_output)
247 .context("Failed to serialize status to JSON")?;
248 streams.write_result(&json)?;
249 } else {
250 let enabled_str = if config.enabled {
251 "enabled"
252 } else {
253 "disabled"
254 };
255
256 let size_str = format_size(stats.total_size_bytes);
257 let date_range =
258 if let (Some(oldest), Some(newest)) = (&stats.oldest_date, &stats.newest_date) {
259 format!("{oldest} to {newest}")
260 } else {
261 "no data".to_string()
262 };
263
264 let output = format!(
265 "Uses Status:\n\
266 \n\
267 Capture: {enabled_str}\n\
268 Storage: {}\n\
269 Files: {}\n\
270 Size: {size_str}\n\
271 Date range: {date_range}\n\
272 Retention: {} days",
273 uses_dir.display(),
274 stats.total_files,
275 config.retention_days,
276 );
277 streams.write_result(&output)?;
278 }
279
280 Ok(())
281}
282
/// Aggregate information about the on-disk event-log store,
/// as reported by `insights status`.
struct StorageStats {
    // Number of `events-*.jsonl` files found.
    total_files: usize,
    // Combined size of those files in bytes (best-effort; files whose
    // metadata cannot be read contribute zero).
    total_size_bytes: u64,
    // Earliest event-file date (`YYYY-MM-DD`), `None` when no files exist.
    oldest_date: Option<String>,
    // Latest event-file date (`YYYY-MM-DD`), `None` when no files exist.
    newest_date: Option<String>,
}
290
291fn calculate_storage_stats(storage: &UsesStorage) -> Result<StorageStats> {
293 let events_dir = storage.events_dir();
294
295 let mut total_files = 0;
296 let mut total_size_bytes = 0u64;
297 let mut oldest_date: Option<String> = None;
298 let mut newest_date: Option<String> = None;
299
300 if events_dir.exists() {
301 for entry in std::fs::read_dir(&events_dir)? {
302 let entry = entry?;
303 let path = entry.path();
304
305 if !is_event_log_file(&path) {
306 continue;
307 }
308
309 total_files += 1;
310 if let Ok(metadata) = entry.metadata() {
311 total_size_bytes += metadata.len();
312 }
313
314 if let Some(date) = extract_event_date(&path) {
315 update_date_range(&mut oldest_date, &mut newest_date, date);
316 }
317 }
318 }
319
320 Ok(StorageStats {
321 total_files,
322 total_size_bytes,
323 oldest_date,
324 newest_date,
325 })
326}
327
/// Returns `true` when `path` looks like an event log, i.e. has a `.jsonl`
/// extension. Paths without any extension are rejected.
fn is_event_log_file(path: &std::path::Path) -> bool {
    matches!(path.extension(), Some(ext) if ext == "jsonl")
}
331
/// Extracts the `YYYY-MM-DD` portion from an `events-YYYY-MM-DD.jsonl`
/// filename. Returns `None` when the stem is missing, not valid UTF-8,
/// or lacks the `events-` prefix. The date substring itself is not validated.
fn extract_event_date(path: &std::path::Path) -> Option<&str> {
    let stem = path.file_stem()?.to_str()?;
    stem.strip_prefix("events-")
}
337
/// Widens the `[oldest, newest]` date range to include `date`.
///
/// Dates are `YYYY-MM-DD` strings, so lexicographic comparison matches
/// chronological order. Each bound is updated independently: an unset bound
/// is initialized to `date`, and a set bound only moves outward.
///
/// Robustness fix: the previous implementation silently ignored the mixed
/// `(Some, None)` state via a wildcard match arm, relying on an undocumented
/// invariant that both bounds are always set together. This version is
/// correct for every input state (and behaves identically for the states
/// the callers actually produce).
fn update_date_range(oldest: &mut Option<String>, newest: &mut Option<String>, date: &str) {
    if oldest.as_deref().is_none_or(|current| date < current) {
        *oldest = Some(date.to_string());
    }
    if newest.as_deref().is_none_or(|current| date > current) {
        *newest = Some(date.to_string());
    }
}
355
356fn format_size(bytes: u64) -> String {
358 if bytes >= GB_BYTES {
359 format!("{:.2} GB", u64_to_f64_lossy(bytes) / GB_BYTES_F64)
360 } else if bytes >= MB_BYTES {
361 format!("{:.2} MB", u64_to_f64_lossy(bytes) / MB_BYTES_F64)
362 } else if bytes >= KB_BYTES {
363 format!("{:.2} KB", u64_to_f64_lossy(bytes) / KB_BYTES_F64)
364 } else {
365 format!("{bytes} bytes")
366 }
367}
368
/// Converts a byte count to `f64` for display arithmetic.
///
/// Bug fix: the previous version narrowed through
/// `u32::try_from(value).unwrap_or(u32::MAX)`, clamping everything above
/// `u32::MAX` (~4.29 GB) and thus misreporting the size of any store larger
/// than 4 GiB as "4.00 GB". A direct cast preserves the full magnitude:
/// `f64` represents integers exactly up to 2^53, and beyond that the
/// rounding error is negligible for display purposes.
fn u64_to_f64_lossy(value: u64) -> f64 {
    // Acknowledged precision loss above 2^53 — display-only.
    #[allow(clippy::cast_precision_loss)]
    let as_f64 = value as f64;
    as_f64
}
373
374fn run_prune(cli: &Cli, older: Option<&str>, dry_run: bool) -> Result<()> {
376 let mut streams = OutputStreams::new();
377
378 let config = UsesConfig::load();
379 let uses_dir = UsesConfig::uses_dir()
380 .context("Could not determine uses directory (home directory unavailable)")?;
381
382 let retain_days = if let Some(duration_str) = older {
384 parse_duration_days(duration_str).with_context(|| {
385 format!("Invalid duration format: {duration_str}. Use format like '30d' or '90d'")
386 })?
387 } else {
388 config.retention_days
389 };
390
391 let aggregator = DiagnosticsAggregator::new(&uses_dir);
392
393 if dry_run {
394 let storage = UsesStorage::new(uses_dir.clone());
396 let preview = count_files_to_prune(&storage, retain_days)?;
397
398 if cli.json {
399 let json_output = serde_json::json!({
400 "dry_run": true,
401 "files_to_delete": preview.file_count,
402 "bytes_to_free": preview.total_bytes,
403 "retain_days": retain_days,
404 });
405 let json = serde_json::to_string_pretty(&json_output)?;
406 streams.write_result(&json)?;
407 } else {
408 let size_str = format_size(preview.total_bytes);
409 streams.write_result(&format!(
410 "Dry run: Would delete {} files ({size_str}) older than {retain_days} days",
411 preview.file_count,
412 ))?;
413 }
414 } else {
415 let pruned_count = aggregator
417 .prune(retain_days)
418 .context("Failed to prune event logs")?;
419
420 if cli.json {
421 let json_output = serde_json::json!({
422 "pruned_files": pruned_count,
423 "retain_days": retain_days,
424 });
425 let json = serde_json::to_string_pretty(&json_output)?;
426 streams.write_result(&json)?;
427 } else {
428 streams.write_result(&format!(
429 "Pruned {pruned_count} files older than {retain_days} days"
430 ))?;
431 }
432 }
433
434 Ok(())
435}
436
437fn parse_duration_days(duration: &str) -> Result<u32> {
439 let trimmed = duration.trim();
440
441 if let Some(days_str) = trimmed.strip_suffix('d') {
442 days_str.parse::<u32>().context("Invalid number of days")
443 } else {
444 trimmed
446 .parse::<u32>()
447 .context("Invalid duration. Use format like '30d' or '90d'")
448 }
449}
450
/// Result of a dry-run prune scan: what *would* be deleted.
struct PrunePreview {
    // Number of event-log files older than the retention cutoff.
    file_count: usize,
    // Combined size in bytes of those files (best-effort; files whose
    // metadata cannot be read contribute zero).
    total_bytes: u64,
}
456
457fn count_files_to_prune(storage: &UsesStorage, retain_days: u32) -> Result<PrunePreview> {
459 use chrono::{NaiveDate, Utc};
460
461 let events_dir = storage.events_dir();
462 let cutoff = Utc::now().date_naive() - chrono::Duration::days(i64::from(retain_days));
463
464 let mut file_count = 0;
465 let mut total_bytes = 0u64;
466
467 if events_dir.exists() {
468 for entry in std::fs::read_dir(&events_dir)? {
469 let entry = entry?;
470 let path = entry.path();
471
472 if path.extension().is_some_and(|e| e == "jsonl")
473 && let Some(filename) = path.file_stem().and_then(|s| s.to_str())
474 && let Some(date_str) = filename.strip_prefix("events-")
475 && let Ok(date) = NaiveDate::parse_from_str(date_str, "%Y-%m-%d")
476 && date < cutoff
477 {
478 file_count += 1;
479 if let Ok(metadata) = entry.metadata() {
480 total_bytes += metadata.len();
481 }
482 }
483 }
484 }
485
486 Ok(PrunePreview {
487 file_count,
488 total_bytes,
489 })
490}
491
#[cfg(feature = "share")]
/// Handles `insights share`: builds a local, shareable snapshot for a single
/// week, a `--from`/`--to` week range, or (by default) the current week,
/// then prints it or writes it to `--output`. Nothing is transmitted
/// anywhere; the snapshot stays on disk/stdout.
fn run_share(
    cli: &Cli,
    week: Option<&str>,
    from: Option<&str>,
    to: Option<&str>,
    output: Option<&std::path::Path>,
    dry_run: bool,
) -> Result<()> {
    let mut streams = OutputStreams::new();

    // Unlike `show`, a disabled capture here is a hard error: sharing
    // implies the user expected data to exist.
    let config = UsesConfig::load();
    if !config.enabled {
        streams.write_diagnostic(
            "Error: Uses capture is disabled. Enable with: sqry insights config --enable",
        )?;
        return Err(anyhow::anyhow!("Uses capture is disabled"));
    }

    let uses_dir = UsesConfig::uses_dir()
        .context("Could not determine uses directory (home directory unavailable)")?;
    let aggregator = DiagnosticsAggregator::new(&uses_dir);

    // Select the snapshot source: an explicit range, a single week, or the
    // current week as the fallback.
    // NOTE(review): any other combination (a lone --from/--to, or --week
    // mixed with a range) silently falls into the current-week arm —
    // confirm the CLI argument parser forbids those combinations upstream.
    let snapshot = match (week, from, to) {
        (None, Some(from_str), Some(to_str)) => {
            // Range: generate one snapshot per week, fail on the first
            // error, then merge into a single snapshot.
            let weeks = iso_weeks_in_range(from_str, to_str)?;
            let snapshots: Result<Vec<_>> = weeks
                .iter()
                .map(|w| sqry_core::uses::generate_share_snapshot(&aggregator, w))
                .collect();
            sqry_core::uses::merge_snapshots(&snapshots?)
                .context("Failed to merge weekly snapshots")?
        }
        (Some(week_str), None, None) => {
            sqry_core::uses::generate_share_snapshot(&aggregator, week_str)
                .with_context(|| format!("Failed to generate snapshot for week {week_str}"))?
        }
        _ => {
            sqry_core::uses::generate_current_share_snapshot(&aggregator)
                .context("Failed to generate current week snapshot")?
        }
    };

    // Privacy reassurance is emitted on the diagnostic stream so it never
    // pollutes machine-readable output.
    streams.write_diagnostic("This file stays on your machine. No data is sent.")?;

    if dry_run {
        let preview = sqry_core::uses::format_share_preview(&snapshot);
        streams.write_result(&preview)?;
        return Ok(());
    }

    if let Some(output_path) = output {
        let json = serde_json::to_string_pretty(&snapshot)
            .context("Failed to serialize snapshot to JSON")?;
        std::fs::write(output_path, &json)
            .with_context(|| format!("Failed to write snapshot to {}", output_path.display()))?;
        // With --json the snapshot is also echoed to stdout; otherwise only
        // a confirmation message is printed.
        if cli.json {
            streams.write_result(&json)?;
        } else {
            streams.write_result(&format!("Snapshot written to {}", output_path.display()))?;
        }
        return Ok(());
    }

    // No output path: print the snapshot itself (JSON or text preview).
    if cli.json {
        let json = serde_json::to_string_pretty(&snapshot)
            .context("Failed to serialize snapshot to JSON")?;
        streams.write_result(&json)?;
    } else {
        let text = sqry_core::uses::format_share_preview(&snapshot);
        streams.write_result(&text)?;
    }

    Ok(())
}
590
#[cfg(feature = "share")]
/// Expands an inclusive ISO-week range (e.g. `2026-W07`..=`2026-W09`) into
/// the ordered list of week labels it covers. Both endpoints are validated
/// as `YYYY-Www`, and `from` must not come after `to`.
fn iso_weeks_in_range(from: &str, to: &str) -> Result<Vec<String>> {
    use chrono::Duration;

    // Strict validation of both endpoints (rejects W00, W54, non-padded
    // week numbers, and other malformed labels).
    sqry_core::uses::IsoWeekPeriod::try_new(from).map_err(|_| {
        anyhow::anyhow!("Invalid ISO week format: {from}. Expected YYYY-Www (e.g. 2026-W09)")
    })?;
    sqry_core::uses::IsoWeekPeriod::try_new(to).map_err(|_| {
        anyhow::anyhow!("Invalid ISO week format: {to}. Expected YYYY-Www (e.g. 2026-W09)")
    })?;

    let start = iso_week_to_monday(from)?;
    let end = iso_week_to_monday(to)?;

    anyhow::ensure!(
        start <= end,
        "--from ({from}) must not be after --to ({to})"
    );

    // Step Monday-by-Monday through the range and re-render each date as an
    // ISO week label (%G = ISO week-year, %V = ISO week number).
    let weeks = std::iter::successors(Some(start), |day| Some(*day + Duration::weeks(1)))
        .take_while(|day| *day <= end)
        .map(|day| day.format("%G-W%V").to_string())
        .collect();
    Ok(weeks)
}
626
#[cfg(feature = "share")]
/// Converts an ISO week label (`YYYY-Www`) to the calendar date of that
/// week's Monday.
///
/// Uses the standard ISO-8601 anchor: January 4th always falls in week 1 of
/// its ISO year, so week 1's Monday is Jan 4 minus its weekday offset, and
/// week N's Monday is `N - 1` weeks after that.
///
/// # Errors
/// Returns an error for malformed input (missing `-` separator, missing `W`
/// prefix, non-numeric year or week). The week *number* is not range-checked
/// here — callers are expected to validate via `IsoWeekPeriod` first.
fn iso_week_to_monday(week: &str) -> Result<chrono::NaiveDate> {
    use chrono::{Datelike, Duration, NaiveDate};

    // Split "2026-W09" into "2026" and "W09" at the first '-'.
    let (year_str, week_part) = week
        .split_once('-')
        .ok_or_else(|| anyhow::anyhow!("Invalid week format: {week}"))?;
    let year: i32 = year_str
        .parse()
        .with_context(|| format!("Invalid year in week: {week}"))?;
    let week_num: u32 = week_part
        .strip_prefix('W')
        .ok_or_else(|| anyhow::anyhow!("Expected 'W' prefix in: {week}"))?
        .parse()
        .with_context(|| format!("Invalid week number in: {week}"))?;

    // Jan 4 is guaranteed to lie inside ISO week 1 of `year`.
    let jan4 = NaiveDate::from_ymd_opt(year, 1, 4)
        .ok_or_else(|| anyhow::anyhow!("Invalid year: {year}"))?;
    let days_from_monday = jan4.weekday().num_days_from_monday();
    let week1_monday = jan4 - Duration::days(i64::from(days_from_monday));
    let week_monday = week1_monday + Duration::weeks(i64::from(week_num) - 1);
    Ok(week_monday)
}
652
#[cfg(test)]
mod tests {
    use super::*;

    // Happy-path duration parsing: `Nd` suffix, bare day counts, and
    // surrounding whitespace are all accepted.
    #[test]
    fn test_parse_duration_days() {
        assert_eq!(parse_duration_days("30d").unwrap(), 30);
        assert_eq!(parse_duration_days("90d").unwrap(), 90);
        assert_eq!(parse_duration_days("365d").unwrap(), 365);
        assert_eq!(parse_duration_days("30").unwrap(), 30);
        assert_eq!(parse_duration_days(" 30d ").unwrap(), 30);
    }

    // Non-numeric input, unknown suffixes, and negative values are rejected.
    #[test]
    fn test_parse_duration_days_invalid() {
        assert!(parse_duration_days("abc").is_err());
        assert!(parse_duration_days("30x").is_err());
        assert!(parse_duration_days("-30d").is_err());
    }

    // Binary (1024-based) unit boundaries and the sub-KB fallback.
    #[test]
    fn test_format_size() {
        assert_eq!(format_size(0), "0 bytes");
        assert_eq!(format_size(500), "500 bytes");
        assert_eq!(format_size(1024), "1.00 KB");
        assert_eq!(format_size(1536), "1.50 KB");
        assert_eq!(format_size(1_048_576), "1.00 MB");
        assert_eq!(format_size(1_073_741_824), "1.00 GB");
    }

    // 2026-W01's Monday falls in calendar year 2025 — ISO week years and
    // calendar years diverge at the year boundary.
    #[cfg(feature = "share")]
    #[test]
    fn test_iso_week_to_monday_known_weeks() {
        use chrono::NaiveDate;
        let monday = iso_week_to_monday("2026-W09").unwrap();
        assert_eq!(monday.format("%Y-%m-%d").to_string(), "2026-02-23");

        let monday_w1 = iso_week_to_monday("2026-W01").unwrap();
        assert_eq!(monday_w1.format("%Y-%m-%d").to_string(), "2025-12-29");
    }

    #[cfg(feature = "share")]
    #[test]
    fn test_iso_weeks_in_range_three_weeks() {
        let weeks = iso_weeks_in_range("2026-W07", "2026-W09").unwrap();
        assert_eq!(weeks, vec!["2026-W07", "2026-W08", "2026-W09"]);
    }

    // The range is inclusive, so equal endpoints yield exactly one week.
    #[cfg(feature = "share")]
    #[test]
    fn test_iso_weeks_in_range_single_week() {
        let weeks = iso_weeks_in_range("2026-W09", "2026-W09").unwrap();
        assert_eq!(weeks, vec!["2026-W09"]);
    }

    #[cfg(feature = "share")]
    #[test]
    fn test_iso_weeks_in_range_reversed_returns_error() {
        let result = iso_weeks_in_range("2026-W09", "2026-W07");
        assert!(result.is_err(), "from > to should return error");
    }

    // Out-of-range and malformed week labels must be rejected by the
    // up-front IsoWeekPeriod validation.
    #[cfg(feature = "share")]
    #[test]
    fn test_iso_weeks_in_range_invalid_week_w00_returns_error() {
        let result = iso_weeks_in_range("2026-W00", "2026-W01");
        assert!(result.is_err(), "W00 should be rejected as invalid");
    }

    #[cfg(feature = "share")]
    #[test]
    fn test_iso_weeks_in_range_invalid_week_w54_returns_error() {
        let result = iso_weeks_in_range("2026-W01", "2026-W54");
        assert!(result.is_err(), "W54 should be rejected as invalid");
    }

    #[cfg(feature = "share")]
    #[test]
    fn test_iso_weeks_in_range_invalid_non_padded_returns_error() {
        let result = iso_weeks_in_range("2026-W9", "2026-W09");
        assert!(result.is_err(), "non-padded week W9 should be rejected");
    }
}