// SPDX-License-Identifier: Apache-2.0

//! Local contribution history tracking.
//!
//! Stores contribution records in `~/.local/share/aptu/history.json`.
//! Each contribution tracks repo, issue, action, timestamp, and status.
8use std::fs;
9use std::path::PathBuf;
10
11use anyhow::{Context, Result};
12use chrono::{DateTime, Utc};
13use serde::{Deserialize, Serialize};
14use uuid::Uuid;
15
16use crate::config::data_dir;
17
/// AI usage statistics for a contribution.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AiStats {
    /// Provider name (e.g., "openrouter", "anthropic").
    pub provider: String,
    /// Model used for analysis.
    pub model: String,
    /// Number of input tokens.
    pub input_tokens: u64,
    /// Number of output tokens.
    pub output_tokens: u64,
    /// Duration of the API call in milliseconds.
    pub duration_ms: u64,
    /// Cost in USD (from `OpenRouter` API, `None` if not reported).
    // `#[serde(default)]` keeps records written before this field existed parseable.
    #[serde(default)]
    pub cost_usd: Option<f64>,
    /// Fallback provider used if primary failed (`None` if primary succeeded).
    #[serde(default)]
    pub fallback_provider: Option<String>,
    /// Prompt size in characters (defaults to 0 for older records).
    #[serde(default)]
    pub prompt_chars: usize,
}
41
/// Status of a contribution.
///
/// Serialized in lowercase (e.g. `"pending"`) via `rename_all`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ContributionStatus {
    /// Contribution submitted, awaiting maintainer response (the default).
    #[default]
    Pending,
    /// Maintainer accepted the contribution.
    Accepted,
    /// Maintainer rejected the contribution.
    Rejected,
}
54
/// A single contribution record.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Contribution {
    /// Unique identifier.
    pub id: Uuid,
    /// Repository in "owner/repo" format.
    pub repo: String,
    /// Issue number.
    pub issue: u64,
    /// Action type (e.g., "triage").
    pub action: String,
    /// When the contribution was made.
    pub timestamp: DateTime<Utc>,
    /// URL to the posted comment.
    pub comment_url: String,
    /// Current status of the contribution.
    // Defaults to `Pending` when absent, for records written before this field existed.
    #[serde(default)]
    pub status: ContributionStatus,
    /// AI usage statistics for this contribution.
    // Optional and omitted from JSON when `None`, so older records stay parseable
    // and new records without stats stay compact.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ai_stats: Option<AiStats>,
}
77
/// Container for all contribution history.
///
/// This is the on-disk representation of `history.json`.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct HistoryData {
    /// List of contributions, in insertion order.
    pub contributions: Vec<Contribution>,
}
84
85impl HistoryData {
86    /// Calculate total tokens used across all contributions.
87    #[must_use]
88    pub fn total_tokens(&self) -> u64 {
89        self.contributions
90            .iter()
91            .filter_map(|c| c.ai_stats.as_ref())
92            .map(|stats| stats.input_tokens + stats.output_tokens)
93            .sum()
94    }
95
96    /// Calculate total cost in USD across all contributions.
97    #[must_use]
98    pub fn total_cost(&self) -> f64 {
99        self.contributions
100            .iter()
101            .filter_map(|c| c.ai_stats.as_ref())
102            .filter_map(|stats| stats.cost_usd)
103            .sum()
104    }
105
106    /// Calculate average tokens per triage.
107    #[must_use]
108    #[allow(clippy::cast_precision_loss)]
109    pub fn avg_tokens_per_triage(&self) -> f64 {
110        let contributions_with_stats: Vec<_> = self
111            .contributions
112            .iter()
113            .filter_map(|c| c.ai_stats.as_ref())
114            .collect();
115
116        if contributions_with_stats.is_empty() {
117            return 0.0;
118        }
119
120        let total: u64 = contributions_with_stats
121            .iter()
122            .map(|stats| stats.input_tokens + stats.output_tokens)
123            .sum();
124
125        total as f64 / contributions_with_stats.len() as f64
126    }
127
128    /// Calculate total cost grouped by model.
129    #[must_use]
130    pub fn cost_by_model(&self) -> std::collections::HashMap<String, f64> {
131        let mut costs = std::collections::HashMap::new();
132
133        for contribution in &self.contributions {
134            if let Some(stats) = &contribution.ai_stats
135                && let Some(cost) = stats.cost_usd
136            {
137                *costs.entry(stats.model.clone()).or_insert(0.0) += cost;
138            }
139        }
140
141        costs
142    }
143}
144
145/// Returns the path to the history file.
146#[must_use]
147pub fn history_file_path() -> PathBuf {
148    data_dir().join("history.json")
149}
150
151/// Load contribution history from disk.
152///
153/// Returns empty history if file doesn't exist.
154pub fn load() -> Result<HistoryData> {
155    let path = history_file_path();
156
157    if !path.exists() {
158        return Ok(HistoryData::default());
159    }
160
161    let contents = fs::read_to_string(&path)
162        .with_context(|| format!("Failed to read history file: {}", path.display()))?;
163
164    let data: HistoryData = serde_json::from_str(&contents)
165        .with_context(|| format!("Failed to parse history file: {}", path.display()))?;
166
167    Ok(data)
168}
169
170/// Save contribution history to disk.
171///
172/// Creates parent directories if they don't exist.
173pub fn save(data: &HistoryData) -> Result<()> {
174    let path = history_file_path();
175
176    // Create parent directories if needed
177    if let Some(parent) = path.parent() {
178        fs::create_dir_all(parent)
179            .with_context(|| format!("Failed to create directory: {}", parent.display()))?;
180    }
181
182    let contents =
183        serde_json::to_string_pretty(data).context("Failed to serialize history data")?;
184
185    fs::write(&path, contents)
186        .with_context(|| format!("Failed to write history file: {}", path.display()))?;
187
188    Ok(())
189}
190
191/// Add a contribution to history.
192///
193/// Loads existing history, appends the new contribution, and saves.
194pub fn add_contribution(contribution: Contribution) -> Result<()> {
195    let mut data = load()?;
196    data.contributions.push(contribution);
197    save(&data)?;
198    Ok(())
199}
200
#[cfg(test)]
mod tests {
    use super::*;

    /// Create a minimal test contribution with no AI stats.
    fn test_contribution() -> Contribution {
        Contribution {
            id: Uuid::new_v4(),
            repo: "owner/repo".to_string(),
            issue: 123,
            action: "triage".to_string(),
            timestamp: Utc::now(),
            comment_url: "https://github.com/owner/repo/issues/123#issuecomment-1".to_string(),
            status: ContributionStatus::Pending,
            ai_stats: None,
        }
    }

    /// Build `AiStats` for tests, varying only the fields assertions care
    /// about; everything else gets a fixed value.
    fn test_stats(
        model: &str,
        input_tokens: u64,
        output_tokens: u64,
        duration_ms: u64,
        cost_usd: Option<f64>,
    ) -> AiStats {
        AiStats {
            provider: "openrouter".to_string(),
            model: model.to_string(),
            input_tokens,
            output_tokens,
            duration_ms,
            cost_usd,
            fallback_provider: None,
            prompt_chars: 0,
        }
    }

    #[test]
    fn test_contribution_serialization_roundtrip() {
        let contribution = test_contribution();
        let json = serde_json::to_string(&contribution).expect("serialize");
        let parsed: Contribution = serde_json::from_str(&json).expect("deserialize");

        assert_eq!(contribution.id, parsed.id);
        assert_eq!(contribution.repo, parsed.repo);
        assert_eq!(contribution.issue, parsed.issue);
        assert_eq!(contribution.action, parsed.action);
        assert_eq!(contribution.comment_url, parsed.comment_url);
        assert_eq!(contribution.status, parsed.status);
    }

    #[test]
    fn test_history_data_serialization_roundtrip() {
        let data = HistoryData {
            contributions: vec![test_contribution(), test_contribution()],
        };

        let json = serde_json::to_string_pretty(&data).expect("serialize");
        let parsed: HistoryData = serde_json::from_str(&json).expect("deserialize");

        assert_eq!(parsed.contributions.len(), 2);
    }

    #[test]
    fn test_contribution_status_default() {
        let status = ContributionStatus::default();
        assert_eq!(status, ContributionStatus::Pending);
    }

    #[test]
    fn test_contribution_status_serialization() {
        assert_eq!(
            serde_json::to_string(&ContributionStatus::Pending).unwrap(),
            "\"pending\""
        );
        assert_eq!(
            serde_json::to_string(&ContributionStatus::Accepted).unwrap(),
            "\"accepted\""
        );
        assert_eq!(
            serde_json::to_string(&ContributionStatus::Rejected).unwrap(),
            "\"rejected\""
        );
    }

    #[test]
    fn test_empty_history_default() {
        let data = HistoryData::default();
        assert!(data.contributions.is_empty());
    }

    #[test]
    fn test_ai_stats_serialization_roundtrip() {
        let stats = test_stats("mistralai/mistral-small-2603", 1000, 500, 1500, Some(0.0));

        let json = serde_json::to_string(&stats).expect("serialize");
        let parsed: AiStats = serde_json::from_str(&json).expect("deserialize");

        assert_eq!(stats, parsed);
    }

    #[test]
    fn test_contribution_with_ai_stats() {
        let mut contribution = test_contribution();
        contribution.ai_stats =
            Some(test_stats("mistralai/mistral-small-2603", 1000, 500, 1500, Some(0.0)));

        let json = serde_json::to_string(&contribution).expect("serialize");
        let parsed: Contribution = serde_json::from_str(&json).expect("deserialize");

        assert!(parsed.ai_stats.is_some());
        assert_eq!(
            parsed.ai_stats.unwrap().model,
            "mistralai/mistral-small-2603"
        );
    }

    #[test]
    fn test_contribution_without_ai_stats_backward_compat() {
        // Record written before the ai_stats field existed.
        let json = r#"{
            "id": "550e8400-e29b-41d4-a716-446655440000",
            "repo": "owner/repo",
            "issue": 123,
            "action": "triage",
            "timestamp": "2024-01-01T00:00:00Z",
            "comment_url": "https://github.com/owner/repo/issues/123#issuecomment-1",
            "status": "pending"
        }"#;

        let parsed: Contribution = serde_json::from_str(json).expect("deserialize");
        assert!(parsed.ai_stats.is_none());
    }

    #[test]
    fn test_total_tokens() {
        let mut data = HistoryData::default();

        let mut c1 = test_contribution();
        c1.ai_stats = Some(test_stats("model1", 100, 50, 1000, Some(0.01)));

        let mut c2 = test_contribution();
        c2.ai_stats = Some(test_stats("model2", 200, 100, 2000, Some(0.02)));

        data.contributions.push(c1);
        data.contributions.push(c2);
        data.contributions.push(test_contribution()); // No stats

        assert_eq!(data.total_tokens(), 450);
    }

    #[test]
    fn test_total_cost() {
        let mut data = HistoryData::default();

        let mut c1 = test_contribution();
        c1.ai_stats = Some(test_stats("model1", 100, 50, 1000, Some(0.01)));

        let mut c2 = test_contribution();
        c2.ai_stats = Some(test_stats("model2", 200, 100, 2000, Some(0.02)));

        data.contributions.push(c1);
        data.contributions.push(c2);

        assert!((data.total_cost() - 0.03).abs() < f64::EPSILON);
    }

    #[test]
    fn test_avg_tokens_per_triage() {
        let mut data = HistoryData::default();

        let mut c1 = test_contribution();
        c1.ai_stats = Some(test_stats("model1", 100, 50, 1000, Some(0.01)));

        let mut c2 = test_contribution();
        c2.ai_stats = Some(test_stats("model2", 200, 100, 2000, Some(0.02)));

        data.contributions.push(c1);
        data.contributions.push(c2);

        // (150 + 300) / 2 contributions with stats.
        assert!((data.avg_tokens_per_triage() - 225.0).abs() < f64::EPSILON);
    }

    #[test]
    fn test_avg_tokens_per_triage_empty() {
        let data = HistoryData::default();
        assert!((data.avg_tokens_per_triage() - 0.0).abs() < f64::EPSILON);
    }

    #[test]
    fn test_cost_by_model() {
        let mut data = HistoryData::default();

        let mut c1 = test_contribution();
        c1.ai_stats = Some(test_stats("model1", 100, 50, 1000, Some(0.01)));

        let mut c2 = test_contribution();
        c2.ai_stats = Some(test_stats("model1", 200, 100, 2000, Some(0.02)));

        let mut c3 = test_contribution();
        c3.ai_stats = Some(test_stats("model2", 150, 75, 1500, Some(0.015)));

        data.contributions.push(c1);
        data.contributions.push(c2);
        data.contributions.push(c3);

        let costs = data.cost_by_model();
        assert_eq!(costs.len(), 2);
        assert!((costs.get("model1").unwrap() - 0.03).abs() < f64::EPSILON);
        assert!((costs.get("model2").unwrap() - 0.015).abs() < f64::EPSILON);
    }
}