1use chrono::{DateTime, Datelike, Duration, TimeZone, Timelike, Utc};
7use serde::{Deserialize, Serialize};
8use std::collections::{BTreeMap, HashMap};
9
10use super::{ContextType, GatheredContext};
11
/// Granularity at which timestamps are bucketed for temporal analysis.
///
/// Variants are ordered from finest (`Hour`) to coarsest (`Year`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum TemporalResolution {
    Hour,
    Day,
    Week,
    Month,
    Quarter,
    Year,
}
22
23impl TemporalResolution {
24 pub fn duration(&self) -> Duration {
26 match self {
27 Self::Hour => Duration::hours(1),
28 Self::Day => Duration::days(1),
29 Self::Week => Duration::weeks(1),
30 Self::Month => Duration::days(30), Self::Quarter => Duration::days(90),
32 Self::Year => Duration::days(365),
33 }
34 }
35
36 pub fn truncate(&self, dt: DateTime<Utc>) -> DateTime<Utc> {
38 match self {
39 Self::Hour => Utc
40 .with_ymd_and_hms(dt.year(), dt.month(), dt.day(), dt.hour(), 0, 0)
41 .single()
42 .unwrap_or(dt),
43 Self::Day => Utc
44 .with_ymd_and_hms(dt.year(), dt.month(), dt.day(), 0, 0, 0)
45 .single()
46 .unwrap_or(dt),
47 Self::Week => {
48 let days_since_monday = dt.weekday().num_days_from_monday();
49 let start_of_week = dt - Duration::days(days_since_monday as i64);
50 Utc.with_ymd_and_hms(
51 start_of_week.year(),
52 start_of_week.month(),
53 start_of_week.day(),
54 0,
55 0,
56 0,
57 )
58 .single()
59 .unwrap_or(dt)
60 }
61 Self::Month => Utc
62 .with_ymd_and_hms(dt.year(), dt.month(), 1, 0, 0, 0)
63 .single()
64 .unwrap_or(dt),
65 Self::Quarter => {
66 let quarter_month = ((dt.month() - 1) / 3) * 3 + 1;
67 Utc.with_ymd_and_hms(dt.year(), quarter_month, 1, 0, 0, 0)
68 .single()
69 .unwrap_or(dt)
70 }
71 Self::Year => Utc
72 .with_ymd_and_hms(dt.year(), 1, 1, 0, 0, 0)
73 .single()
74 .unwrap_or(dt),
75 }
76 }
77}
78
/// Analyzes a set of gathered contexts along the time axis: grouping,
/// timelines, work-session detection, and relevance decay.
pub struct TemporalContextAnalyzer {
    // Contexts under analysis; `apply_temporal_decay` mutates and reorders them.
    pub contexts: Vec<GatheredContext>,
    // Bucket granularity used by all grouping operations.
    resolution: TemporalResolution,
}
84
85impl TemporalContextAnalyzer {
86 pub fn new(contexts: Vec<GatheredContext>, resolution: TemporalResolution) -> Self {
87 Self {
88 contexts,
89 resolution,
90 }
91 }
92
93 pub fn group_by_time(&self) -> BTreeMap<DateTime<Utc>, Vec<&GatheredContext>> {
95 let mut groups = BTreeMap::new();
96
97 for context in &self.contexts {
98 let period = self.resolution.truncate(context.timestamp);
99 groups.entry(period).or_insert_with(Vec::new).push(context);
100 }
101
102 groups
103 }
104
105 pub fn activity_timeline(&self) -> Vec<TimelinePoint> {
107 let groups = self.group_by_time();
108 let mut timeline = Vec::new();
109
110 for (time, contexts) in groups {
111 let intensity = contexts.len() as f32;
112 let avg_relevance =
113 contexts.iter().map(|c| c.relevance_score).sum::<f32>() / contexts.len() as f32;
114
115 timeline.push(TimelinePoint {
116 timestamp: time,
117 activity_count: contexts.len(),
118 intensity,
119 average_relevance: avg_relevance,
120 dominant_type: Self::get_dominant_type(&contexts),
121 tools_used: Self::get_tools_used(&contexts),
122 });
123 }
124
125 timeline
126 }
127
128 pub fn detect_patterns(&self) -> TemporalPatterns {
130 let timeline = self.activity_timeline();
131
132 let sessions = self.detect_work_sessions(&timeline);
134
135 let peak_times = self.find_peak_times(&timeline);
137
138 let momentum = self.calculate_momentum(&timeline);
140
141 let periodic_patterns = self.detect_periodic_patterns(&timeline);
143
144 TemporalPatterns {
145 work_sessions: sessions,
146 peak_times,
147 momentum,
148 periodic_patterns,
149 total_duration: self.calculate_total_duration(),
150 active_days: self.count_active_days(),
151 }
152 }
153
154 pub fn create_temporal_waves(&self) -> TemporalWaveGrid {
156 let mut grid = TemporalWaveGrid::new(self.resolution);
157 let groups = self.group_by_time();
158
159 for (time, contexts) in groups {
160 for context in contexts {
162 let wave = TemporalWave {
163 timestamp: time,
164 frequency: self.calculate_frequency(context),
165 amplitude: context.relevance_score,
166 phase: self.calculate_phase(context),
167 decay_rate: self.calculate_decay_rate(&time),
168 context_type: context.content_type.clone(),
169 tool: context.ai_tool.clone(),
170 };
171
172 grid.add_wave(wave);
173 }
174 }
175
176 grid
177 }
178
179 pub fn apply_temporal_decay(&mut self, half_life_days: f32) {
181 let now = Utc::now();
182
183 for context in &mut self.contexts {
184 let age_days = (now - context.timestamp).num_days() as f32;
185 let decay_factor = 0.5_f32.powf(age_days / half_life_days);
186 context.relevance_score *= decay_factor;
187 }
188
189 self.contexts
191 .sort_by(|a, b| b.relevance_score.partial_cmp(&a.relevance_score).unwrap());
192 }
193
194 fn get_dominant_type(contexts: &[&GatheredContext]) -> ContextType {
197 let mut type_counts = HashMap::new();
198 for context in contexts {
199 *type_counts.entry(context.content_type.clone()).or_insert(0) += 1;
200 }
201
202 type_counts
203 .into_iter()
204 .max_by_key(|(_, count)| *count)
205 .map(|(ctx_type, _)| ctx_type)
206 .unwrap_or(ContextType::Configuration)
207 }
208
209 fn get_tools_used(contexts: &[&GatheredContext]) -> Vec<String> {
210 let mut tools = contexts
211 .iter()
212 .map(|c| c.ai_tool.clone())
213 .collect::<Vec<_>>();
214 tools.sort();
215 tools.dedup();
216 tools
217 }
218
219 fn detect_work_sessions(&self, timeline: &[TimelinePoint]) -> Vec<WorkSession> {
220 let mut sessions = Vec::new();
221 let mut current_session: Option<WorkSession> = None;
222
223 let session_gap = match self.resolution {
224 TemporalResolution::Hour => Duration::hours(4),
225 TemporalResolution::Day => Duration::days(3),
226 _ => Duration::weeks(1),
227 };
228
229 for point in timeline.iter() {
230 if let Some(ref mut session) = current_session {
231 let gap = point.timestamp - session.end_time;
232
233 if gap > session_gap {
234 sessions.push(session.clone());
236 current_session = Some(WorkSession {
237 start_time: point.timestamp,
238 end_time: point.timestamp,
239 total_activities: point.activity_count,
240 average_intensity: point.intensity,
241 });
242 } else {
243 session.end_time = point.timestamp;
245 session.total_activities += point.activity_count;
246 session.average_intensity = (session.average_intensity + point.intensity) / 2.0;
247 }
248 } else {
249 current_session = Some(WorkSession {
251 start_time: point.timestamp,
252 end_time: point.timestamp,
253 total_activities: point.activity_count,
254 average_intensity: point.intensity,
255 });
256 }
257 }
258
259 if let Some(session) = current_session {
260 sessions.push(session);
261 }
262
263 sessions
264 }
265
266 fn find_peak_times(&self, timeline: &[TimelinePoint]) -> Vec<PeakTime> {
267 let mut peaks = timeline
268 .iter()
269 .map(|point| PeakTime {
270 timestamp: point.timestamp,
271 intensity: point.intensity,
272 resolution: self.resolution,
273 })
274 .collect::<Vec<_>>();
275
276 peaks.sort_by(|a, b| b.intensity.partial_cmp(&a.intensity).unwrap());
278 peaks.truncate(10);
279
280 peaks
281 }
282
283 fn calculate_momentum(&self, timeline: &[TimelinePoint]) -> f32 {
284 if timeline.len() < 2 {
285 return 0.0;
286 }
287
288 let recent_count = timeline.len().min(10);
290 let recent = &timeline[timeline.len() - recent_count..];
291
292 let mut momentum = 0.0;
293 for i in 1..recent.len() {
294 let change = recent[i].intensity - recent[i - 1].intensity;
295 momentum += change * (i as f32 / recent.len() as f32); }
297
298 momentum / recent.len() as f32
299 }
300
301 fn detect_periodic_patterns(&self, timeline: &[TimelinePoint]) -> Vec<PeriodicPattern> {
302 let mut patterns = Vec::new();
303
304 if matches!(self.resolution, TemporalResolution::Hour) {
306 if let Some(pattern) = self.detect_daily_pattern(timeline) {
307 patterns.push(pattern);
308 }
309 }
310
311 if matches!(
313 self.resolution,
314 TemporalResolution::Day | TemporalResolution::Hour
315 ) {
316 if let Some(pattern) = self.detect_weekly_pattern(timeline) {
317 patterns.push(pattern);
318 }
319 }
320
321 patterns
322 }
323
324 fn detect_daily_pattern(&self, timeline: &[TimelinePoint]) -> Option<PeriodicPattern> {
325 let mut hour_activities = HashMap::new();
327
328 for point in timeline {
329 let hour = point.timestamp.hour();
330 hour_activities
331 .entry(hour)
332 .or_insert_with(Vec::new)
333 .push(point.intensity);
334 }
335
336 let mut peak_hours = hour_activities
338 .iter()
339 .map(|(hour, intensities)| {
340 let avg = intensities.iter().sum::<f32>() / intensities.len() as f32;
341 (*hour, avg)
342 })
343 .collect::<Vec<_>>();
344
345 peak_hours.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
346
347 if peak_hours.is_empty() {
348 return None;
349 }
350
351 Some(PeriodicPattern {
352 period_type: "daily".to_string(),
353 peak_periods: peak_hours
354 .iter()
355 .take(3)
356 .map(|(h, _)| format!("{:02}:00", h))
357 .collect(),
358 strength: 0.0, })
360 }
361
362 fn detect_weekly_pattern(&self, timeline: &[TimelinePoint]) -> Option<PeriodicPattern> {
363 let mut day_activities = HashMap::new();
365
366 for point in timeline {
367 let day = point.timestamp.weekday();
368 day_activities
369 .entry(day)
370 .or_insert_with(Vec::new)
371 .push(point.intensity);
372 }
373
374 let mut peak_days = day_activities
376 .iter()
377 .map(|(day, intensities)| {
378 let avg = intensities.iter().sum::<f32>() / intensities.len() as f32;
379 (*day, avg)
380 })
381 .collect::<Vec<_>>();
382
383 peak_days.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
384
385 if peak_days.is_empty() {
386 return None;
387 }
388
389 Some(PeriodicPattern {
390 period_type: "weekly".to_string(),
391 peak_periods: peak_days
392 .iter()
393 .take(3)
394 .map(|(d, _)| format!("{:?}", d))
395 .collect(),
396 strength: 0.0, })
398 }
399
400 fn calculate_total_duration(&self) -> Duration {
401 if self.contexts.is_empty() {
402 return Duration::zero();
403 }
404
405 let min_time = self.contexts.iter().map(|c| c.timestamp).min().unwrap();
406 let max_time = self.contexts.iter().map(|c| c.timestamp).max().unwrap();
407
408 max_time - min_time
409 }
410
411 fn count_active_days(&self) -> usize {
412 let mut days = self
413 .contexts
414 .iter()
415 .map(|c| c.timestamp.date_naive())
416 .collect::<Vec<_>>();
417 days.sort();
418 days.dedup();
419 days.len()
420 }
421
422 fn calculate_frequency(&self, context: &GatheredContext) -> f32 {
423 match context.content_type {
425 ContextType::ChatHistory => 0.1,
426 ContextType::ProjectSettings => 0.05,
427 ContextType::CodeSnippets => 0.2,
428 ContextType::Documentation => 0.08,
429 ContextType::Configuration => 0.06,
430 ContextType::SearchHistory => 0.15,
431 ContextType::Bookmarks => 0.07,
432 ContextType::CustomPrompts => 0.12,
433 ContextType::ModelPreferences => 0.04,
434 ContextType::WorkspaceState => 0.09,
435 }
436 }
437
438 fn calculate_phase(&self, context: &GatheredContext) -> f32 {
439 let minutes_since_epoch = context.timestamp.timestamp() / 60;
441 ((minutes_since_epoch % 360) as f32) * std::f32::consts::PI / 180.0
442 }
443
444 fn calculate_decay_rate(&self, time: &DateTime<Utc>) -> f32 {
445 let age_days = (Utc::now() - *time).num_days() as f32;
446 1.0 / (1.0 + age_days / 30.0) }
448}
449
/// One bucket of the activity timeline: aggregate statistics for all
/// contexts whose timestamps truncate to `timestamp`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimelinePoint {
    // Start of the time bucket (already truncated to the resolution).
    pub timestamp: DateTime<Utc>,
    // Number of contexts in the bucket.
    pub activity_count: usize,
    // Same count as `activity_count`, stored as f32 for wave/momentum math.
    pub intensity: f32,
    // Mean relevance score of the bucket's contexts.
    pub average_relevance: f32,
    // Most frequent context type in the bucket.
    pub dominant_type: ContextType,
    // Sorted, deduplicated AI tool names seen in the bucket.
    pub tools_used: Vec<String>,
}
460
/// Bundle of everything `detect_patterns` extracts from a timeline.
// NOTE(review): `chrono::Duration` only supports serde with particular
// chrono features/versions — confirm `total_duration` actually serializes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemporalPatterns {
    pub work_sessions: Vec<WorkSession>,
    // Up to 10 most intense timeline points, descending.
    pub peak_times: Vec<PeakTime>,
    // Recency-weighted trend of intensity changes; positive = ramping up.
    pub momentum: f32,
    pub periodic_patterns: Vec<PeriodicPattern>,
    // Span between earliest and latest context timestamps.
    pub total_duration: Duration,
    // Distinct UTC calendar days with activity.
    pub active_days: usize,
}
471
/// A contiguous run of timeline points with no gap larger than the
/// resolution-dependent session threshold.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkSession {
    pub start_time: DateTime<Utc>,
    pub end_time: DateTime<Utc>,
    // Sum of activity counts over the session's points.
    pub total_activities: usize,
    pub average_intensity: f32,
}
479
/// A single high-activity bucket, tagged with the resolution it was
/// measured at.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PeakTime {
    pub timestamp: DateTime<Utc>,
    pub intensity: f32,
    pub resolution: TemporalResolution,
}
486
/// A recurring activity pattern ("daily" or "weekly") with its top
/// peak periods rendered as strings (e.g. "09:00" or "Mon").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PeriodicPattern {
    pub period_type: String,
    pub peak_periods: Vec<String>,
    // Currently always 0.0 — strength estimation is not implemented yet.
    pub strength: f32,
}
493
/// A single context rendered as a damped sinusoid for interference
/// analysis in `TemporalWaveGrid`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemporalWave {
    // Truncated time-slot start this wave belongs to.
    pub timestamp: DateTime<Utc>,
    // Per-context-type frequency (see `calculate_frequency`).
    pub frequency: f32,
    // Context relevance score at creation time.
    pub amplitude: f32,
    // Phase offset in radians (see `calculate_phase`).
    pub phase: f32,
    // Age-based attenuation factor in (0, 1] (see `calculate_decay_rate`).
    pub decay_rate: f32,
    pub context_type: ContextType,
    pub tool: String,
}
505
506pub struct TemporalWaveGrid {
508 resolution: TemporalResolution,
509 waves: Vec<TemporalWave>,
510 time_slots: BTreeMap<DateTime<Utc>, Vec<usize>>, }
512
513impl TemporalWaveGrid {
514 pub fn new(resolution: TemporalResolution) -> Self {
515 Self {
516 resolution,
517 waves: Vec::new(),
518 time_slots: BTreeMap::new(),
519 }
520 }
521
522 pub fn add_wave(&mut self, wave: TemporalWave) {
523 let time_slot = self.resolution.truncate(wave.timestamp);
524 let wave_idx = self.waves.len();
525 self.waves.push(wave);
526
527 self.time_slots.entry(time_slot).or_default().push(wave_idx);
528 }
529
530 pub fn get_interference_at(&self, time: DateTime<Utc>) -> f32 {
532 let time_slot = self.resolution.truncate(time);
533
534 if let Some(indices) = self.time_slots.get(&time_slot) {
535 let mut total = 0.0;
536
537 for &idx in indices {
538 let wave = &self.waves[idx];
539 let age_factor = wave.decay_rate;
540 let value = wave.amplitude
541 * age_factor
542 * (2.0 * std::f32::consts::PI * wave.frequency * time.timestamp() as f32
543 + wave.phase)
544 .sin();
545 total += value;
546 }
547
548 total / indices.len() as f32
549 } else {
550 0.0
551 }
552 }
553
554 pub fn find_resonance_peaks(&self) -> Vec<DateTime<Utc>> {
556 let mut peaks = Vec::new();
557
558 for (&time, indices) in &self.time_slots {
559 if indices.len() > 3 {
560 let interference = self.get_interference_at(time);
562 if interference.abs() > 0.7 {
563 peaks.push(time);
564 }
565 }
566 }
567
568 peaks
569 }
570}