#![allow(dead_code, missing_docs)]
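//! Activity recognition built on top of scene-understanding results.
//!
//! The [`ActivityRecognitionEngine`] combines per-frame action detection,
//! sequence analysis, multi-person interaction recognition, context-aware
//! classification, temporal modeling, and hierarchical decomposition.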

use crate::error::{Result, VisionError};
use crate::scene_understanding::SceneAnalysisResult;
use scirs2_core::ndarray::{Array1, Array2, Array3, ArrayView3};
use std::collections::HashMap;

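/// Coordinates action detection, sequence analysis, interaction recognition,
/// context classification, temporal modeling, and hierarchical decomposition.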
pub struct ActivityRecognitionEngine {
    action_detectors: Vec<ActionDetector>,
    sequence_analyzer: ActivitySequenceAnalyzer,
    interaction_recognizer: MultiPersonInteractionRecognizer,
    context_classifier: ContextAwareActivityClassifier,
    temporal_modeler: TemporalActivityModeler,
    hierarchical_decomposer: HierarchicalActivityDecomposer,
    knowledge_base: ActivityKnowledgeBase,
}

#[derive(Debug, Clone)]
pub struct ActionDetector {
    name: String,
    action_types: Vec<String>,
    confidence_threshold: f32,
    temporal_window: usize,
    feature_method: String,
}

#[derive(Debug, Clone)]
pub struct ActivitySequenceAnalyzer {
    max_sequence_length: usize,
    pattern_models: Vec<SequencePattern>,
    transition_models: HashMap<String, TransitionModel>,
    anomaly_params: AnomalyDetectionParams,
}

#[derive(Debug, Clone)]
pub struct MultiPersonInteractionRecognizer {
    interaction_types: Vec<InteractionType>,
    tracking_params: PersonTrackingParams,
    social_distance_model: SocialDistanceModel,
    group_recognition: GroupActivityRecognition,
}

#[derive(Debug, Clone)]
pub struct ContextAwareActivityClassifier {
    context_features: Vec<ContextFeature>,
    environment_classifiers: Vec<EnvironmentClassifier>,
    object_associations: HashMap<String, Vec<String>>,
    scene_correlations: HashMap<String, ActivityDistribution>,
}

#[derive(Debug, Clone)]
pub struct TemporalActivityModeler {
    temporal_resolution: f32,
    memory_length: usize,
    rnn_params: RNNParameters,
    attention_mechanisms: Vec<TemporalAttention>,
}

#[derive(Debug, Clone)]
pub struct HierarchicalActivityDecomposer {
    hierarchy_levels: Vec<ActivityLevel>,
    decomposition_rules: Vec<DecompositionRule>,
    composition_rules: Vec<CompositionRule>,
}

#[derive(Debug, Clone)]
pub struct ActivityKnowledgeBase {
    activity_definitions: HashMap<String, ActivityDefinition>,
    ontology: ActivityOntology,
    common_patterns: Vec<ActivityPattern>,
    cultural_variations: HashMap<String, Vec<ActivityVariation>>,
}

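/// Aggregated output of activity recognition for a single frame or a frame sequence.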
#[derive(Debug, Clone)]
pub struct ActivityRecognitionResult {
    pub activities: Vec<DetectedActivity>,
    pub sequences: Vec<ActivitySequence>,
    pub interactions: Vec<PersonInteraction>,
    pub scene_summary: ActivitySummary,
    pub timeline: ActivityTimeline,
    pub confidence_scores: ConfidenceScores,
    pub uncertainty: ActivityUncertainty,
}

#[derive(Debug, Clone)]
pub struct DetectedActivity {
    pub activity_class: String,
    pub subtype: Option<String>,
    pub confidence: f32,
    pub temporal_bounds: (f32, f32),
    pub spatial_region: Option<(f32, f32, f32, f32)>,
    pub involved_persons: Vec<PersonID>,
    pub involved_objects: Vec<ObjectID>,
    pub attributes: HashMap<String, f32>,
    pub motion_characteristics: MotionCharacteristics,
}

#[derive(Debug, Clone)]
pub struct ActivitySequence {
    pub sequence_id: String,
    pub activities: Vec<DetectedActivity>,
    pub sequence_type: String,
    pub confidence: f32,
    pub transitions: Vec<ActivityTransition>,
    pub completeness: f32,
}

#[derive(Debug, Clone)]
pub struct PersonInteraction {
    pub interaction_type: String,
    pub participants: Vec<PersonID>,
    pub strength: f32,
    pub duration: f32,
    pub proximity: f32,
    pub attributes: HashMap<String, f32>,
}

#[derive(Debug, Clone)]
pub struct ActivitySummary {
    pub dominant_activity: String,
    pub diversity_index: f32,
    pub energy_level: f32,
    pub social_interaction_level: f32,
    pub complexity_score: f32,
    pub anomaly_indicators: Vec<AnomalyIndicator>,
}

#[derive(Debug, Clone)]
pub struct ActivityTimeline {
    pub segments: Vec<TimelineSegment>,
    pub resolution: f32,
    pub flow_patterns: Vec<FlowPattern>,
}

#[derive(Debug, Clone)]
pub struct ConfidenceScores {
    pub overall: f32,
    pub per_activity: HashMap<String, f32>,
    pub temporal_segmentation: f32,
    pub spatial_localization: f32,
}

#[derive(Debug, Clone)]
pub struct ActivityUncertainty {
    pub epistemic: f32,
    pub aleatoric: f32,
    pub temporal: f32,
    pub spatial: f32,
    pub confusion_matrix: Array2<f32>,
}

pub type PersonID = String;
pub type ObjectID = String;

#[derive(Debug, Clone)]
pub struct MotionCharacteristics {
    pub velocity: f32,
    pub acceleration: f32,
    pub direction: f32,
    pub smoothness: f32,
    pub periodicity: f32,
}

#[derive(Debug, Clone)]
pub struct ActivityTransition {
    pub from_activity: String,
    pub to_activity: String,
    pub probability: f32,
    pub typical_duration: f32,
}

#[derive(Debug, Clone)]
pub struct AnomalyIndicator {
    pub anomaly_type: String,
    pub severity: f32,
    pub description: String,
    pub temporal_location: f32,
}

#[derive(Debug, Clone)]
pub struct TimelineSegment {
    pub start_time: f32,
    pub end_time: f32,
    pub dominant_activity: String,
    pub activity_mix: HashMap<String, f32>,
}

#[derive(Debug, Clone)]
pub struct FlowPattern {
    pub pattern_type: String,
    pub frequency: f32,
    pub amplitude: f32,
    pub phase: f32,
}

#[derive(Debug, Clone)]
pub struct SequencePattern {
    pub pattern_name: String,
    pub activity_sequence: Vec<String>,
    pub temporal_constraints: Vec<TemporalConstraint>,
    pub occurrence_probability: f32,
}

#[derive(Debug, Clone)]
pub struct TemporalConstraint {
    pub constraint_type: String,
    pub min_duration: f32,
    pub max_duration: f32,
    pub typical_duration: f32,
}

#[derive(Debug, Clone)]
pub struct TransitionModel {
    pub source_activity: String,
    pub transition_probabilities: HashMap<String, f32>,
    pub typical_durations: HashMap<String, f32>,
}

#[derive(Debug, Clone)]
pub struct AnomalyDetectionParams {
    pub detection_threshold: f32,
    pub temporal_window: usize,
    pub feature_importance: Array1<f32>,
    pub novelty_detection: bool,
}

#[derive(Debug, Clone)]
pub enum InteractionType {
    Conversation,
    Collaboration,
    Competition,
    Following,
    Avoiding,
    Playing,
    Fighting,
    Helping,
    Teaching,
    Custom(String),
}

#[derive(Debug, Clone)]
pub struct PersonTrackingParams {
    pub max_tracking_distance: f32,
    pub identity_confidence_threshold: f32,
    pub re_identification_enabled: bool,
    pub track_merge_threshold: f32,
}

#[derive(Debug, Clone)]
pub struct SocialDistanceModel {
    pub personal_space_radius: f32,
    pub social_space_radius: f32,
    pub public_space_radius: f32,
    pub cultural_factors: HashMap<String, f32>,
}

#[derive(Debug, Clone)]
pub struct GroupActivityRecognition {
    pub min_group_size: usize,
    pub max_group_size: usize,
    pub cohesion_threshold: f32,
    pub activity_synchronization: bool,
}

#[derive(Debug, Clone)]
pub enum ContextFeature {
    SceneType,
    TimeOfDay,
    Weather,
    CrowdDensity,
    NoiseLevel,
    LightingConditions,
    ObjectPresence(String),
}

#[derive(Debug, Clone)]
pub struct EnvironmentClassifier {
    pub environment_type: String,
    pub typical_activities: Vec<String>,
    pub activity_probabilities: HashMap<String, f32>,
    pub contextual_cues: Vec<String>,
}

#[derive(Debug, Clone)]
pub struct ActivityDistribution {
    pub activities: HashMap<String, f32>,
    pub temporal_patterns: HashMap<String, TemporalPattern>,
    pub confidence: f32,
}

#[derive(Debug, Clone)]
pub struct TemporalPattern {
    pub pattern_type: String,
    pub peak_times: Vec<f32>,
    pub duration_distribution: Array1<f32>,
    pub seasonality: Option<SeasonalityInfo>,
}

#[derive(Debug, Clone)]
pub struct SeasonalityInfo {
    pub period: f32,
    pub amplitude: f32,
    pub phase_shift: f32,
}

#[derive(Debug, Clone)]
pub struct RNNParameters {
    pub hidden_size: usize,
    pub num_layers: usize,
    pub dropout_rate: f32,
    pub bidirectional: bool,
}

#[derive(Debug, Clone)]
pub struct TemporalAttention {
    pub attention_type: String,
    pub window_size: usize,
    pub attention_weights: Array2<f32>,
    pub learnable: bool,
}

#[derive(Debug, Clone)]
pub struct ActivityLevel {
    pub level_name: String,
    pub granularity: f32,
    pub typical_duration: f32,
    pub complexity: f32,
}

#[derive(Debug, Clone)]
pub struct DecompositionRule {
    pub rule_name: String,
    pub parent_activity: String,
    pub child_activities: Vec<String>,
    pub decomposition_conditions: Vec<String>,
}

#[derive(Debug, Clone)]
pub struct CompositionRule {
    pub rule_name: String,
    pub component_activities: Vec<String>,
    pub composite_activity: String,
    pub composition_conditions: Vec<String>,
}

#[derive(Debug, Clone)]
pub struct ActivityDefinition {
    pub activity_name: String,
    pub description: String,
    pub typical_duration: f32,
    pub required_objects: Vec<String>,
    pub typical_poses: Vec<String>,
    pub motion_patterns: Vec<String>,
    pub contextual_requirements: Vec<String>,
}

#[derive(Debug, Clone)]
pub struct ActivityOntology {
    pub activity_hierarchy: HashMap<String, Vec<String>>,
    pub activity_relationships: Vec<ActivityRelationship>,
    pub semantic_similarity: Array2<f32>,
}

#[derive(Debug, Clone)]
pub struct ActivityRelationship {
    pub source_activity: String,
    pub target_activity: String,
    pub relationship_type: String,
    pub strength: f32,
}

#[derive(Debug, Clone)]
pub struct ActivityPattern {
    pub pattern_name: String,
    pub activity_sequence: Vec<String>,
    pub temporal_structure: TemporalStructure,
    pub context_requirements: Vec<String>,
    pub occurrence_frequency: f32,
}

#[derive(Debug, Clone)]
pub struct TemporalStructure {
    pub sequence_type: String,
    pub timing_constraints: Vec<TimingConstraint>,
    pub overlap_patterns: Vec<OverlapPattern>,
}

#[derive(Debug, Clone)]
pub struct TimingConstraint {
    pub constraint_type: String,
    pub activity_pair: (String, String),
    pub min_delay: f32,
    pub max_delay: f32,
}

#[derive(Debug, Clone)]
pub struct OverlapPattern {
    pub activity_pair: (String, String),
    pub overlap_type: String,
    pub typical_overlap: f32,
}

#[derive(Debug, Clone)]
pub struct ActivityVariation {
    pub variation_name: String,
    pub base_activity: String,
    pub cultural_context: String,
    pub modifications: HashMap<String, String>,
    pub prevalence: f32,
}

impl Default for ActivityRecognitionEngine {
    fn default() -> Self {
        Self::new()
    }
}

impl ActivityRecognitionEngine {
    pub fn new() -> Self {
        Self {
            action_detectors: vec![
                ActionDetector::new("human_action_detector"),
                ActionDetector::new("object_interaction_detector"),
            ],
            sequence_analyzer: ActivitySequenceAnalyzer::new(),
            interaction_recognizer: MultiPersonInteractionRecognizer::new(),
            context_classifier: ContextAwareActivityClassifier::new(),
            temporal_modeler: TemporalActivityModeler::new(),
            hierarchical_decomposer: HierarchicalActivityDecomposer::new(),
            knowledge_base: ActivityKnowledgeBase::new(),
        }
    }

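    /// Recognizes activities in a single frame, using the accompanying scene
    /// analysis for person and object detections.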
    pub fn recognize_frame_activities(
        &self,
        frame: &ArrayView3<f32>,
        scene_analysis: &SceneAnalysisResult,
    ) -> Result<ActivityRecognitionResult> {
        let motion_features = self.extract_motion_features(frame)?;

        let detected_actions = self.detect_actions(frame, scene_analysis, &motion_features)?;

        let context = self.context_classifier.classify_context(scene_analysis)?;

        let enhanced_activities = self.enhance_with_context(&detected_actions, &context)?;

        Ok(ActivityRecognitionResult {
            activities: enhanced_activities,
            sequences: Vec::new(),
            interactions: self.detect_frame_interactions(scene_analysis)?,
            scene_summary: self.summarize_frame_activities(scene_analysis)?,
            timeline: ActivityTimeline {
                segments: Vec::new(),
                resolution: 1.0,
                flow_patterns: Vec::new(),
            },
            confidence_scores: ConfidenceScores {
                overall: 0.8,
                per_activity: HashMap::new(),
                temporal_segmentation: 0.0,
                spatial_localization: 0.75,
            },
            uncertainty: ActivityUncertainty {
                epistemic: 0.2,
                aleatoric: 0.15,
                temporal: 0.0,
                spatial: 0.1,
                confusion_matrix: Array2::zeros((10, 10)),
            },
        })
    }

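    /// Recognizes activities over a frame sequence, additionally producing
    /// activity sequences, person interactions, and an activity timeline.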
    pub fn recognize_sequence_activities(
        &self,
        frames: &[ArrayView3<f32>],
        scene_analyses: &[SceneAnalysisResult],
    ) -> Result<ActivityRecognitionResult> {
        if frames.len() != scene_analyses.len() {
            return Err(VisionError::InvalidInput(
                "Number of frames must match the number of scene analyses".to_string(),
            ));
        }

        let mut frame_activities = Vec::new();
        for (frame, scene_analysis) in frames.iter().zip(scene_analyses.iter()) {
            let frame_result = self.recognize_frame_activities(frame, scene_analysis)?;
            frame_activities.push(frame_result);
        }

        let sequences = self
            .sequence_analyzer
            .analyze_sequences(&frame_activities)?;

        let interactions = self
            .interaction_recognizer
            .analyze_interactions(scene_analyses)?;

        let timeline = self.build_activity_timeline(&frame_activities)?;

        let scene_summary = self.summarize_sequence_activities(&frame_activities)?;

        let all_activities: Vec<DetectedActivity> = frame_activities
            .into_iter()
            .flat_map(|result| result.activities)
            .collect();

        Ok(ActivityRecognitionResult {
            activities: all_activities,
            sequences,
            interactions,
            scene_summary,
            timeline,
            confidence_scores: ConfidenceScores {
                overall: 0.85,
                per_activity: HashMap::new(),
                temporal_segmentation: 0.8,
                spatial_localization: 0.75,
            },
            uncertainty: ActivityUncertainty {
                epistemic: 0.15,
                aleatoric: 0.1,
                temporal: 0.12,
                spatial: 0.08,
                confusion_matrix: Array2::zeros((10, 10)),
            },
        })
    }

    pub fn detect_complex_interactions(
        &self,
        scene_sequence: &[SceneAnalysisResult],
    ) -> Result<Vec<PersonInteraction>> {
        self.interaction_recognizer
            .analyze_interactions(scene_sequence)
    }

    pub fn recognize_hierarchical_structure(
        &self,
        activities: &[DetectedActivity],
    ) -> Result<HierarchicalActivityStructure> {
        self.hierarchical_decomposer
            .decompose_activities(activities)
    }

    pub fn predict_future_activities(
        &self,
        current_activities: &[DetectedActivity],
        prediction_horizon: f32,
    ) -> Result<Vec<ActivityPrediction>> {
        self.temporal_modeler
            .predict_activities(current_activities, prediction_horizon)
    }

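    // Per-pixel motion feature channels produced by `extract_motion_features`:
    //   0-1: optical flow (x, y), 2: flow magnitude, 3: flow direction,
    //   4-5: signed / absolute temporal difference, 6-7: magnitude gradients (x, y),
    //   8: local magnitude contrast, 9: log flow magnitude (floored at 0.1).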
    fn extract_motion_features(&self, frame: &ArrayView3<f32>) -> Result<Array3<f32>> {
        let (height, width, _channels) = frame.dim();
        let mut motion_features = Array3::zeros((height, width, 10));

        if let Some(ref prev_frame) = self.get_previous_frame() {
            let flow = self.compute_optical_flow(frame, prev_frame)?;
            motion_features
                .slice_mut(scirs2_core::ndarray::s![.., .., 0])
                .assign(&flow.slice(scirs2_core::ndarray::s![.., .., 0]));
            motion_features
                .slice_mut(scirs2_core::ndarray::s![.., .., 1])
                .assign(&flow.slice(scirs2_core::ndarray::s![.., .., 1]));
        }

        for y in 0..height {
            for x in 0..width {
                let fx = motion_features[[y, x, 0]];
                let fy = motion_features[[y, x, 1]];
                motion_features[[y, x, 2]] = (fx * fx + fy * fy).sqrt();
            }
        }

        for y in 0..height {
            for x in 0..width {
                let fx = motion_features[[y, x, 0]];
                let fy = motion_features[[y, x, 1]];
                motion_features[[y, x, 3]] = fy.atan2(fx);
            }
        }

        if let Some(ref prev_frame) = self.get_previous_frame() {
            for y in 0..height {
                for x in 0..width {
                    let current = frame[[y, x, 0]];
                    let previous = prev_frame[[y, x, 0]];
                    motion_features[[y, x, 4]] = current - previous;
                    motion_features[[y, x, 5]] = (current - previous).abs();
                }
            }
        }

        for y in 1..height - 1 {
            for x in 1..width - 1 {
                let mag = motion_features[[y, x, 2]];
                let mag_left = motion_features[[y, x - 1, 2]];
                let mag_right = motion_features[[y, x + 1, 2]];
                let mag_up = motion_features[[y - 1, x, 2]];
                let mag_down = motion_features[[y + 1, x, 2]];

                motion_features[[y, x, 6]] = mag_right - mag_left;
                motion_features[[y, x, 7]] = mag_down - mag_up;
                motion_features[[y, x, 8]] =
                    (mag - (mag_left + mag_right + mag_up + mag_down) / 4.0).abs();
                motion_features[[y, x, 9]] = mag.max(0.1).ln();
            }
        }

        Ok(motion_features)
    }

    fn detect_actions(
        &self,
        _frame: &ArrayView3<f32>,
        scene_analysis: &SceneAnalysisResult,
        motion_features: &Array3<f32>,
    ) -> Result<Vec<DetectedActivity>> {
        let mut activities = Vec::new();

        for (i, object) in scene_analysis.objects.iter().enumerate() {
            if object.class == "person" {
                let (bbox_x, bbox_y, bbox_w, bbox_h) = object.bbox;
                let person_motion = self.extract_person_motion_features(
                    motion_features,
                    bbox_x as usize,
                    bbox_y as usize,
                    bbox_w as usize,
                    bbox_h as usize,
                )?;

                let (activity_class, confidence) = self.classify_person_activity(&person_motion);

                let motion_chars = self.compute_motion_characteristics(&person_motion);

                let involved_objects = self.detect_object_interactions(scene_analysis, object)?;

                let activity_ = DetectedActivity {
                    activity_class,
                    subtype: self.determine_activity_subtype(&person_motion),
                    confidence,
                    temporal_bounds: (0.0, 1.0),
                    spatial_region: Some(object.bbox),
                    involved_persons: vec![format!("person_{}", i)],
                    involved_objects,
                    attributes: self.extract_activity_attributes(&person_motion),
                    motion_characteristics: motion_chars,
                };
                activities.push(activity_);
            }
        }

        Ok(activities)
    }

    fn enhance_with_context(
        &self,
        activities: &[DetectedActivity],
        _context: &ContextClassification,
    ) -> Result<Vec<DetectedActivity>> {
        Ok(activities.to_vec())
    }

    fn detect_frame_interactions(
        &self,
        _scene_analysis: &SceneAnalysisResult,
    ) -> Result<Vec<PersonInteraction>> {
        Ok(Vec::new())
    }

    fn summarize_frame_activities(
        &self,
        _scene_analysis: &SceneAnalysisResult,
    ) -> Result<ActivitySummary> {
        Ok(ActivitySummary {
            dominant_activity: "static_scene".to_string(),
            diversity_index: 0.3,
            energy_level: 0.2,
            social_interaction_level: 0.1,
            complexity_score: 0.4,
            anomaly_indicators: Vec::new(),
        })
    }

    fn build_activity_timeline(
        &self,
        _frame_activities: &[ActivityRecognitionResult],
    ) -> Result<ActivityTimeline> {
        Ok(ActivityTimeline {
            segments: Vec::new(),
            resolution: 1.0 / 30.0,
            flow_patterns: Vec::new(),
        })
    }

    fn summarize_sequence_activities(
        &self,
        _frame_activities: &[ActivityRecognitionResult],
    ) -> Result<ActivitySummary> {
        Ok(ActivitySummary {
            dominant_activity: "general_activity".to_string(),
            diversity_index: 0.5,
            energy_level: 0.4,
            social_interaction_level: 0.3,
            complexity_score: 0.6,
            anomaly_indicators: Vec::new(),
        })
    }

    fn analyze_person_interaction(
        &self,
        id1: &str,
        id2: &str,
        track1: &[(f32, f32)],
        track2: &[(f32, f32)],
    ) -> Result<Option<PersonInteraction>> {
        if track1.len() != track2.len() || track1.is_empty() {
            return Ok(None);
        }

        let mut total_distance = 0.0;
        let mut relative_motion = 0.0;
        let mut close_proximity_frames = 0;

        for i in 0..track1.len() {
            let distance =
                ((track1[i].0 - track2[i].0).powi(2) + (track1[i].1 - track2[i].1).powi(2)).sqrt();
            total_distance += distance;

            if distance < 150.0 {
                close_proximity_frames += 1;
            }

            if i > 0 {
                let velocity1 = ((track1[i].0 - track1[i - 1].0).powi(2)
                    + (track1[i].1 - track1[i - 1].1).powi(2))
                .sqrt();
                let velocity2 = ((track2[i].0 - track2[i - 1].0).powi(2)
                    + (track2[i].1 - track2[i - 1].1).powi(2))
                .sqrt();
                relative_motion += (velocity1 - velocity2).abs();
            }
        }

        let avg_distance = total_distance / track1.len() as f32;
        let proximity_ratio = close_proximity_frames as f32 / track1.len() as f32;

        if proximity_ratio > 0.3 {
            let interaction_type = if relative_motion / (track1.len() as f32) < 5.0 {
                "following".to_string()
            } else if avg_distance < 100.0 {
                "conversation".to_string()
            } else {
                "collaboration".to_string()
            };

            Ok(Some(PersonInteraction {
                interaction_type,
                participants: vec![id1.to_string(), id2.to_string()],
                strength: proximity_ratio,
                duration: track1.len() as f32 / 30.0,
                proximity: avg_distance,
                attributes: HashMap::new(),
            }))
        } else {
            Ok(None)
        }
    }

    fn count_activity_types(&self, activities: &[DetectedActivity]) -> HashMap<String, usize> {
        let mut counts = HashMap::new();
        for activity_ in activities {
            *counts.entry(activity_.activity_class.clone()).or_insert(0) += 1;
        }
        counts
    }

    fn find_dominant_activity(&self, activitycounts: &HashMap<String, usize>) -> String {
        activitycounts
            .iter()
            .max_by_key(|(_, &count)| count)
            .map(|(activity_, _)| activity_.clone())
            .unwrap_or_else(|| "unknown".to_string())
    }

    fn predict_activity_transition(&self, currentactivity: &str) -> Option<String> {
        match currentactivity {
            "sitting" => Some("standing".to_string()),
            "standing" => Some("walking".to_string()),
            "walking" => Some("standing".to_string()),
            "running" => Some("walking".to_string()),
            "gesturing" => Some("standing".to_string()),
            _ => None,
        }
    }

    fn group_activities_by_similarity(
        &self,
        activities: &[DetectedActivity],
    ) -> HashMap<String, Vec<DetectedActivity>> {
        let mut groups = HashMap::new();

        for activity_ in activities {
            let group_key = if activity_.motion_characteristics.velocity > 0.5 {
                "dynamic_activities".to_string()
            } else if activity_.motion_characteristics.velocity < 0.1 {
                "static_activities".to_string()
            } else {
                "moderate_activities".to_string()
            };

            groups
                .entry(group_key)
                .or_insert_with(Vec::new)
                .push(activity_.clone());
        }

        groups
    }
}

#[derive(Debug, Clone)]
pub struct ContextClassification {
    pub scene_type: String,
    pub environment_factors: HashMap<String, f32>,
    pub temporal_context: HashMap<String, f32>,
}

#[derive(Debug, Clone)]
pub struct HierarchicalActivityStructure {
    pub levels: Vec<ActivityLevel>,
    pub activity_tree: ActivityTree,
    pub decomposition_confidence: f32,
}

#[derive(Debug, Clone)]
pub struct ActivityTree {
    pub root: ActivityNode,
    pub nodes: Vec<ActivityNode>,
    pub edges: Vec<ActivityEdge>,
}

#[derive(Debug, Clone)]
pub struct ActivityNode {
    pub node_id: String,
    pub activity_type: String,
    pub level: usize,
    pub children: Vec<String>,
}

#[derive(Debug, Clone)]
pub struct ActivityEdge {
    pub parent: String,
    pub child: String,
    pub relationship_type: String,
}

#[derive(Debug, Clone)]
pub struct ActivityPrediction {
    pub predicted_activity: String,
    pub probability: f32,
    pub expected_start_time: f32,
    pub expected_duration: f32,
    pub confidence_interval: (f32, f32),
}

impl ActionDetector {
    fn new(name: &str) -> Self {
        Self {
            name: name.to_string(),
            action_types: vec![
                "walking".to_string(),
                "sitting".to_string(),
                "standing".to_string(),
            ],
            confidence_threshold: 0.5,
            temporal_window: 30,
            feature_method: "optical_flow".to_string(),
        }
    }
}

impl ActivitySequenceAnalyzer {
    fn new() -> Self {
        Self {
            max_sequence_length: 100,
            pattern_models: Vec::new(),
            transition_models: HashMap::new(),
            anomaly_params: AnomalyDetectionParams {
                detection_threshold: 0.3,
                temporal_window: 10,
                feature_importance: Array1::ones(50),
                novelty_detection: true,
            },
        }
    }

    fn analyze_sequences(
        &self,
        frame_activities: &[ActivityRecognitionResult],
    ) -> Result<Vec<ActivitySequence>> {
        let mut sequences = Vec::new();

        if frame_activities.len() < 2 {
            return Ok(sequences);
        }

        let mut current_sequence: Option<ActivitySequence> = None;
        for frame_result in frame_activities.iter() {
            for activity_ in &frame_result.activities {
                match &mut current_sequence {
                    None => {
                        current_sequence = Some(ActivitySequence {
                            sequence_id: format!("seq_{}", sequences.len()),
                            activities: vec![activity_.clone()],
                            sequence_type: activity_.activity_class.clone(),
                            confidence: activity_.confidence,
                            transitions: Vec::new(),
                            completeness: 0.0,
                        });
                    }
                    Some(ref mut seq) => {
                        if activity_.activity_class == seq.sequence_type {
                            seq.activities.push(activity_.clone());
                            seq.confidence = (seq.confidence + activity_.confidence) / 2.0;
                        } else {
                            seq.completeness =
                                seq.activities.len() as f32 / frame_activities.len() as f32;
                            sequences.push(seq.clone());

                            current_sequence = Some(ActivitySequence {
                                sequence_id: format!("seq_{}", sequences.len()),
                                activities: vec![activity_.clone()],
                                sequence_type: activity_.activity_class.clone(),
                                confidence: activity_.confidence,
                                transitions: vec![ActivityTransition {
                                    from_activity: seq.sequence_type.clone(),
                                    to_activity: activity_.activity_class.clone(),
                                    probability: 0.8,
                                    typical_duration: 1.0,
                                }],
                                completeness: 0.0,
                            });
                        }
                    }
                }
            }
        }

        if let Some(mut seq) = current_sequence {
            seq.completeness = seq.activities.len() as f32 / frame_activities.len() as f32;
            sequences.push(seq);
        }

        Ok(sequences)
    }
}

impl MultiPersonInteractionRecognizer {
    fn new() -> Self {
        Self {
            interaction_types: vec![
                InteractionType::Conversation,
                InteractionType::Collaboration,
            ],
            tracking_params: PersonTrackingParams {
                max_tracking_distance: 50.0,
                identity_confidence_threshold: 0.8,
                re_identification_enabled: true,
                track_merge_threshold: 0.7,
            },
            social_distance_model: SocialDistanceModel {
                personal_space_radius: 0.5,
                social_space_radius: 1.5,
                public_space_radius: 3.0,
                cultural_factors: HashMap::new(),
            },
            group_recognition: GroupActivityRecognition {
                min_group_size: 2,
                max_group_size: 10,
                cohesion_threshold: 0.6,
                activity_synchronization: true,
            },
        }
    }

    fn analyze_interactions(
        &self,
        scene_analyses: &[SceneAnalysisResult],
    ) -> Result<Vec<PersonInteraction>> {
        let mut interactions = Vec::new();

        if scene_analyses.len() < 2 {
            return Ok(interactions);
        }

        let mut person_tracks: HashMap<String, Vec<(f32, f32)>> = HashMap::new();

        for scene in scene_analyses {
            for (i, object) in scene.objects.iter().enumerate() {
                if object.class == "person" {
                    let person_id = format!("person_{i}");
                    let position = (
                        object.bbox.0 + object.bbox.2 / 2.0,
                        object.bbox.1 + object.bbox.3 / 2.0,
                    );
                    person_tracks.entry(person_id).or_default().push(position);
                }
            }
        }

        let person_ids: Vec<_> = person_tracks.keys().cloned().collect();

        for i in 0..person_ids.len() {
            for j in (i + 1)..person_ids.len() {
                let id1 = &person_ids[i];
                let id2 = &person_ids[j];

                if let (Some(track1), Some(track2)) =
                    (person_tracks.get(id1), person_tracks.get(id2))
                {
                    let interaction = self.analyze_person_interaction(id1, id2, track1, track2)?;
                    if let Some(interaction) = interaction {
                        interactions.push(interaction);
                    }
                }
            }
        }

        Ok(interactions)
    }
}

impl ContextAwareActivityClassifier {
    fn new() -> Self {
        Self {
            context_features: vec![ContextFeature::SceneType, ContextFeature::CrowdDensity],
            environment_classifiers: Vec::new(),
            object_associations: HashMap::new(),
            scene_correlations: HashMap::new(),
        }
    }

    fn classify_context(
        &self,
        _scene_analysis: &SceneAnalysisResult,
    ) -> Result<ContextClassification> {
        Ok(ContextClassification {
            scene_type: "indoor".to_string(),
            environment_factors: HashMap::new(),
            temporal_context: HashMap::new(),
        })
    }
}

impl TemporalActivityModeler {
    fn new() -> Self {
        Self {
            temporal_resolution: 1.0 / 30.0,
            memory_length: 100,
            rnn_params: RNNParameters {
                hidden_size: 128,
                num_layers: 2,
                dropout_rate: 0.2,
                bidirectional: true,
            },
            attention_mechanisms: Vec::new(),
        }
    }

    fn predict_activities(
        &self,
        current_activities: &[DetectedActivity],
        prediction_horizon: f32,
    ) -> Result<Vec<ActivityPrediction>> {
        let mut predictions = Vec::new();

        if current_activities.is_empty() {
            return Ok(predictions);
        }

        let activitycounts = self.count_activity_types(current_activities);
        let dominant_activity = self.find_dominant_activity(&activitycounts);

        for (activity_type, count) in activitycounts {
            let confidence = (count as f32 / current_activities.len() as f32) * 0.8;

            let predicted_duration = if activity_type == dominant_activity {
                prediction_horizon * 0.7
            } else {
                prediction_horizon * 0.3
            };

            predictions.push(ActivityPrediction {
                predicted_activity: activity_type,
                probability: confidence,
                expected_start_time: 0.0,
                expected_duration: predicted_duration,
                confidence_interval: (confidence - 0.2, confidence + 0.2),
            });
        }

        for activity_ in current_activities {
            if let Some(transition) = self.predict_activity_transition(&activity_.activity_class) {
                predictions.push(ActivityPrediction {
                    predicted_activity: transition,
                    probability: 0.4,
                    expected_start_time: prediction_horizon * 0.5,
                    expected_duration: prediction_horizon * 0.5,
                    confidence_interval: (0.2, 0.6),
                });
            }
        }

        Ok(predictions)
    }
}

impl HierarchicalActivityDecomposer {
    fn new() -> Self {
        Self {
            hierarchy_levels: Vec::new(),
            decomposition_rules: Vec::new(),
            composition_rules: Vec::new(),
        }
    }

    fn decompose_activities(
        &self,
        activities: &[DetectedActivity],
    ) -> Result<HierarchicalActivityStructure> {
        let mut structure = HierarchicalActivityStructure {
            levels: vec![
                ActivityLevel {
                    level_name: "atomic".to_string(),
                    granularity: 1.0,
                    typical_duration: 1.0,
                    complexity: 1.0,
                },
                ActivityLevel {
                    level_name: "composite".to_string(),
                    granularity: 0.5,
                    typical_duration: 5.0,
                    complexity: 2.0,
                },
                ActivityLevel {
                    level_name: "complex".to_string(),
                    granularity: 0.2,
                    typical_duration: 15.0,
                    complexity: 3.0,
                },
            ],
            activity_tree: ActivityTree {
                root: ActivityNode {
                    node_id: "root".to_string(),
                    activity_type: "scene".to_string(),
                    level: 0,
                    children: Vec::new(),
                },
                nodes: Vec::new(),
                edges: Vec::new(),
            },
            decomposition_confidence: 0.7,
        };

        let mut node_id = 1;

        let activity_groups = self.group_activities_by_similarity(activities);

        for (group_type, group_activities) in activity_groups {
            let composite_node = ActivityNode {
                node_id: format!("composite_{node_id}"),
                activity_type: group_type.clone(),
                level: 1,
                children: Vec::new(),
            };

            structure
                .activity_tree
                .root
                .children
                .push(composite_node.node_id.clone());
            structure.activity_tree.nodes.push(composite_node.clone());

            structure.activity_tree.edges.push(ActivityEdge {
                parent: "root".to_string(),
                child: composite_node.node_id.clone(),
                relationship_type: "contains".to_string(),
            });

            for (i, activity_) in group_activities.iter().enumerate() {
                let atomic_node = ActivityNode {
                    node_id: format!("atomic_{node_id}_{i}"),
                    activity_type: activity_.activity_class.clone(),
                    level: 2,
                    children: Vec::new(),
                };

                structure.activity_tree.nodes.push(atomic_node.clone());
                structure.activity_tree.edges.push(ActivityEdge {
                    parent: composite_node.node_id.clone(),
                    child: atomic_node.node_id.clone(),
                    relationship_type: "instantiation".to_string(),
                });
            }

            node_id += 1;
        }

        Ok(structure)
    }
}

impl ActivityKnowledgeBase {
    fn new() -> Self {
        Self {
            activity_definitions: HashMap::new(),
            ontology: ActivityOntology {
                activity_hierarchy: HashMap::new(),
                activity_relationships: Vec::new(),
                semantic_similarity: Array2::zeros((50, 50)),
            },
            common_patterns: Vec::new(),
            cultural_variations: HashMap::new(),
        }
    }
}

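/// Convenience entry point: runs single-frame recognition for one frame and
/// sequence recognition for multiple frames. Inputs must be non-empty.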
#[allow(dead_code)]
pub fn recognize_activities_comprehensive(
    frames: &[ArrayView3<f32>],
    scene_analyses: &[SceneAnalysisResult],
) -> Result<ActivityRecognitionResult> {
    if frames.is_empty() || scene_analyses.is_empty() {
        return Err(VisionError::InvalidInput(
            "At least one frame and one scene analysis are required".to_string(),
        ));
    }

    let engine = ActivityRecognitionEngine::new();

    if frames.len() == 1 {
        engine.recognize_frame_activities(&frames[0], &scene_analyses[0])
    } else {
        engine.recognize_sequence_activities(frames, scene_analyses)
    }
}

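/// Real-time variant: recognizes activities in the current frame and, when an
/// activity history is supplied, applies temporal smoothing to the result.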
#[allow(dead_code)]
pub fn monitor_activities_realtime(
    current_frame: &ArrayView3<f32>,
    scene_analysis: &SceneAnalysisResult,
    activity_history: Option<&[ActivityRecognitionResult]>,
) -> Result<ActivityRecognitionResult> {
    let engine = ActivityRecognitionEngine::new();
    let mut result = engine.recognize_frame_activities(current_frame, scene_analysis)?;

    if let Some(history) = activity_history {
        result = apply_temporal_smoothing(result, history)?;
    }

    Ok(result)
}

#[allow(dead_code)]
fn apply_temporal_smoothing(
    current_result: ActivityRecognitionResult,
    _history: &[ActivityRecognitionResult],
) -> Result<ActivityRecognitionResult> {
    Ok(current_result)
}

impl ActivityRecognitionEngine {
    fn get_previous_frame(&self) -> Option<Array3<f32>> {
        None
    }

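    // A minimal per-pixel normal-flow approximation (a sketch, not a full optical
    // flow solver): under brightness constancy, Ix*u + Iy*v + It = 0, and the
    // least-norm solution used below is (u, v) = -It * (Ix, Iy) / (Ix^2 + Iy^2),
    // with Ix and Iy taken from central differences on the first channel.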
    fn compute_optical_flow(
        &self,
        current_frame: &ArrayView3<f32>,
        previous_frame: &Array3<f32>,
    ) -> Result<Array3<f32>> {
        let (height, width, _) = current_frame.dim();
        let mut flow = Array3::zeros((height, width, 2));

        for y in 1..height - 1 {
            for x in 1..width - 1 {
                let current = current_frame[[y, x, 0]];
                let previous = previous_frame[[y, x, 0]];

                let ix = (current_frame[[y, x + 1, 0]] - current_frame[[y, x - 1, 0]]) / 2.0;
                let iy = (current_frame[[y + 1, x, 0]] - current_frame[[y - 1, x, 0]]) / 2.0;
                let it = current - previous;

                if ix.abs() > 0.01 || iy.abs() > 0.01 {
                    let denominator = ix * ix + iy * iy;
                    if denominator > 0.001 {
                        flow[[y, x, 0]] = -it * ix / denominator;
                        flow[[y, x, 1]] = -it * iy / denominator;
                    }
                }
            }
        }

        Ok(flow)
    }

    fn extract_person_motion_features(
        &self,
        motion_features: &Array3<f32>,
        bbox_x: usize,
        bbox_y: usize,
        bbox_w: usize,
        bbox_h: usize,
    ) -> Result<Array1<f32>> {
        let mut person_features = Array1::zeros(20);

        let end_x = (bbox_x + bbox_w).min(motion_features.dim().1);
        let end_y = (bbox_y + bbox_h).min(motion_features.dim().0);

        let mut count = 0;
        let mut sum_velocity = 0.0;
        let mut sum_magnitude = 0.0;
        let mut sum_direction = 0.0;

        for y in bbox_y..end_y {
            for x in bbox_x..end_x {
                let magnitude = motion_features[[y, x, 2]];
                let direction = motion_features[[y, x, 3]];

                sum_velocity += magnitude;
                sum_magnitude += magnitude;
                sum_direction += direction;
                count += 1;
            }
        }

        if count > 0 {
            person_features[0] = sum_velocity / count as f32;
            person_features[1] = sum_magnitude / count as f32;
            person_features[2] = sum_direction / count as f32;
            person_features[3] = (bbox_w * bbox_h) as f32;
            person_features[4] = bbox_w as f32 / bbox_h as f32;
        }

        Ok(person_features)
    }

    fn classify_person_activity(&self, person_motionfeatures: &Array1<f32>) -> (String, f32) {
        let velocity = person_motionfeatures[0];
        let magnitude = person_motionfeatures[1];
        let aspect_ratio = person_motionfeatures[4];

        if velocity < 0.1 {
            if aspect_ratio > 0.8 {
                ("standing".to_string(), 0.8)
            } else {
                ("sitting".to_string(), 0.7)
            }
        } else if velocity < 0.5 {
            ("walking".to_string(), 0.75)
        } else if velocity < 1.0 {
            ("running".to_string(), 0.7)
        } else if magnitude > 0.5 {
            ("gesturing".to_string(), 0.6)
        } else {
            ("moving_quickly".to_string(), 0.65)
        }
    }

    fn compute_motion_characteristics(
        &self,
        person_motionfeatures: &Array1<f32>,
    ) -> MotionCharacteristics {
        MotionCharacteristics {
            velocity: person_motionfeatures[0],
            acceleration: person_motionfeatures[1] - person_motionfeatures[0],
            direction: person_motionfeatures[2],
            smoothness: 1.0 - (person_motionfeatures[1] - person_motionfeatures[0]).abs(),
            periodicity: 0.5,
        }
    }

    fn detect_object_interactions(
        &self,
        scene_analysis: &SceneAnalysisResult,
        person_object: &crate::scene_understanding::DetectedObject,
    ) -> Result<Vec<ObjectID>> {
        let mut interactions = Vec::new();
        let person_center = (
            person_object.bbox.0 + person_object.bbox.2 / 2.0,
            person_object.bbox.1 + person_object.bbox.3 / 2.0,
        );

        for object in &scene_analysis.objects {
            if object.class != "person" {
                let object_center = (
                    object.bbox.0 + object.bbox.2 / 2.0,
                    object.bbox.1 + object.bbox.3 / 2.0,
                );
                let distance = ((person_center.0 - object_center.0).powi(2)
                    + (person_center.1 - object_center.1).powi(2))
                .sqrt();

                if distance < 100.0 {
                    interactions.push(format!("{}:unknown", object.class));
                }
            }
        }

        Ok(interactions)
    }

    fn determine_activity_subtype(&self, person_motionfeatures: &Array1<f32>) -> Option<String> {
        let velocity = person_motionfeatures[0];
        let magnitude = person_motionfeatures[1];

        if velocity > 0.8 {
            Some("fast".to_string())
        } else if velocity < 0.2 {
            Some("slow".to_string())
        } else if magnitude > 0.6 {
            Some("active".to_string())
        } else {
            None
        }
    }

    fn extract_activity_attributes(
        &self,
        person_motionfeatures: &Array1<f32>,
    ) -> HashMap<String, f32> {
        let mut attributes = HashMap::new();

        attributes.insert("velocity".to_string(), person_motionfeatures[0]);
        attributes.insert("magnitude".to_string(), person_motionfeatures[1]);
        attributes.insert("direction".to_string(), person_motionfeatures[2]);
        attributes.insert("size".to_string(), person_motionfeatures[3]);
        attributes.insert("aspect_ratio".to_string(), person_motionfeatures[4]);

        attributes
    }
}

impl TemporalActivityModeler {
    fn count_activity_types(&self, activities: &[DetectedActivity]) -> HashMap<String, usize> {
        let mut counts = HashMap::new();
        for activity_ in activities {
            *counts.entry(activity_.activity_class.clone()).or_insert(0) += 1;
        }
        counts
    }

    fn find_dominant_activity(&self, activitycounts: &HashMap<String, usize>) -> String {
        activitycounts
            .iter()
            .max_by_key(|(_, &count)| count)
            .map(|(activity_, _)| activity_.clone())
            .unwrap_or_else(|| "unknown".to_string())
    }

    fn predict_activity_transition(&self, currentactivity: &str) -> Option<String> {
        match currentactivity {
            "sitting" => Some("standing".to_string()),
            "standing" => Some("walking".to_string()),
            "walking" => Some("standing".to_string()),
            "running" => Some("walking".to_string()),
            "gesturing" => Some("standing".to_string()),
            _ => None,
        }
    }
}
1673
1674impl HierarchicalActivityDecomposer {
1675 fn group_activities_by_similarity(
1676 &self,
1677 activities: &[DetectedActivity],
1678 ) -> HashMap<String, Vec<DetectedActivity>> {
1679 let mut groups = HashMap::new();
1680
1681 for activity_ in activities {
1682 let group_key = if activity_.motion_characteristics.velocity > 0.5 {
1683 "dynamic_activities".to_string()
1684 } else if activity_.motion_characteristics.velocity < 0.1 {
1685 "static_activities".to_string()
1686 } else {
1687 "moderate_activities".to_string()
1688 };
1689
1690 groups
1691 .entry(group_key)
1692 .or_insert_with(Vec::new)
1693 .push(activity_.clone());
1694 }
1695
1696 groups
1697 }
1698}

impl MultiPersonInteractionRecognizer {
    fn analyze_person_interaction(
        &self,
        id1: &str,
        id2: &str,
        track1: &[(f32, f32)],
        track2: &[(f32, f32)],
    ) -> Result<Option<PersonInteraction>> {
        if track1.len() != track2.len() || track1.is_empty() {
            return Ok(None);
        }

        let mut total_distance = 0.0;
        let mut relative_motion = 0.0;
        let mut close_proximity_frames = 0;

        for i in 0..track1.len() {
            let distance =
                ((track1[i].0 - track2[i].0).powi(2) + (track1[i].1 - track2[i].1).powi(2)).sqrt();
            total_distance += distance;

            if distance < 150.0 {
                close_proximity_frames += 1;
            }

            if i > 0 {
                let velocity1 = ((track1[i].0 - track1[i - 1].0).powi(2)
                    + (track1[i].1 - track1[i - 1].1).powi(2))
                .sqrt();
                let velocity2 = ((track2[i].0 - track2[i - 1].0).powi(2)
                    + (track2[i].1 - track2[i - 1].1).powi(2))
                .sqrt();
                relative_motion += (velocity1 - velocity2).abs();
            }
        }

        let avg_distance = total_distance / track1.len() as f32;
        let proximity_ratio = close_proximity_frames as f32 / track1.len() as f32;

        if proximity_ratio > 0.3 {
            let interaction_type = if relative_motion / (track1.len() as f32) < 5.0 {
                "following".to_string()
            } else if avg_distance < 100.0 {
                "conversation".to_string()
            } else {
                "collaboration".to_string()
            };

            Ok(Some(PersonInteraction {
                interaction_type,
                participants: vec![id1.to_string(), id2.to_string()],
                strength: proximity_ratio,
                duration: track1.len() as f32 / 30.0,
                proximity: avg_distance,
                attributes: HashMap::new(),
            }))
        } else {
            Ok(None)
        }
    }
}
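
// --- Illustrative smoke tests (a minimal sketch, not part of the original module) ---
// These only exercise the self-contained motion heuristics above; the expected
// values simply mirror the thresholds hard-coded in `classify_person_activity`
// and `compute_optical_flow`. No `SceneAnalysisResult` is constructed here.
#[cfg(test)]
mod activity_recognition_smoke_tests {
    use super::*;

    #[test]
    fn static_person_with_upright_box_classified_as_standing() {
        let engine = ActivityRecognitionEngine::new();
        let mut features = Array1::<f32>::zeros(20);
        features[0] = 0.05; // velocity below the 0.1 "static" threshold
        features[4] = 1.0; // aspect ratio above the 0.8 "standing" threshold
        let (class, confidence) = engine.classify_person_activity(&features);
        assert_eq!(class, "standing");
        assert!(confidence > 0.5);
    }

    #[test]
    fn optical_flow_is_zero_for_identical_frames() {
        let engine = ActivityRecognitionEngine::new();
        let frame = Array3::<f32>::zeros((8, 8, 3));
        let view = frame.view();
        // With identical frames the temporal derivative is zero everywhere,
        // so the normal-flow estimate should be (0, 0) at every pixel.
        let flow = match engine.compute_optical_flow(&view, &frame) {
            Ok(flow) => flow,
            Err(_) => panic!("flow computation failed on valid input"),
        };
        assert!(flow.iter().all(|v| v.abs() < 1e-6));
    }
}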