use crate::Vector;
use anyhow::Result;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::sync::{Arc, RwLock};
use tokio::task::JoinHandle;
use tracing::{debug, info};

/// Analyzer that maintains embeddings for products, employees, and customers
/// and runs periodic enterprise-wide analysis tasks.
pub struct EnterpriseKnowledgeAnalyzer {
    /// Product embeddings keyed by product ID.
    product_embeddings: Arc<RwLock<HashMap<String, ProductEmbedding>>>,
    /// Employee embeddings keyed by employee ID.
    employee_embeddings: Arc<RwLock<HashMap<String, EmployeeEmbedding>>>,
    /// Customer embeddings keyed by customer ID.
    customer_embeddings: Arc<RwLock<HashMap<String, CustomerEmbedding>>>,
    /// Product category hierarchy and per-category embeddings.
    category_hierarchy: Arc<RwLock<CategoryHierarchy>>,
    /// Departments, teams, reporting lines, and projects.
    organizational_structure: Arc<RwLock<OrganizationalStructure>>,
    /// Recommendation engines keyed by engine name.
    recommendation_engines: Arc<RwLock<HashMap<String, RecommendationEngine>>>,
    /// Analyzer configuration.
    config: EnterpriseConfig,
    /// Handles for the background analysis tasks.
    analysis_tasks: Vec<JoinHandle<()>>,
}

/// Configuration for the enterprise knowledge analyzer.
#[derive(Debug, Clone)]
pub struct EnterpriseConfig {
    /// Maximum number of products to track.
    pub max_products: usize,
    /// Maximum number of employees to track.
    pub max_employees: usize,
    /// Maximum number of customers to track.
    pub max_customers: usize,
    /// How often product recommendations are refreshed, in hours.
    pub product_recommendation_refresh_hours: u64,
    /// How often employee skill analysis runs, in hours.
    pub skill_analysis_interval_hours: u64,
    /// How often market trend analysis runs, in hours.
    pub market_analysis_interval_hours: u64,
    /// Whether to track customer behavior in real time.
    pub enable_real_time_customer_tracking: bool,
    /// Minimum number of customer interactions required for analysis.
    pub min_interaction_threshold: u32,
    /// Dimensionality of the generated embedding vectors.
    pub embedding_dimension: usize,
    /// Recommendation-specific configuration.
    pub recommendation_config: RecommendationConfig,
}

impl Default for EnterpriseConfig {
    fn default() -> Self {
        Self {
            max_products: 500_000,
            max_employees: 50_000,
            max_customers: 1_000_000,
            product_recommendation_refresh_hours: 6,
            skill_analysis_interval_hours: 24,
            market_analysis_interval_hours: 12,
            enable_real_time_customer_tracking: true,
            min_interaction_threshold: 3,
            embedding_dimension: 256,
            recommendation_config: RecommendationConfig::default(),
        }
    }
}

/// Configuration for product recommendation generation.
#[derive(Debug, Clone)]
pub struct RecommendationConfig {
    /// Number of recommendations to return per customer.
    pub num_recommendations: usize,
    /// Minimum similarity score for a product to be recommended.
    pub similarity_threshold: f64,
    /// Weight given to diversity when ranking recommendations.
    pub diversity_factor: f64,
    /// Enable collaborative filtering.
    pub enable_collaborative_filtering: bool,
    /// Enable content-based filtering.
    pub enable_content_based_filtering: bool,
    /// Enable hybrid recommendation strategies.
    pub enable_hybrid: bool,
    /// Strategy used for customers with little or no history.
    pub cold_start_strategy: ColdStartStrategy,
}

impl Default for RecommendationConfig {
    fn default() -> Self {
        Self {
            num_recommendations: 10,
            similarity_threshold: 0.3,
            diversity_factor: 0.2,
            enable_collaborative_filtering: true,
            enable_content_based_filtering: true,
            enable_hybrid: true,
            cold_start_strategy: ColdStartStrategy::PopularityBased,
        }
    }
}

#[derive(Debug, Clone)]
pub enum ColdStartStrategy {
    PopularityBased,
    ContentBased,
    DemographicBased,
    RandomSampling,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProductEmbedding {
    pub product_id: String,
    pub name: String,
    pub description: String,
    pub category: String,
    pub subcategories: Vec<String>,
    pub features: Vec<ProductFeature>,
    pub price: f64,
    pub availability: ProductAvailability,
    pub sales_metrics: SalesMetrics,
    pub ratings: CustomerRatings,
    pub embedding: Vector,
    pub similar_products: Vec<String>,
    pub market_position: f64,
    pub last_updated: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProductFeature {
    pub feature_name: String,
    pub feature_value: String,
    pub feature_type: FeatureType,
    pub importance_score: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum FeatureType {
    Categorical,
    Numerical,
    Boolean,
    Text,
    List,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProductAvailability {
    InStock(u32),
    OutOfStock,
    Discontinued,
    PreOrder(DateTime<Utc>),
    Limited(u32),
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SalesMetrics {
    pub units_sold: u64,
    pub revenue: f64,
    pub sales_velocity: f64,
    pub conversion_rate: f64,
    pub return_rate: f64,
    pub profit_margin: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomerRatings {
    pub average_rating: f64,
    pub review_count: u32,
    pub rating_distribution: HashMap<u8, u32>,
    pub sentiment_score: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmployeeEmbedding {
    pub employee_id: String,
    pub name: String,
    pub job_title: String,
    pub department: String,
    pub team: String,
    pub skills: Vec<Skill>,
    pub experience_level: ExperienceLevel,
    pub performance_metrics: PerformanceMetrics,
    pub project_history: Vec<ProjectParticipation>,
    pub collaborators: Vec<String>,
    pub embedding: Vector,
    pub career_predictions: CareerPredictions,
    pub last_updated: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Skill {
    pub skill_name: String,
    pub category: SkillCategory,
    pub proficiency_level: u8,
    pub years_experience: f64,
    pub role_importance: f64,
    pub market_demand: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum SkillCategory {
    Technical,
    Leadership,
    Communication,
    Analytical,
    Creative,
    Domain,
    Language,
    Tools,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ExperienceLevel {
    Junior,
    Mid,
    Senior,
    Lead,
    Principal,
    Executive,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceMetrics {
    pub overall_score: f64,
    pub goal_achievement_rate: f64,
    pub project_completion_rate: f64,
    pub collaboration_score: f64,
    pub innovation_score: f64,
    pub leadership_score: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectParticipation {
    pub project_id: String,
    pub project_name: String,
    pub role: String,
    pub start_date: DateTime<Utc>,
    pub end_date: Option<DateTime<Utc>>,
    pub outcome: ProjectOutcome,
    pub contribution_score: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProjectOutcome {
    Successful,
    PartiallySuccessful,
    Failed,
    Cancelled,
    Ongoing,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CareerPredictions {
    pub promotion_likelihood: f64,
    pub next_role: String,
    pub skills_to_develop: Vec<String>,
    pub career_paths: Vec<String>,
    pub retention_risk: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomerEmbedding {
    pub customer_id: String,
    pub name: String,
    pub segment: CustomerSegment,
    pub purchase_history: Vec<Purchase>,
    pub preferences: CustomerPreferences,
    pub behavior_metrics: BehaviorMetrics,
    pub embedding: Vector,
    pub predicted_ltv: f64,
    pub churn_risk: f64,
    pub recommendations: Vec<ProductRecommendation>,
    pub last_updated: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CustomerSegment {
    HighValue,
    Regular,
    Occasional,
    NewCustomer,
    AtRisk,
    Churned,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Purchase {
    pub product_id: String,
    pub purchase_date: DateTime<Utc>,
    pub quantity: u32,
    pub price: f64,
    pub channel: PurchaseChannel,
    pub satisfaction: Option<u8>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PurchaseChannel {
    Online,
    InStore,
    Mobile,
    Phone,
    ThirdParty,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomerPreferences {
    pub preferred_categories: Vec<String>,
    pub price_sensitivity: f64,
    pub brand_loyalty: HashMap<String, f64>,
    pub preferred_channels: Vec<PurchaseChannel>,
    pub communication_preferences: CommunicationPreferences,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommunicationPreferences {
    pub email_opt_in: bool,
    pub sms_opt_in: bool,
    pub frequency: CommunicationFrequency,
    pub content_types: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CommunicationFrequency {
    Daily,
    Weekly,
    Monthly,
    Quarterly,
    Never,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BehaviorMetrics {
    pub visit_frequency: f64,
    pub avg_session_duration: f64,
    pub avg_products_viewed: f64,
    pub cart_abandonment_rate: f64,
    pub return_visit_rate: f64,
    pub referral_rate: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProductRecommendation {
    pub product_id: String,
    pub score: f64,
    pub reason: RecommendationReason,
    pub confidence: f64,
    pub expected_revenue: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RecommendationReason {
    SimilarProducts,
    CustomersBought,
    PopularInCategory,
    PersonalizedPreference,
    TrendingNow,
    SeasonalRecommendation,
}

#[derive(Debug, Clone)]
pub struct CategoryHierarchy {
    pub categories: HashMap<String, Category>,
    pub parent_child: HashMap<String, Vec<String>>,
    pub category_embeddings: HashMap<String, Vector>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
    pub category_id: String,
    pub name: String,
    pub parent: Option<String>,
    pub children: Vec<String>,
    pub products: Vec<String>,
    pub attributes: HashMap<String, String>,
    pub performance: CategoryPerformance,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CategoryPerformance {
    pub total_sales: f64,
    pub product_count: u32,
    pub average_rating: f64,
    pub growth_rate: f64,
    pub market_share: f64,
}

#[derive(Debug, Clone)]
pub struct OrganizationalStructure {
    pub departments: HashMap<String, Department>,
    pub teams: HashMap<String, Team>,
    pub reporting_structure: HashMap<String, Vec<String>>,
    pub projects: HashMap<String, Project>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Department {
    pub department_id: String,
    pub name: String,
    pub head: String,
    pub employees: Vec<String>,
    pub teams: Vec<String>,
    pub budget: f64,
    pub performance: DepartmentPerformance,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DepartmentPerformance {
    pub budget_utilization: f64,
    pub goal_achievement: f64,
    pub employee_satisfaction: f64,
    pub productivity_score: f64,
    pub innovation_index: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Team {
    pub team_id: String,
    pub name: String,
    pub lead: String,
    pub members: Vec<String>,
    pub department: String,
    pub team_skills: Vec<Skill>,
    pub performance: TeamPerformance,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TeamPerformance {
    pub collaboration_score: f64,
    pub delivery_performance: f64,
    pub quality_score: f64,
    pub innovation_score: f64,
    pub team_satisfaction: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Project {
    pub project_id: String,
    pub name: String,
    pub description: String,
    pub manager: String,
    pub team_members: Vec<String>,
    pub start_date: DateTime<Utc>,
    pub end_date: Option<DateTime<Utc>>,
    pub budget: f64,
    pub status: ProjectStatus,
    pub required_skills: Vec<String>,
    pub performance: ProjectPerformance,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProjectStatus {
    Planning,
    InProgress,
    OnHold,
    Completed,
    Cancelled,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectPerformance {
    pub progress_percentage: f64,
    pub budget_utilization: f64,
    pub timeline_adherence: f64,
    pub quality_score: f64,
    pub stakeholder_satisfaction: f64,
}

#[derive(Debug, Clone)]
pub struct RecommendationEngine {
    pub engine_type: RecommendationEngineType,
    pub parameters: HashMap<String, f64>,
    pub performance: RecommendationPerformance,
    pub last_update: DateTime<Utc>,
}

#[derive(Debug, Clone)]
pub enum RecommendationEngineType {
    CollaborativeFiltering,
    ContentBased,
    MatrixFactorization,
    DeepLearning,
    Hybrid,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RecommendationPerformance {
    pub precision_at_k: HashMap<u32, f64>,
    pub recall_at_k: HashMap<u32, f64>,
    pub ndcg_scores: HashMap<u32, f64>,
    pub click_through_rate: f64,
    pub conversion_rate: f64,
    pub revenue_impact: f64,
}

impl EnterpriseKnowledgeAnalyzer {
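    /// Creates a new analyzer with the given configuration.
    ///
    /// A minimal usage sketch (the crate path, async runtime setup, and error
    /// handling are assumptions, so the example is not compiled as a doctest):
    ///
    /// ```ignore
    /// let config = EnterpriseConfig {
    ///     embedding_dimension: 128,
    ///     ..EnterpriseConfig::default()
    /// };
    /// let mut analyzer = EnterpriseKnowledgeAnalyzer::new(config);
    /// analyzer.start().await?;
    /// let product = analyzer.generate_product_embedding("prod_001").await?;
    /// println!("market position: {:.3}", product.market_position);
    /// analyzer.stop().await;
    /// ```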
    pub fn new(config: EnterpriseConfig) -> Self {
        Self {
            product_embeddings: Arc::new(RwLock::new(HashMap::new())),
            employee_embeddings: Arc::new(RwLock::new(HashMap::new())),
            customer_embeddings: Arc::new(RwLock::new(HashMap::new())),
            category_hierarchy: Arc::new(RwLock::new(CategoryHierarchy {
                categories: HashMap::new(),
                parent_child: HashMap::new(),
                category_embeddings: HashMap::new(),
            })),
            organizational_structure: Arc::new(RwLock::new(OrganizationalStructure {
                departments: HashMap::new(),
                teams: HashMap::new(),
                reporting_structure: HashMap::new(),
                projects: HashMap::new(),
            })),
            recommendation_engines: Arc::new(RwLock::new(HashMap::new())),
            config,
            analysis_tasks: Vec::new(),
        }
    }

    /// Starts the background analysis tasks.
    pub async fn start(&mut self) -> Result<()> {
        info!("Starting enterprise knowledge analysis system");

        let recommendation_task = self.start_recommendation_engine().await;
        self.analysis_tasks.push(recommendation_task);

        let skill_analysis_task = self.start_skill_analysis().await;
        self.analysis_tasks.push(skill_analysis_task);

        let market_analysis_task = self.start_market_analysis().await;
        self.analysis_tasks.push(market_analysis_task);

        let org_optimization_task = self.start_organizational_optimization().await;
        self.analysis_tasks.push(org_optimization_task);

        info!("Enterprise knowledge analysis system started successfully");
        Ok(())
    }

    /// Stops all background analysis tasks.
    pub async fn stop(&mut self) {
        info!("Stopping enterprise knowledge analysis system");

        for task in self.analysis_tasks.drain(..) {
            task.abort();
        }

        info!("Enterprise knowledge analysis system stopped");
    }

    /// Generates (or returns a cached) embedding for the given product.
    pub async fn generate_product_embedding(&self, product_id: &str) -> Result<ProductEmbedding> {
        // Return the cached embedding if one already exists.
        {
            let embeddings = self.product_embeddings.read().expect("lock poisoned");
            if let Some(existing) = embeddings.get(product_id) {
                return Ok(existing.clone());
            }
        }

        info!("Generating product embedding for: {}", product_id);

        // Simulated product attributes (hardcoded sample data).
        let name = format!("Product_{product_id}");
        let description = format!("Description for product {product_id}");
        let category = "Electronics".to_string();
        let subcategories = vec!["Smartphones".to_string(), "Mobile".to_string()];

        let features = vec![
            ProductFeature {
                feature_name: "Brand".to_string(),
                feature_value: "TechCorp".to_string(),
                feature_type: FeatureType::Categorical,
                importance_score: 0.9,
            },
            ProductFeature {
                feature_name: "Price".to_string(),
                feature_value: "299.99".to_string(),
                feature_type: FeatureType::Numerical,
                importance_score: 0.8,
            },
        ];

        let price = 299.99;
        let availability = ProductAvailability::InStock(100);

        let sales_metrics = SalesMetrics {
            units_sold: 1500,
            revenue: 449_985.0,
            sales_velocity: 25.5,
            conversion_rate: 0.12,
            return_rate: 0.03,
            profit_margin: 0.35,
        };

        let mut rating_distribution = HashMap::new();
        rating_distribution.insert(5, 120);
        rating_distribution.insert(4, 80);
        rating_distribution.insert(3, 30);
        rating_distribution.insert(2, 10);
        rating_distribution.insert(1, 5);

        let ratings = CustomerRatings {
            average_rating: 4.2,
            review_count: 245,
            rating_distribution,
            sentiment_score: 0.7,
        };

        let embedding = self
            .compute_product_embedding_vector(&name, &description, &features, &sales_metrics)
            .await?;

        let similar_products = self.find_similar_products(product_id, &embedding).await?;

        let market_position = self
            .calculate_market_position(&sales_metrics, &ratings)
            .await?;

        let product_embedding = ProductEmbedding {
            product_id: product_id.to_string(),
            name,
            description,
            category,
            subcategories,
            features,
            price,
            availability,
            sales_metrics,
            ratings,
            embedding,
            similar_products,
            market_position,
            last_updated: Utc::now(),
        };

        // Cache the freshly generated embedding.
        {
            let mut embeddings = self.product_embeddings.write().expect("lock poisoned");
            embeddings.insert(product_id.to_string(), product_embedding.clone());
        }

        info!(
            "Generated product embedding for {} with market position: {:.3}",
            product_id, market_position
        );
        Ok(product_embedding)
    }

    /// Generates (or returns a cached) embedding for the given employee.
    pub async fn generate_employee_embedding(
        &self,
        employee_id: &str,
    ) -> Result<EmployeeEmbedding> {
        // Return the cached embedding if one already exists.
        {
            let embeddings = self.employee_embeddings.read().expect("lock poisoned");
            if let Some(existing) = embeddings.get(employee_id) {
                return Ok(existing.clone());
            }
        }

        info!("Generating employee embedding for: {}", employee_id);

        // Simulated employee profile (hardcoded sample data).
        let name = format!("Employee_{employee_id}");
        let job_title = "Software Engineer".to_string();
        let department = "Engineering".to_string();
        let team = "Backend Team".to_string();

        let skills = vec![
            Skill {
                skill_name: "Python".to_string(),
                category: SkillCategory::Technical,
                proficiency_level: 8,
                years_experience: 5.0,
                role_importance: 0.9,
                market_demand: 0.85,
            },
            Skill {
                skill_name: "Leadership".to_string(),
                category: SkillCategory::Leadership,
                proficiency_level: 6,
                years_experience: 2.0,
                role_importance: 0.6,
                market_demand: 0.9,
            },
        ];

        let experience_level = ExperienceLevel::Mid;

        let performance_metrics = PerformanceMetrics {
            overall_score: 8.2,
            goal_achievement_rate: 0.92,
            project_completion_rate: 0.95,
            collaboration_score: 8.5,
            innovation_score: 7.8,
            leadership_score: 6.5,
        };

        let project_history = vec![ProjectParticipation {
            project_id: "proj_001".to_string(),
            project_name: "Customer Portal".to_string(),
            role: "Backend Developer".to_string(),
            start_date: Utc::now() - chrono::Duration::days(365),
            end_date: Some(Utc::now() - chrono::Duration::days(300)),
            outcome: ProjectOutcome::Successful,
            contribution_score: 8.5,
        }];

        let collaborators = vec!["emp_002".to_string(), "emp_003".to_string()];

        let embedding = self
            .compute_employee_embedding_vector(&skills, &performance_metrics, &project_history)
            .await?;

        let career_predictions = self
            .predict_career_progression(&skills, &performance_metrics, &experience_level)
            .await?;

        let employee_embedding = EmployeeEmbedding {
            employee_id: employee_id.to_string(),
            name,
            job_title,
            department,
            team,
            skills,
            experience_level,
            performance_metrics,
            project_history,
            collaborators,
            embedding,
            career_predictions,
            last_updated: Utc::now(),
        };

        // Cache the freshly generated embedding.
        {
            let mut embeddings = self.employee_embeddings.write().expect("lock poisoned");
            embeddings.insert(employee_id.to_string(), employee_embedding.clone());
        }

        info!(
            "Generated employee embedding for {} with promotion likelihood: {:.3}",
            employee_id, employee_embedding.career_predictions.promotion_likelihood
        );
        Ok(employee_embedding)
    }

    /// Generates (or returns a cached) embedding for the given customer.
    pub async fn generate_customer_embedding(
        &self,
        customer_id: &str,
    ) -> Result<CustomerEmbedding> {
        // Return the cached embedding if one already exists.
        {
            let embeddings = self.customer_embeddings.read().expect("lock poisoned");
            if let Some(existing) = embeddings.get(customer_id) {
                return Ok(existing.clone());
            }
        }

        info!("Generating customer embedding for: {}", customer_id);

        // Simulated customer profile (hardcoded sample data).
        let name = format!("Customer_{customer_id}");
        let segment = CustomerSegment::Regular;

        let purchase_history = vec![
            Purchase {
                product_id: "prod_001".to_string(),
                purchase_date: Utc::now() - chrono::Duration::days(30),
                quantity: 1,
                price: 299.99,
                channel: PurchaseChannel::Online,
                satisfaction: Some(4),
            },
            Purchase {
                product_id: "prod_002".to_string(),
                purchase_date: Utc::now() - chrono::Duration::days(60),
                quantity: 2,
                price: 149.99,
                channel: PurchaseChannel::InStore,
                satisfaction: Some(5),
            },
        ];

        let mut brand_loyalty = HashMap::new();
        brand_loyalty.insert("TechCorp".to_string(), 0.8);
        brand_loyalty.insert("InnovateCo".to_string(), 0.6);

        let preferences = CustomerPreferences {
            preferred_categories: vec!["Electronics".to_string(), "Books".to_string()],
            price_sensitivity: 0.6,
            brand_loyalty,
            preferred_channels: vec![PurchaseChannel::Online, PurchaseChannel::Mobile],
            communication_preferences: CommunicationPreferences {
                email_opt_in: true,
                sms_opt_in: false,
                frequency: CommunicationFrequency::Weekly,
                content_types: vec!["Promotions".to_string(), "NewProducts".to_string()],
            },
        };

        let behavior_metrics = BehaviorMetrics {
            visit_frequency: 2.5,
            avg_session_duration: 12.5,
            avg_products_viewed: 8.2,
            cart_abandonment_rate: 0.25,
            return_visit_rate: 0.7,
            referral_rate: 0.1,
        };

        let embedding = self
            .compute_customer_embedding_vector(&purchase_history, &preferences, &behavior_metrics)
            .await?;

        let predicted_ltv = self
            .predict_customer_ltv(&purchase_history, &behavior_metrics)
            .await?;

        let churn_risk = self
            .calculate_churn_risk(&behavior_metrics, &purchase_history)
            .await?;

        let recommendations = self
            .generate_customer_recommendations(customer_id, &embedding)
            .await?;

        let customer_embedding = CustomerEmbedding {
            customer_id: customer_id.to_string(),
            name,
            segment,
            purchase_history,
            preferences,
            behavior_metrics,
            embedding,
            predicted_ltv,
            churn_risk,
            recommendations,
            last_updated: Utc::now(),
        };

        // Cache the freshly generated embedding.
        {
            let mut embeddings = self.customer_embeddings.write().expect("lock poisoned");
            embeddings.insert(customer_id.to_string(), customer_embedding.clone());
        }

        info!(
            "Generated customer embedding for {} with LTV: ${:.2} and churn risk: {:.3}",
            customer_id, predicted_ltv, churn_risk
        );
        Ok(customer_embedding)
    }

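    /// Returns up to `num_recommendations` product recommendations for the
    /// given customer, reusing cached recommendations while they are fresh.
    ///
    /// A minimal usage sketch (the customer ID and surrounding runtime setup
    /// are assumptions):
    ///
    /// ```ignore
    /// let recs = analyzer.recommend_products("cust_042", 5).await?;
    /// for rec in &recs {
    ///     println!("{} score={:.2} confidence={:.2}", rec.product_id, rec.score, rec.confidence);
    /// }
    /// ```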
    pub async fn recommend_products(
        &self,
        customer_id: &str,
        num_recommendations: usize,
    ) -> Result<Vec<ProductRecommendation>> {
        let customer_embedding = self.generate_customer_embedding(customer_id).await?;

        // Reuse cached recommendations if they are still within the refresh window.
        let refresh_window =
            chrono::Duration::hours(self.config.product_recommendation_refresh_hours as i64);
        if !customer_embedding.recommendations.is_empty()
            && customer_embedding.last_updated > Utc::now() - refresh_window
        {
            return Ok(customer_embedding
                .recommendations
                .into_iter()
                .take(num_recommendations)
                .collect());
        }

        let recommendations = self
            .generate_customer_recommendations(customer_id, &customer_embedding.embedding)
            .await?;
        Ok(recommendations
            .into_iter()
            .take(num_recommendations)
            .collect())
    }

    /// Finds the `k` employees most similar to the given employee.
    pub async fn find_similar_employees(
        &self,
        employee_id: &str,
        k: usize,
    ) -> Result<Vec<(String, f64)>> {
        let target_embedding = self.generate_employee_embedding(employee_id).await?;
        let embeddings = {
            let guard = self.employee_embeddings.read().expect("lock poisoned");
            guard.clone()
        };

        let mut similarities = Vec::new();

        for (other_id, other_embedding) in embeddings.iter() {
            if other_id != employee_id {
                let similarity = self
                    .calculate_employee_similarity(&target_embedding, other_embedding)
                    .await?;
                similarities.push((other_id.clone(), similarity));
            }
        }

        similarities.sort_by(|a, b| {
            b.1.partial_cmp(&a.1)
                .expect("similarity scores should be finite")
        });
        similarities.truncate(k);

        Ok(similarities)
    }

    /// Proposes a team for a project by ranking employees on skill match.
    pub async fn optimize_team_composition(
        &self,
        _project_id: &str,
        required_skills: &[String],
    ) -> Result<Vec<String>> {
        let employees = {
            let guard = self.employee_embeddings.read().expect("lock poisoned");
            guard.clone()
        };
        let mut candidates = Vec::new();

        for (employee_id, employee) in employees.iter() {
            let skill_match_score = self
                .calculate_skill_match_score(&employee.skills, required_skills)
                .await?;
            candidates.push((employee_id.clone(), skill_match_score));
        }

        candidates.sort_by(|a, b| {
            b.1.partial_cmp(&a.1)
                .expect("candidate scores should be finite")
        });

        // Select the top-ranked candidates for a team of five.
        let optimal_team = self.select_optimal_team(candidates, 5).await?;

        Ok(optimal_team)
    }

    /// Aggregates product and customer data into a market trend analysis.
    pub async fn analyze_market_trends(&self) -> Result<MarketAnalysis> {
        let products = {
            let guard = self.product_embeddings.read().expect("lock poisoned");
            guard.clone()
        };
        let customers = {
            let guard = self.customer_embeddings.read().expect("lock poisoned");
            guard.clone()
        };

        // Aggregate sales and ratings per category.
        let mut category_performance = HashMap::new();
        let mut trending_products = Vec::new();

        for (product_id, product) in products.iter() {
            let performance = category_performance
                .entry(product.category.clone())
                .or_insert(CategoryPerformance {
                    total_sales: 0.0,
                    product_count: 0,
                    average_rating: 0.0,
                    growth_rate: 0.0,
                    market_share: 0.0,
                });

            performance.total_sales += product.sales_metrics.revenue;
            performance.product_count += 1;
            performance.average_rating += product.ratings.average_rating;

            // Treat high-velocity products as trending.
            if product.sales_metrics.sales_velocity > 20.0 {
                trending_products.push(product_id.clone());
            }
        }

        // Convert rating sums into per-category averages.
        for performance in category_performance.values_mut() {
            if performance.product_count > 0 {
                performance.average_rating /= performance.product_count as f64;
            }
        }

        // Count customers per segment.
        let mut segment_analysis = HashMap::new();
        for customer in customers.values() {
            let segment_name = format!("{:?}", customer.segment);
            let count = segment_analysis.entry(segment_name).or_insert(0);
            *count += 1;
        }

        Ok(MarketAnalysis {
            category_performance,
            trending_products,
            segment_distribution: segment_analysis,
            market_opportunities: self.identify_market_opportunities().await?,
            competitive_landscape: self.analyze_competitive_landscape().await?,
            forecast: self.generate_market_forecast().await?,
        })
    }

    /// Placeholder embedding computation: currently returns a random vector
    /// of the configured dimension.
    async fn compute_product_embedding_vector(
        &self,
        _name: &str,
        _description: &str,
        _features: &[ProductFeature],
        _sales_metrics: &SalesMetrics,
    ) -> Result<Vector> {
        let values = {
            use scirs2_core::random::{Random, Rng};
            let mut random = Random::default();
            (0..self.config.embedding_dimension)
                .map(|_| random.random::<f32>())
                .collect()
        };
        Ok(Vector::new(values))
    }

    async fn find_similar_products(
        &self,
        _product_id: &str,
        _embedding: &Vector,
    ) -> Result<Vec<String>> {
        // Placeholder similar-product list.
        Ok(vec!["prod_002".to_string(), "prod_003".to_string()])
    }

    /// Heuristic market position: 60% weight on sales velocity, 40% on ratings.
    async fn calculate_market_position(
        &self,
        sales_metrics: &SalesMetrics,
        ratings: &CustomerRatings,
    ) -> Result<f64> {
        let sales_score = (sales_metrics.sales_velocity / 100.0).min(1.0);
        let rating_score = ratings.average_rating / 5.0;
        let position = (sales_score * 0.6 + rating_score * 0.4).min(1.0);
        Ok(position)
    }

    /// Placeholder embedding computation: currently returns a random vector
    /// of the configured dimension.
    async fn compute_employee_embedding_vector(
        &self,
        _skills: &[Skill],
        _performance: &PerformanceMetrics,
        _projects: &[ProjectParticipation],
    ) -> Result<Vector> {
        let values = {
            use scirs2_core::random::{Random, Rng};
            let mut random = Random::default();
            (0..self.config.embedding_dimension)
                .map(|_| random.random::<f32>())
                .collect()
        };
        Ok(Vector::new(values))
    }

    /// Predicts career progression from performance and skill proficiency.
    async fn predict_career_progression(
        &self,
        skills: &[Skill],
        performance: &PerformanceMetrics,
        _experience_level: &ExperienceLevel,
    ) -> Result<CareerPredictions> {
        let performance_factor = performance.overall_score / 10.0;
        // Average skill proficiency; guard against an empty skill list.
        let skill_factor = skills
            .iter()
            .map(|s| s.proficiency_level as f64 / 10.0)
            .sum::<f64>()
            / skills.len().max(1) as f64;
        let promotion_likelihood = (performance_factor * 0.7 + skill_factor * 0.3).min(1.0);

        Ok(CareerPredictions {
            promotion_likelihood,
            next_role: "Senior Software Engineer".to_string(),
            skills_to_develop: vec!["Team Leadership".to_string(), "System Design".to_string()],
            career_paths: vec![
                "Technical Lead".to_string(),
                "Engineering Manager".to_string(),
            ],
            retention_risk: 1.0 - promotion_likelihood * 0.8,
        })
    }

    /// Placeholder embedding computation: currently returns a random vector
    /// of the configured dimension.
    async fn compute_customer_embedding_vector(
        &self,
        _purchases: &[Purchase],
        _preferences: &CustomerPreferences,
        _behavior: &BehaviorMetrics,
    ) -> Result<Vector> {
        let values = {
            use scirs2_core::random::{Random, Rng};
            let mut random = Random::default();
            (0..self.config.embedding_dimension)
                .map(|_| random.random::<f32>())
                .collect()
        };
        Ok(Vector::new(values))
    }

    /// Simple lifetime-value estimate: average purchase value scaled by visit
    /// frequency over a twelve-month horizon.
    async fn predict_customer_ltv(
        &self,
        purchases: &[Purchase],
        behavior: &BehaviorMetrics,
    ) -> Result<f64> {
        if purchases.is_empty() {
            return Ok(0.0);
        }

        let total_spent: f64 = purchases.iter().map(|p| p.price * p.quantity as f64).sum();
        let avg_purchase = total_spent / purchases.len() as f64;
        let frequency_factor = behavior.visit_frequency;
        let ltv = avg_purchase * frequency_factor * 12.0;
        Ok(ltv)
    }

    /// Churn risk blends purchase recency, engagement, and cart abandonment.
    async fn calculate_churn_risk(
        &self,
        behavior: &BehaviorMetrics,
        purchases: &[Purchase],
    ) -> Result<f64> {
        // Recency: scale days since the last purchase against a 90-day window.
        let recency_factor = if let Some(last_purchase) = purchases.last() {
            let days_since_last = (Utc::now() - last_purchase.purchase_date).num_days() as f64;
            (days_since_last / 90.0).min(1.0)
        } else {
            1.0
        };

        let engagement_factor = 1.0 - (behavior.visit_frequency / 10.0).min(1.0);
        let abandonment_factor = behavior.cart_abandonment_rate;

        let churn_risk =
            (recency_factor * 0.4 + engagement_factor * 0.3 + abandonment_factor * 0.3).min(1.0);
        Ok(churn_risk)
    }

    async fn generate_customer_recommendations(
        &self,
        _customer_id: &str,
        _embedding: &Vector,
    ) -> Result<Vec<ProductRecommendation>> {
        // Placeholder recommendations.
        Ok(vec![
            ProductRecommendation {
                product_id: "prod_101".to_string(),
                score: 0.95,
                reason: RecommendationReason::SimilarProducts,
                confidence: 0.85,
                expected_revenue: 199.99,
            },
            ProductRecommendation {
                product_id: "prod_102".to_string(),
                score: 0.88,
                reason: RecommendationReason::CustomersBought,
                confidence: 0.78,
                expected_revenue: 149.99,
            },
        ])
    }

    /// Employee similarity: 60% cosine similarity of embeddings plus 40%
    /// Jaccard similarity of skill sets.
    async fn calculate_employee_similarity(
        &self,
        emp1: &EmployeeEmbedding,
        emp2: &EmployeeEmbedding,
    ) -> Result<f64> {
        let embedding1 = &emp1.embedding.values;
        let embedding2 = &emp2.embedding.values;

        let dot_product: f32 = embedding1
            .iter()
            .zip(embedding2.iter())
            .map(|(a, b)| a * b)
            .sum();
        let norm1: f32 = embedding1.iter().map(|x| x * x).sum::<f32>().sqrt();
        let norm2: f32 = embedding2.iter().map(|x| x * x).sum::<f32>().sqrt();

        let cosine_similarity = if norm1 > 0.0 && norm2 > 0.0 {
            dot_product / (norm1 * norm2)
        } else {
            0.0
        };

        let skill_similarity = self
            .calculate_skill_similarity(&emp1.skills, &emp2.skills)
            .await?;

        let final_similarity = 0.6 * cosine_similarity as f64 + 0.4 * skill_similarity;

        Ok(final_similarity)
    }

    /// Jaccard similarity over skill names.
    async fn calculate_skill_similarity(
        &self,
        skills1: &[Skill],
        skills2: &[Skill],
    ) -> Result<f64> {
        let skill_set1: HashSet<_> = skills1.iter().map(|s| &s.skill_name).collect();
        let skill_set2: HashSet<_> = skills2.iter().map(|s| &s.skill_name).collect();

        let intersection = skill_set1.intersection(&skill_set2).count();
        let union = skill_set1.union(&skill_set2).count();

        if union > 0 {
            Ok(intersection as f64 / union as f64)
        } else {
            Ok(0.0)
        }
    }

    /// Fraction of the required skills covered by the employee's skill set.
    async fn calculate_skill_match_score(
        &self,
        employee_skills: &[Skill],
        required_skills: &[String],
    ) -> Result<f64> {
        // Guard against an empty requirement list to avoid dividing by zero.
        if required_skills.is_empty() {
            return Ok(0.0);
        }

        let employee_skill_names: HashSet<_> =
            employee_skills.iter().map(|s| &s.skill_name).collect();
        let required_skill_set: HashSet<_> = required_skills.iter().collect();

        let matches = required_skill_set
            .intersection(&employee_skill_names)
            .count();
        let score = matches as f64 / required_skills.len() as f64;

        Ok(score)
    }

    async fn select_optimal_team(
        &self,
        candidates: Vec<(String, f64)>,
        team_size: usize,
    ) -> Result<Vec<String>> {
        // Candidates are already sorted by score; take the top `team_size`.
        let team: Vec<String> = candidates
            .into_iter()
            .take(team_size)
            .map(|(id, _score)| id)
            .collect();

        Ok(team)
    }

    async fn identify_market_opportunities(&self) -> Result<Vec<String>> {
        // Placeholder opportunity list.
        Ok(vec![
            "AI-powered fitness devices".to_string(),
            "Sustainable electronics".to_string(),
            "Remote work solutions".to_string(),
        ])
    }

    async fn analyze_competitive_landscape(&self) -> Result<HashMap<String, f64>> {
        // Placeholder market-share estimates keyed by competitor name.
        let mut landscape = HashMap::new();
        landscape.insert("TechCorp".to_string(), 0.35);
        landscape.insert("InnovateCo".to_string(), 0.28);
        landscape.insert("FutureTech".to_string(), 0.22);
        landscape.insert("Others".to_string(), 0.15);

        Ok(landscape)
    }

    async fn generate_market_forecast(&self) -> Result<HashMap<String, f64>> {
        // Placeholder quarterly growth forecast.
        let mut forecast = HashMap::new();
        forecast.insert("Q1_growth".to_string(), 0.12);
        forecast.insert("Q2_growth".to_string(), 0.15);
        forecast.insert("Q3_growth".to_string(), 0.18);
        forecast.insert("Q4_growth".to_string(), 0.10);

        Ok(forecast)
    }

    async fn start_recommendation_engine(&self) -> JoinHandle<()> {
        let interval =
            std::time::Duration::from_secs(self.config.product_recommendation_refresh_hours * 3600);

        tokio::spawn(async move {
            let mut interval_timer = tokio::time::interval(interval);

            loop {
                interval_timer.tick().await;

                info!("Refreshing product recommendation engines");

                debug!("Product recommendation engines refreshed");
            }
        })
    }

    async fn start_skill_analysis(&self) -> JoinHandle<()> {
        let interval =
            std::time::Duration::from_secs(self.config.skill_analysis_interval_hours * 3600);

        tokio::spawn(async move {
            let mut interval_timer = tokio::time::interval(interval);

            loop {
                interval_timer.tick().await;

                info!("Performing employee skill analysis");

                debug!("Employee skill analysis completed");
            }
        })
    }

    async fn start_market_analysis(&self) -> JoinHandle<()> {
        let interval =
            std::time::Duration::from_secs(self.config.market_analysis_interval_hours * 3600);

        tokio::spawn(async move {
            let mut interval_timer = tokio::time::interval(interval);

            loop {
                interval_timer.tick().await;

                info!("Performing market trend analysis");

                debug!("Market trend analysis completed");
            }
        })
    }

    async fn start_organizational_optimization(&self) -> JoinHandle<()> {
        // Organizational optimization runs once per day.
        let interval = std::time::Duration::from_secs(24 * 3600);

        tokio::spawn(async move {
            let mut interval_timer = tokio::time::interval(interval);

            loop {
                interval_timer.tick().await;

                info!("Performing organizational optimization");

                debug!("Organizational optimization completed");
            }
        })
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MarketAnalysis {
    pub category_performance: HashMap<String, CategoryPerformance>,
    pub trending_products: Vec<String>,
    pub segment_distribution: HashMap<String, u32>,
    pub market_opportunities: Vec<String>,
    pub competitive_landscape: HashMap<String, f64>,
    pub forecast: HashMap<String, f64>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnterpriseMetrics {
    pub total_products: usize,
    pub total_employees: usize,
    pub total_customers: usize,
    pub total_revenue: f64,
    pub avg_customer_satisfaction: f64,
    pub employee_engagement: f64,
    pub organizational_efficiency: f64,
    pub innovation_index: f64,
    pub top_products: Vec<String>,
    pub top_employees: Vec<String>,
    pub high_value_customers: Vec<String>,
}

impl EnterpriseKnowledgeAnalyzer {
    /// Computes aggregate metrics across products, employees, and customers.
    pub async fn get_enterprise_metrics(&self) -> Result<EnterpriseMetrics> {
        let product_embeddings = self.product_embeddings.read().expect("lock poisoned");
        let employee_embeddings = self.employee_embeddings.read().expect("lock poisoned");
        let customer_embeddings = self.customer_embeddings.read().expect("lock poisoned");

        let total_products = product_embeddings.len();
        let total_employees = employee_embeddings.len();
        let total_customers = customer_embeddings.len();

        let total_revenue = product_embeddings
            .values()
            .map(|p| p.sales_metrics.revenue)
            .sum();

        let avg_customer_satisfaction = product_embeddings
            .values()
            .map(|p| p.ratings.average_rating)
            .sum::<f64>()
            / total_products.max(1) as f64;

        let employee_engagement = employee_embeddings
            .values()
            .map(|e| e.performance_metrics.overall_score)
            .sum::<f64>()
            / total_employees.max(1) as f64;

        // Rank products by market position.
        let mut product_scores: Vec<_> = product_embeddings
            .iter()
            .map(|(id, p)| (id.clone(), p.market_position))
            .collect();
        product_scores.sort_by(|a, b| {
            b.1.partial_cmp(&a.1)
                .expect("product scores should be finite")
        });
        let top_products: Vec<String> = product_scores
            .into_iter()
            .take(10)
            .map(|(id, _)| id)
            .collect();

        // Rank employees by overall performance score.
        let mut employee_scores: Vec<_> = employee_embeddings
            .iter()
            .map(|(id, e)| (id.clone(), e.performance_metrics.overall_score))
            .collect();
        employee_scores.sort_by(|a, b| {
            b.1.partial_cmp(&a.1)
                .expect("employee scores should be finite")
        });
        let top_employees: Vec<String> = employee_scores
            .into_iter()
            .take(10)
            .map(|(id, _)| id)
            .collect();

        // Rank customers by predicted lifetime value.
        let mut customer_values: Vec<_> = customer_embeddings
            .iter()
            .map(|(id, c)| (id.clone(), c.predicted_ltv))
            .collect();
        customer_values.sort_by(|a, b| {
            b.1.partial_cmp(&a.1)
                .expect("customer values should be finite")
        });
        let high_value_customers: Vec<String> = customer_values
            .into_iter()
            .take(10)
            .map(|(id, _)| id)
            .collect();

        Ok(EnterpriseMetrics {
            total_products,
            total_employees,
            total_customers,
            total_revenue,
            avg_customer_satisfaction,
            employee_engagement,
            // Placeholder organization-level indices.
            organizational_efficiency: 0.75,
            innovation_index: 0.68,
            top_products,
            top_employees,
            high_value_customers,
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_enterprise_analyzer_creation() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        assert_eq!(analyzer.product_embeddings.read().unwrap().len(), 0);
        assert_eq!(analyzer.employee_embeddings.read().unwrap().len(), 0);
        assert_eq!(analyzer.customer_embeddings.read().unwrap().len(), 0);
    }

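    // Sketch test: checks the weighted market-position heuristic,
    // 0.6 * (sales_velocity / 100) + 0.4 * (average_rating / 5), with
    // hand-picked sample inputs.
    #[tokio::test]
    async fn test_market_position_formula() {
        use std::collections::HashMap;

        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let sales_metrics = SalesMetrics {
            units_sold: 100,
            revenue: 10_000.0,
            sales_velocity: 50.0,
            conversion_rate: 0.1,
            return_rate: 0.02,
            profit_margin: 0.3,
        };
        let ratings = CustomerRatings {
            average_rating: 4.0,
            review_count: 10,
            rating_distribution: HashMap::new(),
            sentiment_score: 0.5,
        };

        let position = analyzer
            .calculate_market_position(&sales_metrics, &ratings)
            .await
            .unwrap();
        // 0.6 * 0.5 + 0.4 * 0.8 = 0.62
        assert!((position - 0.62).abs() < 1e-9);
    }
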
    #[tokio::test]
    async fn test_product_embedding_generation() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let result = analyzer.generate_product_embedding("test_product").await;
        assert!(result.is_ok());

        let embedding = result.unwrap();
        assert_eq!(embedding.product_id, "test_product");
        assert!(embedding.market_position >= 0.0);
        assert!(embedding.market_position <= 1.0);
        // Default embedding dimension is 256.
        assert_eq!(embedding.embedding.values.len(), 256);
    }

    #[tokio::test]
    async fn test_employee_embedding_generation() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let result = analyzer.generate_employee_embedding("test_employee").await;
        assert!(result.is_ok());

        let embedding = result.unwrap();
        assert_eq!(embedding.employee_id, "test_employee");
        assert!(embedding.career_predictions.promotion_likelihood >= 0.0);
        assert!(embedding.career_predictions.promotion_likelihood <= 1.0);
    }

    #[tokio::test]
    async fn test_customer_embedding_generation() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let result = analyzer.generate_customer_embedding("test_customer").await;
        assert!(result.is_ok());

        let embedding = result.unwrap();
        assert_eq!(embedding.customer_id, "test_customer");
        assert!(embedding.predicted_ltv >= 0.0);
        assert!(embedding.churn_risk >= 0.0);
        assert!(embedding.churn_risk <= 1.0);
    }

    #[tokio::test]
    async fn test_product_recommendations() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let _customer = analyzer
            .generate_customer_embedding("test_customer")
            .await
            .unwrap();

        let recommendations = analyzer.recommend_products("test_customer", 5).await;
        assert!(recommendations.is_ok());

        let recs = recommendations.unwrap();
        assert!(!recs.is_empty());
        assert!(recs.len() <= 5);

        for rec in &recs {
            assert!(rec.score >= 0.0);
            assert!(rec.score <= 1.0);
            assert!(rec.confidence >= 0.0);
            assert!(rec.confidence <= 1.0);
        }
    }

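    // Sketch test: checks the Jaccard skill similarity used when comparing
    // employees. Two skill sets sharing one of three distinct names should
    // score 1/3. The skill names and filler field values are illustrative.
    #[tokio::test]
    async fn test_skill_similarity_jaccard() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let mk = |name: &str| Skill {
            skill_name: name.to_string(),
            category: SkillCategory::Technical,
            proficiency_level: 5,
            years_experience: 1.0,
            role_importance: 0.5,
            market_demand: 0.5,
        };
        let skills1 = vec![mk("Python"), mk("SQL")];
        let skills2 = vec![mk("Python"), mk("Go")];

        let similarity = analyzer
            .calculate_skill_similarity(&skills1, &skills2)
            .await
            .unwrap();
        assert!((similarity - 1.0 / 3.0).abs() < 1e-9);
    }
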
    #[tokio::test]
    async fn test_market_analysis() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let _product = analyzer
            .generate_product_embedding("test_product")
            .await
            .unwrap();
        let _customer = analyzer
            .generate_customer_embedding("test_customer")
            .await
            .unwrap();

        let analysis = analyzer.analyze_market_trends().await;
        assert!(analysis.is_ok());

        let market_analysis = analysis.unwrap();
        assert!(!market_analysis.competitive_landscape.is_empty());
        assert!(!market_analysis.forecast.is_empty());
    }

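    // Sketch test: with no purchase history the recency factor saturates at 1.0,
    // so churn risk = 0.4 + 0.3 * engagement + 0.3 * abandonment, and the
    // predicted lifetime value falls back to 0. The behavior values are made up.
    #[tokio::test]
    async fn test_churn_risk_and_ltv_without_purchases() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let behavior = BehaviorMetrics {
            visit_frequency: 10.0,
            avg_session_duration: 5.0,
            avg_products_viewed: 3.0,
            cart_abandonment_rate: 0.5,
            return_visit_rate: 0.2,
            referral_rate: 0.05,
        };

        let churn_risk = analyzer.calculate_churn_risk(&behavior, &[]).await.unwrap();
        // recency 1.0 * 0.4 + engagement 0.0 * 0.3 + abandonment 0.5 * 0.3 = 0.55
        assert!((churn_risk - 0.55).abs() < 1e-9);

        let ltv = analyzer.predict_customer_ltv(&[], &behavior).await.unwrap();
        assert_eq!(ltv, 0.0);
    }
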
    #[tokio::test]
    async fn test_enterprise_metrics() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let _product = analyzer
            .generate_product_embedding("test_product")
            .await
            .unwrap();
        let _employee = analyzer
            .generate_employee_embedding("test_employee")
            .await
            .unwrap();
        let _customer = analyzer
            .generate_customer_embedding("test_customer")
            .await
            .unwrap();

        let metrics = analyzer.get_enterprise_metrics().await;
        assert!(metrics.is_ok());

        let enterprise_metrics = metrics.unwrap();
        assert_eq!(enterprise_metrics.total_products, 1);
        assert_eq!(enterprise_metrics.total_employees, 1);
        assert_eq!(enterprise_metrics.total_customers, 1);
        assert!(enterprise_metrics.total_revenue >= 0.0);
    }
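
    // Sketch test: after two employee embeddings exist, a similarity query for
    // one of them should return the other with a score in [0, 1]. The employee
    // IDs are illustrative.
    #[tokio::test]
    async fn test_find_similar_employees() {
        let config = EnterpriseConfig::default();
        let analyzer = EnterpriseKnowledgeAnalyzer::new(config);

        let _a = analyzer.generate_employee_embedding("emp_a").await.unwrap();
        let _b = analyzer.generate_employee_embedding("emp_b").await.unwrap();

        let similar = analyzer.find_similar_employees("emp_a", 5).await.unwrap();
        assert_eq!(similar.len(), 1);
        assert_eq!(similar[0].0, "emp_b");
        assert!(similar[0].1 >= 0.0);
        assert!(similar[0].1 <= 1.0);
    }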
}