1use wasm_bindgen::prelude::*;
9use serde::{Deserialize, Serialize};
10use std::collections::HashMap;
11use std::sync::{Arc, Mutex};
14use lazy_static::lazy_static;
15use crate::optimized_features::{OptimizedFeatureExtractor, OptimizedPatternMatcher};
16
17pub mod enhanced_router;
18pub mod optimized_features;
19
/// A lightweight, deterministic stand-in for a real neural network.
///
/// Weights are seeded from a fixed formula so construction, inference, and
/// training are fully reproducible. `train` records an RMSE-style error and
/// applies a small deterministic perturbation (real backprop is out of scope).
#[derive(Debug, Clone)]
pub struct MockNeuralNetwork {
    /// Layer sizes, input layer first.
    layers: Vec<u32>,
    /// Activation name: "SigmoidSymmetric" (tanh), "ReLU", or "Linear".
    activation_func: String,
    learning_rate: f32,
    /// One flat weight matrix per layer transition (row-major: next x current).
    weights: Vec<Vec<f32>>,
    /// Last RMSE recorded by `train`; starts pessimistic at 1.0.
    error: f32,
}

impl MockNeuralNetwork {
    /// Builds a network with deterministic pseudo-random weights in [-0.5, 0.5).
    pub fn new(layers: &[u32], activation_func: &str, learning_rate: f32) -> Self {
        // One weight matrix per adjacent layer pair; `windows(2)` naturally
        // yields nothing for 0- or 1-layer networks.
        let weights = layers
            .windows(2)
            .map(|pair| {
                (0..(pair[0] * pair[1]))
                    .map(|j| ((j as f32 * 0.1) % 1.0) - 0.5)
                    .collect()
            })
            .collect();

        Self {
            layers: layers.to_vec(),
            activation_func: activation_func.to_string(),
            learning_rate,
            weights,
            error: 1.0,
        }
    }

    /// Runs a forward pass.
    ///
    /// # Errors
    /// Returns an error if the network has no layers or if `input` does not
    /// match the input layer size.
    pub fn run(&self, input: &[f32]) -> Result<Vec<f32>, Box<dyn std::error::Error>> {
        // Guard against an empty topology instead of panicking on `layers[0]`.
        let expected = *self.layers.first().ok_or("Network has no layers")? as usize;
        if input.len() != expected {
            return Err(format!(
                "Input size {} doesn't match expected {}",
                input.len(),
                expected
            )
            .into());
        }

        let mut current = input.to_vec();

        for (i, layer_weights) in self.weights.iter().enumerate() {
            let next_size = self.layers[i + 1] as usize;
            let mut next_layer = vec![0.0; next_size];

            for (j, out) in next_layer.iter_mut().enumerate() {
                let mut sum = 0.0;
                for (k, &val) in current.iter().enumerate() {
                    let weight_idx = j * current.len() + k;
                    if let Some(&w) = layer_weights.get(weight_idx) {
                        sum += val * w;
                    }
                }

                *out = match self.activation_func.as_str() {
                    // tanh(sum): numerically stable, unlike the naive
                    // (e^x - e^-x)/(e^x + e^-x), which overflows to NaN
                    // for |x| greater than roughly 88 in f32.
                    "SigmoidSymmetric" => sum.tanh(),
                    "ReLU" => sum.max(0.0),
                    "Linear" => sum,
                    // Unknown activation names fall back to identity.
                    _ => sum,
                };
            }

            current = next_layer;
        }

        Ok(current)
    }

    /// Mock training step: records RMSE against `target` and deterministically
    /// perturbs the first few weights of each layer.
    pub fn train(&mut self, input: &[f32], target: &[f32]) {
        if let Ok(output) = self.run(input) {
            let compared = output.len().min(target.len());
            if compared > 0 {
                let total_error: f32 = output
                    .iter()
                    .zip(target)
                    .map(|(o, t)| (o - t).powi(2))
                    .sum();
                // Divide by the number of pairs actually compared; the original
                // divided by output.len(), skewing the RMSE when sizes differ.
                self.error = (total_error / compared as f32).sqrt();
            }

            // Deterministic pseudo-noise replaces the former fastrand jitter so
            // repeated runs are reproducible (and the mock needs no RNG crate).
            let lr = self.learning_rate;
            for (li, layer_weights) in self.weights.iter_mut().enumerate() {
                for (wi, weight) in layer_weights.iter_mut().take(10).enumerate() {
                    let noise = ((li * 31 + wi * 17) as f32).sin() * 0.5;
                    *weight += noise * lr * 0.1;
                }
            }
        }
    }

    /// Last RMSE recorded by `train` (1.0 before any training).
    pub fn get_error(&self) -> f32 {
        self.error
    }
}
109
/// Crate version string, injected at compile time from Cargo.toml.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
112
113#[derive(Debug, Clone, Serialize, Deserialize)]
115pub struct NeuralConfig {
116 pub input_size: u32,
117 pub hidden_layers: Vec<u32>,
118 pub output_size: u32,
119 pub activation_func: String, pub learning_rate: f32,
121}
122
/// Serializable dump of a network's parameters (used for the DOMAIN_WEIGHTS
/// cache). NOTE(review): layout of the flat vectors is presumably
/// layer-by-layer, matching `layer_sizes` — confirm with the producing code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NeuralWeights {
    // All connection weights, flattened.
    pub weights: Vec<f32>,
    // All neuron biases, flattened.
    pub biases: Vec<f32>,
    // Layer widths needed to reshape `weights`/`biases`.
    pub layer_sizes: Vec<u32>,
}
130
/// A tiny fixed vocabulary with deterministic, procedurally generated
/// embeddings (no randomness, no external data).
#[derive(Debug, Clone)]
pub struct TokenEmbedding {
    /// The vocabulary words, in fixed order.
    pub tokens: Vec<String>,
    /// One embedding vector per token, `embedding_dim` entries each.
    pub embeddings: Vec<Vec<f32>>,
    /// Number of tokens (== `tokens.len()`).
    pub vocab_size: usize,
    /// Dimensionality of each embedding vector.
    pub embedding_dim: usize,
}

impl TokenEmbedding {
    /// Builds the built-in 32-token vocabulary. Each token's embedding is
    /// derived from a fixed sinusoidal formula of its (token, dimension)
    /// indices, yielding values in [-0.5, 0.5].
    pub fn new() -> Self {
        const WORDS: [&str; 32] = [
            "the", "and", "or", "but",
            "if", "then", "else", "when",
            "how", "what", "why", "where",
            "function", "class", "method", "variable",
            "calculate", "solve", "analyze", "explain",
            "code", "program", "algorithm", "data",
            "neural", "network", "ai", "machine",
            "learning", "intelligence", "reasoning", "logic",
        ];

        let embedding_dim = 32;
        let tokens: Vec<String> = WORDS.iter().map(|w| w.to_string()).collect();

        // Deterministic per-(token, dim) values; sin keeps them bounded.
        let embeddings: Vec<Vec<f32>> = (0..tokens.len())
            .map(|i| {
                (0..embedding_dim)
                    .map(|j| ((i * 37 + j * 17) as f32 / 1000.0).sin() * 0.5)
                    .collect()
            })
            .collect();

        TokenEmbedding {
            vocab_size: tokens.len(),
            embedding_dim,
            tokens,
            embeddings,
        }
    }
}
176
lazy_static! {
    // Process-wide shared vocabulary, built once on first access.
    // NOTE(review): std::sync::LazyLock could replace lazy_static on
    // Rust >= 1.80 — confirm the crate's MSRV before migrating.
    static ref GLOBAL_VOCAB: Arc<Mutex<TokenEmbedding>> = Arc::new(Mutex::new(
        TokenEmbedding::new()
    ));

    // Lazily populated per-domain cache of serialized network weights.
    static ref DOMAIN_WEIGHTS: Arc<Mutex<HashMap<ExpertDomain, NeuralWeights>>> = Arc::new(Mutex::new(
        HashMap::new()
    ));
}
188
/// The six specialist domains an input can be routed to.
/// Explicit discriminants keep the numeric values stable across the
/// wasm-bindgen boundary (JavaScript sees these as plain integers).
#[wasm_bindgen]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ExpertDomain {
    Reasoning = 0,
    Coding = 1,
    Language = 2,
    Mathematics = 3,
    ToolUse = 4,
    Context = 5,
}
200
impl ExpertDomain {
    /// Returns the network topology and training hyperparameters for this
    /// domain. Each domain gets a distinct input size, two hidden layers,
    /// an activation, and a learning rate.
    pub fn neural_config(&self) -> NeuralConfig {
        match self {
            ExpertDomain::Reasoning => NeuralConfig {
                input_size: 128,
                hidden_layers: vec![64, 32],
                output_size: 32,
                activation_func: "SigmoidSymmetric".to_string(),
                learning_rate: 0.001,
            },
            ExpertDomain::Coding => NeuralConfig {
                input_size: 192,
                hidden_layers: vec![96, 48],
                output_size: 48,
                activation_func: "ReLU".to_string(),
                learning_rate: 0.002,
            },
            ExpertDomain::Language => NeuralConfig {
                input_size: 256,
                hidden_layers: vec![128, 64],
                output_size: 64,
                activation_func: "SigmoidSymmetric".to_string(),
                learning_rate: 0.0015,
            },
            ExpertDomain::Mathematics => NeuralConfig {
                input_size: 96,
                hidden_layers: vec![48, 24],
                output_size: 24,
                activation_func: "Linear".to_string(),
                learning_rate: 0.001,
            },
            ExpertDomain::ToolUse => NeuralConfig {
                input_size: 64,
                hidden_layers: vec![32, 16],
                output_size: 16,
                activation_func: "ReLU".to_string(),
                learning_rate: 0.003,
            },
            ExpertDomain::Context => NeuralConfig {
                input_size: 160,
                hidden_layers: vec![80, 40],
                output_size: 40,
                activation_func: "SigmoidSymmetric".to_string(),
                learning_rate: 0.0012,
            },
        }
    }

    /// Keyword patterns associated with this domain, used both for routing
    /// and (via `train_domain_network`) as synthetic training inputs.
    pub fn domain_patterns(&self) -> Vec<&'static str> {
        match self {
            ExpertDomain::Reasoning => vec![
                "analyze", "logic", "reason", "because", "therefore", "conclude", "infer", "deduce",
                "argue", "evidence", "premise", "hypothesis", "theory", "proof", "justify",
                "meaning", "life", "consciousness", "free", "will", "reality", "existence", "philosophy",
                "purpose", "truth", "knowledge", "mind", "thought", "belief", "ethics", "moral"
            ],
            ExpertDomain::Coding => vec![
                "function", "class", "variable", "loop", "if", "else", "return", "import", "def",
                "code", "program", "algorithm", "debug", "compile", "syntax", "python", "javascript",
                "implement", "array", "recursion", "fibonacci", "sort", "search", "binary", "linked",
                "design", "architecture", "model", "cnn", "conv2d", "classification"
            ],
            ExpertDomain::Language => vec![
                "translate", "grammar", "sentence", "word", "language", "text", "synonym",
                "linguistic", "read", "speak", "communication", "literature", "poetry", "prose"
            ],
            ExpertDomain::Mathematics => vec![
                "calculate", "equation", "solve", "integral", "derivative", "algebra", "geometry",
                "statistics", "probability", "matrix", "vector", "theorem", "proof", "formula",
                "add", "subtract", "multiply", "divide", "sum", "difference", "product", "quotient",
                "plus", "minus", "times", "divided", "number", "arithmetic", "computation", "math"
            ],
            ExpertDomain::ToolUse => vec![
                "tool", "api", "function", "call", "execute", "run", "command", "action",
                "operation", "method", "procedure", "workflow", "automation", "script"
            ],
            ExpertDomain::Context => vec![
                "context", "background", "history", "previous", "remember", "relate", "connect",
                "reference", "mention", "discuss", "topic", "subject", "theme", "continuation"
            ],
        }
    }
}
286
/// Static description of one expert: its domain plus derived metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExpertConfig {
    pub domain: ExpertDomain,
    // Sum of hidden-layer widths (see MicroExpert::new) — a rough size
    // proxy, not a true weight count.
    pub parameter_count: usize,
    pub learning_rate: f32,
    // Full topology, when known.
    pub neural_config: Option<NeuralConfig>,
}
295
296#[wasm_bindgen]
298pub struct MicroExpert {
299 domain: ExpertDomain,
300 #[allow(dead_code)]
301 config: ExpertConfig,
302 network: Option<MockNeuralNetwork>, #[allow(dead_code)]
304 weights: Option<NeuralWeights>,
305 neural_config: NeuralConfig,
306 training_iterations: u32,
307}
308
309#[wasm_bindgen]
310impl MicroExpert {
311 #[wasm_bindgen(constructor)]
313 pub fn new(domain: ExpertDomain) -> MicroExpert {
314 let neural_config = domain.neural_config();
315 let config = ExpertConfig {
316 domain,
317 parameter_count: neural_config.hidden_layers.iter().sum::<u32>() as usize,
318 learning_rate: neural_config.learning_rate,
319 neural_config: Some(neural_config.clone()),
320 };
321
322 let mut expert = MicroExpert {
323 domain,
324 config,
325 network: None,
326 weights: None,
327 neural_config,
328 training_iterations: 0,
329 };
330
331 if let Err(e) = expert.initialize_network() {
333 web_sys::console::warn_1(&format!("Failed to initialize network: {}", e).into());
334 }
335
336 expert
337 }
338
339 #[wasm_bindgen]
341 pub fn process(&self, input: &str) -> String {
342 match self.neural_inference(input) {
343 Ok(result) => result,
344 Err(_) => {
345 self.enhanced_pattern_processing(input)
347 }
348 }
349 }
350}
351
352impl MicroExpert {
353 fn initialize_network(&mut self) -> Result<(), Box<dyn std::error::Error>> {
355 let mut layers = vec![self.neural_config.input_size];
357 layers.extend(&self.neural_config.hidden_layers);
358 layers.push(self.neural_config.output_size);
359
360 let mut network = MockNeuralNetwork::new(&layers, &self.neural_config.activation_func, self.neural_config.learning_rate);
362
363 self.train_domain_network(&mut network)?;
365
366 self.network = Some(network);
367 Ok(())
368 }
369
370 fn train_domain_network(&mut self, network: &mut MockNeuralNetwork) -> Result<(), Box<dyn std::error::Error>> {
372 let domain_patterns = self.domain.domain_patterns();
373 let mut training_inputs = Vec::new();
374 let mut training_outputs = Vec::new();
375
376 for (i, pattern) in domain_patterns.iter().enumerate().take(8) {
378 let input_vector = self.text_to_vector_basic(pattern)?;
379 let mut output_vector = vec![0.0; self.neural_config.output_size as usize];
380
381 for j in 0..output_vector.len() {
383 output_vector[j] = if j % (i + 1) == 0 { 1.0 } else { 0.0 };
384 }
385
386 training_inputs.push(input_vector);
387 training_outputs.push(output_vector);
388 }
389
390 for epoch in 0..25 {
392 for (input, output) in training_inputs.iter().zip(training_outputs.iter()) {
393 network.train(input, output);
394 }
395
396 if epoch % 5 == 0 {
398 let error = network.get_error();
399 if error < 0.01 {
400 break;
401 }
402 }
403 }
404
405 self.training_iterations += 25;
406 Ok(())
407 }
408
409 fn neural_inference(&self, input: &str) -> Result<String, Box<dyn std::error::Error>> {
411 let network = self.network.as_ref()
412 .ok_or("Neural network not initialized")?;
413
414 let input_vector = self.text_to_vector_basic(input)?;
416
417 let output = network.run(&input_vector)?;
419
420 let response = self.vector_to_response(&output, input)?;
422
423 Ok(response)
424 }
425
426 fn text_to_vector_basic(&self, text: &str) -> Result<Vec<f32>, Box<dyn std::error::Error>> {
428 let mut extractor = OptimizedFeatureExtractor::new(self.domain, self.neural_config.input_size as usize);
430 let optimized_features = extractor.extract_features(text);
431
432 let mut input_vector = vec![0.0; self.neural_config.input_size as usize];
434 let copy_len = optimized_features.len().min(input_vector.len());
435 input_vector[..copy_len].copy_from_slice(&optimized_features[..copy_len]);
436
437 Ok(input_vector)
438 }
439
    /// Turns the network's raw output vector into a human-readable response:
    /// derives summary statistics, dispatches to the domain-specific response
    /// generator, and appends a diagnostics suffix.
    fn vector_to_response(&self, output: &[f32], input: &str) -> Result<String, Box<dyn std::error::Error>> {
        // "Confidence" is the mean absolute activation — a heuristic score,
        // not a calibrated probability.
        let confidence = output.iter().map(|&x| x.abs()).sum::<f32>() / output.len() as f32;
        let max_val = output.iter().fold(f32::NEG_INFINITY, |a, &b| a.max(b));
        // NOTE(review): spread is measured around `confidence` (mean |x|),
        // not the true mean, so this is not a textbook variance — confirm
        // that is intentional.
        let variance = output.iter().map(|&x| (x - confidence).powi(2)).sum::<f32>() / output.len() as f32;

        // Indices whose activation exceeds 60% of the peak.
        // NOTE(review): if max_val is negative the threshold flips sign and
        // selects most values — verify this edge case is acceptable.
        let dominant_indices: Vec<usize> = output.iter()
            .enumerate()
            .filter(|(_, &val)| val > max_val * 0.6)
            .map(|(i, _)| i)
            .collect();

        // Route to the generator matching this expert's domain.
        let response = match self.domain {
            ExpertDomain::Reasoning => {
                self.generate_reasoning_response(input, confidence, &dominant_indices)
            },
            ExpertDomain::Coding => {
                self.generate_coding_response(input, confidence, &dominant_indices)
            },
            ExpertDomain::Language => {
                self.generate_language_response(input, confidence, &dominant_indices)
            },
            ExpertDomain::Mathematics => {
                self.generate_math_response(input, confidence, &dominant_indices)
            },
            ExpertDomain::ToolUse => {
                self.generate_tool_response(input, confidence, &dominant_indices)
            },
            ExpertDomain::Context => {
                self.generate_context_response(input, confidence, &dominant_indices)
            },
        };

        // Diagnostics suffix surfaced to the caller for debugging/telemetry.
        let metadata = format!(" [Neural: conf={:.3}, patterns={}, var={:.3}]",
            confidence, dominant_indices.len(), variance);

        Ok(format!("{}{}", response, metadata))
    }
482
    /// Produces a canned response for reasoning/philosophy-style queries by
    /// keyword matching on the lowercased input. Branches are ordered from
    /// most to least specific; the first match wins. The neural confidence
    /// and pattern arguments are currently unused here.
    fn generate_reasoning_response(&self, input: &str, _confidence: f32, _patterns: &[usize]) -> String {
        let query_lower = input.to_lowercase();

        if query_lower.contains("machine learning") || (query_lower.contains("what is") && query_lower.contains("ml")) {
            "Machine Learning (ML) is a subset of artificial intelligence that enables computer systems to learn and improve from experience without being explicitly programmed. ML algorithms build mathematical models based on training data to make predictions or decisions. The three main types are:\n\n1. **Supervised Learning**: Learning from labeled data (e.g., classification, regression)\n2. **Unsupervised Learning**: Finding patterns in unlabeled data (e.g., clustering, dimensionality reduction)\n3. **Reinforcement Learning**: Learning through interaction with an environment using rewards and penalties\n\nCommon applications include image recognition, natural language processing, recommendation systems, and predictive analytics.".to_string()
        } else if query_lower.contains("deep learning") {
            "Deep Learning is a subset of machine learning based on artificial neural networks with multiple layers (hence 'deep'). These networks can learn hierarchical representations of data, automatically discovering features at different levels of abstraction. Key concepts include:\n\n• **Neural Networks**: Interconnected layers of artificial neurons\n• **Backpropagation**: Algorithm for training by adjusting weights\n• **CNNs**: Convolutional Neural Networks for image processing\n• **RNNs/LSTMs**: For sequential data like text or time series\n• **Transformers**: Architecture behind models like GPT and BERT\n\nDeep learning powers many modern AI applications including computer vision, speech recognition, and language models.".to_string()
        } else if query_lower.contains("neural network") && !query_lower.contains("deep learning") {
            "A Neural Network is a computing system inspired by biological neural networks in the brain. It consists of:\n\n• **Input Layer**: Receives raw data\n• **Hidden Layers**: Process and transform information\n• **Output Layer**: Produces final predictions\n• **Neurons/Nodes**: Basic units that apply activation functions\n• **Weights**: Connection strengths between neurons\n• **Bias**: Offset values for each neuron\n\nNeural networks learn by adjusting weights through training algorithms like gradient descent, enabling them to recognize patterns and make predictions from data.".to_string()
        // NOTE(review): this "ai" detection uses padded-substring checks to
        // avoid matching words containing "ai" (e.g. "air"), but "ai " still
        // matches prefixes like "aim " — confirm acceptable.
        } else if (query_lower.contains(" ai ") || query_lower.contains("ai ") || query_lower.contains(" ai") || query_lower == "ai" || query_lower.contains("what is ai") || query_lower.contains("artificial intelligence")) && !query_lower.contains("explain") {
            "AI (Artificial Intelligence) refers to computer systems that can perform tasks typically requiring human intelligence, such as learning, reasoning, problem-solving, and understanding language. AI systems use algorithms and data to make decisions and predictions. Modern AI includes:\n\n• **Machine Learning**: Systems that learn from data\n• **Natural Language Processing**: Understanding human language\n• **Computer Vision**: Interpreting visual information\n• **Robotics**: Physical embodiment of AI systems\n• **Expert Systems**: Rule-based decision making".to_string()
        } else if query_lower.contains("algorithm") && (query_lower.contains("what") || query_lower.contains("explain")) {
            "An algorithm is a step-by-step procedure or set of rules for solving a problem or completing a task. In computer science, algorithms are fundamental building blocks that:\n\n• Define precise instructions for computation\n• Transform input data into desired output\n• Have measurable time and space complexity\n• Can be expressed in pseudocode or programming languages\n\nCommon algorithm categories include sorting (quicksort, mergesort), searching (binary search), graph algorithms (Dijkstra's, A*), and dynamic programming. Algorithm efficiency is measured using Big O notation.".to_string()
        } else if query_lower.contains("data structure") {
            "Data structures are specialized formats for organizing, storing, and accessing data efficiently. Common data structures include:\n\n**Linear Structures:**\n• Arrays: Fixed-size sequential storage\n• Linked Lists: Dynamic size with node connections\n• Stacks: LIFO (Last In, First Out)\n• Queues: FIFO (First In, First Out)\n\n**Non-Linear Structures:**\n• Trees: Hierarchical data (Binary Trees, BST, AVL)\n• Graphs: Networks of nodes and edges\n• Hash Tables: Key-value pairs with O(1) average access\n• Heaps: Priority-based complete binary trees\n\nChoosing the right data structure is crucial for algorithm efficiency.".to_string()
        } else if query_lower.contains("purpose") || query_lower.contains("what are you") {
            "I'm Kimi, a neural inference engine designed to help answer questions across multiple domains including reasoning, coding, mathematics, language, and more. I use specialized expert networks to provide intelligent responses.".to_string()
        } else if query_lower.contains("how") && query_lower.contains("work") {
            "I work by routing your questions to specialized neural expert networks. Each expert is trained for specific domains like coding, math, or reasoning. The system analyzes your question and selects the most appropriate expert to provide a response.".to_string()
        } else if query_lower.contains("hello") || query_lower.contains("hi") || query_lower.contains("hey") {
            "Hello! I'm Kimi, your neural inference assistant. I can help you with questions about programming, mathematics, language, reasoning, and more. What would you like to know?".to_string()
        } else if query_lower.contains("meaning of life") || query_lower.contains("meaning in life") {
            "The meaning of life is one of humanity's oldest philosophical questions. Different perspectives offer various answers:\n\n**Philosophical Views:**\n• **Existentialism**: We create our own meaning through choices and actions\n• **Stoicism**: Live virtuously in harmony with nature and reason\n• **Hedonism**: Pursue pleasure and happiness\n• **Nihilism**: Life has no inherent meaning\n\n**Religious/Spiritual**: Many find meaning through faith, service, and connection to the divine\n\n**Humanistic**: Meaning comes from relationships, personal growth, and contributing to society\n\n**Scientific**: From a biological perspective, life's 'purpose' is survival and reproduction, but humans seek deeper significance\n\nUltimately, the meaning of life may be deeply personal - what gives *your* life meaning?".to_string()
        } else if query_lower.contains("what is consciousness") || query_lower.contains("explain consciousness") {
            "Consciousness is the subjective experience of awareness - the feeling of 'what it's like' to be you. This profound mystery involves:\n\n**Key Aspects:**\n• **Awareness**: Perception of self and environment\n• **Qualia**: Subjective experiences (the 'redness' of red)\n• **Self-reflection**: Thinking about thinking\n• **Integration**: Binding disparate inputs into unified experience\n\n**Major Theories:**\n• **Integrated Information Theory**: Consciousness arises from integrated information\n• **Global Workspace**: Consciousness as a 'broadcast' system in the brain\n• **Panpsychism**: Consciousness as fundamental property of matter\n• **Emergentism**: Consciousness emerges from complex neural activity\n\n**The Hard Problem**: Explaining how physical processes create subjective experience remains one of science's greatest challenges.".to_string()
        } else if query_lower.contains("free will") || query_lower.contains("do we have free will") {
            "Free will - whether our choices are truly free or determined - is a central philosophical debate:\n\n**Positions:**\n• **Libertarian Free Will**: We have genuine agency; our choices aren't predetermined\n• **Hard Determinism**: All events, including choices, are caused by prior events\n• **Compatibilism**: Free will and determinism can coexist\n• **Hard Incompatibilism**: Free will is incompatible with both determinism and indeterminism\n\n**Scientific Perspective**: Neuroscience shows brain activity preceding conscious decisions, suggesting our sense of choice may be illusory. However, the debate continues.\n\n**Practical View**: Whether or not free will exists metaphysically, the experience of choice shapes our lives, morality, and society.".to_string()
        } else if query_lower.contains("what is reality") || query_lower.contains("nature of reality") {
            "The nature of reality is a fundamental question spanning philosophy, physics, and consciousness:\n\n**Philosophical Views:**\n• **Materialism**: Only physical matter exists\n• **Idealism**: Reality is fundamentally mental/experiential\n• **Dualism**: Both mental and physical substances exist\n• **Simulation Hypothesis**: Reality might be a computed simulation\n\n**Physics Perspectives:**\n• **Quantum Mechanics**: Reality is probabilistic, not deterministic\n• **Relativity**: Space and time are unified and relative\n• **String Theory**: Reality has hidden dimensions\n• **Many Worlds**: All possibilities exist in parallel universes\n\n**Eastern Philosophy**: Reality as illusion (Maya) or interdependent arising\n\nThe question remains open - we experience reality through consciousness, but consciousness itself is part of the reality we're trying to understand.".to_string()
        } else if query_lower.contains("what is love") || query_lower.contains("define love") {
            "Love is a complex phenomenon spanning biology, psychology, and philosophy:\n\n**Biological Basis:**\n• Neurochemicals: Dopamine (attraction), oxytocin (bonding), serotonin (happiness)\n• Evolutionary function: Pair bonding for offspring survival\n• Brain regions: Reward system, attachment circuits\n\n**Types of Love (Greek concepts):**\n• **Eros**: Romantic, passionate love\n• **Agape**: Unconditional, universal love\n• **Philia**: Deep friendship\n• **Storge**: Family love\n\n**Psychological View**: Attachment, intimacy, and commitment (Sternberg's Triangle)\n\n**Philosophical**: Love as recognition of beauty, truth, or the divine in another\n\nLove transforms us - it's simultaneously a feeling, a choice, an action, and perhaps what gives life its deepest meaning.".to_string()
        } else if query_lower.contains("purpose of existence") || query_lower.contains("why do we exist") || query_lower.contains("why exist") {
            "The question of why we exist touches the deepest mysteries:\n\n**Scientific View**: We exist due to cosmic evolution - from Big Bang to stars to planets to life. But this explains 'how', not 'why'.\n\n**Philosophical Perspectives:**\n• **Teleological**: Existence has inherent purpose/direction\n• **Absurdist**: We exist without inherent purpose, but can create meaning\n• **Buddhist**: Existence is suffering; the goal is liberation\n• **Existentialist**: Existence precedes essence - we define ourselves\n\n**Anthropic Principle**: We exist to observe the universe; a universe without observers is meaningless\n\n**Personal View**: Perhaps the question itself is the answer - beings capable of wondering 'why' create meaning through that very wonder.\n\nYour existence allows you to experience, create, love, and ponder these very questions.".to_string()
        } else if query_lower.contains("explain") && query_lower.contains("loops") && query_lower.contains("programming") {
            "Loops are control structures that repeat code blocks. Main types:\n\n**1. For Loop**: Iterate a specific number of times\n**2. While Loop**: Continue while condition is true\n**3. For-Each/For-In**: Iterate over collections\n**4. Do-While**: Execute at least once\n\nLoops are essential for automation, data processing, and reducing code repetition.".to_string()
        } else if query_lower.contains("explain") && query_lower.contains("statistics") {
            "Statistics is the science of collecting, analyzing, and interpreting data. Key concepts include:\n\n• **Descriptive Statistics**: Mean, median, mode, standard deviation\n• **Inferential Statistics**: Hypothesis testing, confidence intervals\n• **Probability**: Foundation for statistical inference\n• **Regression**: Modeling relationships between variables\n\nStatistics is crucial for data science, research, and decision-making.".to_string()
        } else {
            // Generic fallback: length is used as a crude complexity proxy.
            format!("Analyzing '{}' through logical reasoning: This appears to be a {} complexity query requiring systematic analysis and structured thinking to provide a comprehensive response.", input, if input.len() > 50 { "high" } else { "moderate" })
        }
    }
528
    /// Produces a canned response for programming queries by keyword matching
    /// on the lowercased input. Branches are ordered most-specific-first; the
    /// first match wins. The neural confidence and pattern arguments are
    /// currently unused here.
    fn generate_coding_response(&self, input: &str, _confidence: f32, _patterns: &[usize]) -> String {
        let query_lower = input.to_lowercase();

        if query_lower.contains("array") && (query_lower.contains("what") || query_lower.contains("explain")) {
            "An array is a fundamental data structure that stores elements in contiguous memory locations. Key characteristics:\n\n• **Fixed Size**: Size is determined at creation (in most languages)\n• **Indexed Access**: Elements accessed by position (0-based or 1-based)\n• **O(1) Access**: Direct access to any element by index\n• **Same Type**: All elements typically of the same data type\n\n```python\n# Python array/list examples\narr = [1, 2, 3, 4, 5]\narr[0] # Access: O(1)\narr.append(6) # Add to end: O(1)\narr.insert(0, 0) # Insert at position: O(n)\n```\n\nArrays are ideal for scenarios requiring fast random access and when the size is known beforehand.".to_string()
        } else if query_lower.contains("loop") && (query_lower.contains("what") || query_lower.contains("types") || query_lower.contains("explain")) {
            "Loops are control structures that repeat code blocks. Main types:\n\n**1. For Loop**: Iterate a specific number of times\n```python\nfor i in range(5):\n print(i) # 0, 1, 2, 3, 4\n```\n\n**2. While Loop**: Continue while condition is true\n```python\ncount = 0\nwhile count < 5:\n print(count)\n count += 1\n```\n\n**3. For-Each/For-In**: Iterate over collections\n```python\nfor item in [1, 2, 3]:\n print(item)\n```\n\n**4. Do-While**: Execute at least once (not in Python)\n```javascript\ndo {\n console.log(i);\n i++;\n} while (i < 5);\n```".to_string()
        } else if query_lower.contains("recursion") {
            "Recursion is a programming technique where a function calls itself to solve smaller instances of the same problem. Key components:\n\n• **Base Case**: Condition to stop recursion\n• **Recursive Case**: Function calls itself with modified parameters\n\n```python\n# Classic recursion example - factorial\ndef factorial(n):\n # Base case\n if n <= 1:\n return 1\n # Recursive case\n return n * factorial(n - 1)\n\n# Tree traversal example\ndef print_tree(node):\n if node is None: # Base case\n return\n print(node.value)\n print_tree(node.left) # Recursive calls\n print_tree(node.right)\n```\n\n**Pros**: Elegant for tree/graph problems\n**Cons**: Stack overflow risk, often less efficient than iteration".to_string()
        } else if query_lower.contains("fibonacci") {
            "Here's a Python function to calculate Fibonacci numbers:\n\n```python\ndef fibonacci(n):\n if n <= 1:\n return n\n return fibonacci(n-1) + fibonacci(n-2)\n\n# More efficient iterative version:\ndef fibonacci_iterative(n):\n if n <= 1:\n return n\n a, b = 0, 1\n for _ in range(2, n + 1):\n a, b = b, a + b\n return b\n```".to_string()
        } else if query_lower.contains("sort") && !query_lower.contains("algorithm") {
            "Here are common sorting algorithms:\n\n```python\n# Bubble Sort - O(n²)\ndef bubble_sort(arr):\n n = len(arr)\n for i in range(n):\n for j in range(0, n-i-1):\n if arr[j] > arr[j+1]:\n arr[j], arr[j+1] = arr[j+1], arr[j]\n return arr\n\n# Quick Sort - O(n log n) average\ndef quicksort(arr):\n if len(arr) <= 1:\n return arr\n pivot = arr[len(arr) // 2]\n left = [x for x in arr if x < pivot]\n middle = [x for x in arr if x == pivot]\n right = [x for x in arr if x > pivot]\n return quicksort(left) + middle + quicksort(right)\n```".to_string()
        } else if query_lower.contains("design") && query_lower.contains("neural network") {
            "Here's a comprehensive neural network design for image classification:\n\n**Architecture:**\n```python\n# CNN for Image Classification\nmodel = Sequential([\n # Input layer\n Input(shape=(224, 224, 3)),\n \n # Convolutional blocks\n Conv2D(32, (3, 3), activation='relu', padding='same'),\n BatchNormalization(),\n MaxPooling2D((2, 2)),\n Dropout(0.25),\n \n Conv2D(64, (3, 3), activation='relu', padding='same'),\n BatchNormalization(),\n MaxPooling2D((2, 2)),\n Dropout(0.25),\n \n Conv2D(128, (3, 3), activation='relu', padding='same'),\n BatchNormalization(),\n MaxPooling2D((2, 2)),\n Dropout(0.4),\n \n # Dense layers\n Flatten(),\n Dense(512, activation='relu'),\n BatchNormalization(),\n Dropout(0.5),\n Dense(num_classes, activation='softmax')\n])\n\n# Compile\nmodel.compile(\n optimizer=Adam(learning_rate=0.001),\n loss='categorical_crossentropy',\n metrics=['accuracy']\n)\n```\n\n**Key Components:**\n• **Convolutional Layers**: Extract spatial features\n• **Pooling**: Reduce spatial dimensions\n• **BatchNorm**: Stabilize training\n• **Dropout**: Prevent overfitting\n• **Data Augmentation**: Improve generalization\n\n**Training Tips:**\n• Use transfer learning (ResNet, EfficientNet)\n• Apply data augmentation\n• Use learning rate scheduling\n• Monitor validation loss for early stopping".to_string()
        } else if query_lower.contains("linked list") {
            "A Linked List is a linear data structure where elements are stored in nodes, each containing data and a reference to the next node:\n\n```python\nclass Node:\n def __init__(self, data):\n self.data = data\n self.next = None\n\nclass LinkedList:\n def __init__(self):\n self.head = None\n \n def append(self, data):\n new_node = Node(data)\n if not self.head:\n self.head = new_node\n return\n current = self.head\n while current.next:\n current = current.next\n current.next = new_node\n```\n\n**Advantages**: Dynamic size, efficient insertion/deletion\n**Disadvantages**: No random access, extra memory for pointers".to_string()
        } else if query_lower.contains("binary search") {
            "Binary Search is an efficient algorithm for finding an element in a sorted array by repeatedly dividing the search interval in half:\n\n```python\ndef binary_search(arr, target):\n left, right = 0, len(arr) - 1\n \n while left <= right:\n mid = (left + right) // 2\n \n if arr[mid] == target:\n return mid\n elif arr[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n \n return -1 # Not found\n\n# Recursive version\ndef binary_search_recursive(arr, target, left, right):\n if left > right:\n return -1\n \n mid = (left + right) // 2\n if arr[mid] == target:\n return mid\n elif arr[mid] < target:\n return binary_search_recursive(arr, target, mid + 1, right)\n else:\n return binary_search_recursive(arr, target, left, mid - 1)\n```\n\n**Time Complexity**: O(log n)\n**Requirement**: Array must be sorted".to_string()
        } else if query_lower.contains("reverse") && query_lower.contains("string") {
            "Here are ways to reverse a string in Python:\n\n```python\n# Method 1: Slicing (most Pythonic)\ndef reverse_string(s):\n return s[::-1]\n\n# Method 2: Using reversed()\ndef reverse_string2(s):\n return ''.join(reversed(s))\n\n# Method 3: Loop\ndef reverse_string3(s):\n result = ''\n for char in s:\n result = char + result\n return result\n\n# Method 4: Recursion\ndef reverse_string4(s):\n if len(s) <= 1:\n return s\n return s[-1] + reverse_string4(s[:-1])\n```".to_string()
        } else if query_lower.contains("function") || query_lower.contains("code") {
            // Broad catch-all for generic programming requests.
            format!("For the programming task '{}', I recommend breaking it down into smaller functions, using appropriate data structures, following naming conventions, and including error handling. Consider the algorithm's time complexity and test edge cases.", input)
        } else {
            // Generic fallback when nothing specific matched.
            format!("Programming analysis of '{}': This requires understanding the problem requirements, choosing appropriate algorithms and data structures, and implementing clean, efficient code with proper testing.", input)
        }
    }
557
    /// Generate a language-domain reply for `input`.
    ///
    /// Keyword-matches the lowercased query against a fixed topic list
    /// (NLP, grammar, etymology, rhetoric, greetings, translation) and
    /// returns the first matching canned explanation; otherwise falls back
    /// to a generic templated analysis string. Branch order matters: the
    /// first match wins.
    ///
    /// `_confidence` and `_patterns` are unused here; they keep the
    /// signature uniform with the sibling `generate_*_response` methods.
    fn generate_language_response(&self, input: &str, _confidence: f32, _patterns: &[usize]) -> String {
        let query_lower = input.to_lowercase();

        if query_lower.contains("natural language processing") || query_lower.contains("nlp") {
            "Natural Language Processing (NLP) is a field of AI that enables computers to understand, interpret, and generate human language. Key areas:\n\n**Core Tasks**:\n• **Tokenization**: Breaking text into words/subwords\n• **POS Tagging**: Identifying parts of speech\n• **Named Entity Recognition**: Finding people, places, organizations\n• **Sentiment Analysis**: Determining emotional tone\n• **Machine Translation**: Converting between languages\n\n**Modern Approaches**:\n• **Transformers**: Architecture behind GPT, BERT\n• **Word Embeddings**: Vector representations of words\n• **Attention Mechanisms**: Focus on relevant parts\n• **Pre-trained Models**: Transfer learning for NLP\n\nApplications: Chatbots, search engines, voice assistants, content moderation.".to_string()
        } else if query_lower.contains("grammar") && (query_lower.contains("what") || query_lower.contains("explain")) {
            "Grammar is the system of rules that governs language structure. Key components:\n\n**Parts of Speech**:\n• Nouns: Person, place, thing, idea\n• Verbs: Actions or states of being\n• Adjectives: Describe nouns\n• Adverbs: Modify verbs, adjectives, or other adverbs\n• Pronouns: Replace nouns\n• Prepositions: Show relationships\n• Conjunctions: Connect words/phrases\n\n**Sentence Structure**:\n• Subject: Who/what performs the action\n• Predicate: What the subject does\n• Objects: Direct/indirect recipients\n• Clauses: Independent vs. dependent\n\nGrammar ensures clear, consistent communication.".to_string()
        } else if query_lower.contains("etymology") {
            "Etymology is the study of word origins and how their meanings have evolved over time. It reveals:\n\n• **Language Families**: How languages are related\n• **Root Words**: Original forms (often Latin/Greek)\n• **Prefixes/Suffixes**: Meaning modifiers\n• **Borrowed Words**: Loanwords from other languages\n• **Semantic Shift**: How meanings change\n\n**Example**: 'Computer'\n• Latin: computare (to calculate)\n• Originally: Person who computes\n• Modern: Electronic calculating device\n\nEtymology helps understand word meanings and language evolution.".to_string()
        } else if query_lower.contains("rhetoric") || query_lower.contains("persuasion") {
            "Rhetoric is the art of effective communication and persuasion. Classical elements:\n\n**Aristotle's Appeals**:\n• **Ethos**: Credibility and character\n• **Pathos**: Emotional connection\n• **Logos**: Logic and reasoning\n\n**Rhetorical Devices**:\n• Metaphor: Implicit comparison\n• Anaphora: Repetition at beginning\n• Chiasmus: Reversed parallel structure\n• Alliteration: Repeated initial sounds\n• Hyperbole: Deliberate exaggeration\n\n**Structure**: Introduction → Arguments → Counterarguments → Conclusion\n\nApplications: Speeches, essays, debates, marketing.".to_string()
        } else if query_lower.contains("hello") || query_lower.contains("hi") || query_lower.contains("greet") {
            // NOTE(review): `contains("hi")` also matches words like "this" or
            // "machine" — confirm whether a word-boundary check is wanted.
            "Hello! I'm Kimi, your neural inference assistant. I can help you with a wide range of topics including:\n\n• **Programming**: Code examples, algorithms, data structures\n• **Mathematics**: Calculus, statistics, linear algebra\n• **Machine Learning**: Neural networks, deep learning, NLP\n• **Language**: Grammar, writing, linguistics\n• **Reasoning**: Logic, analysis, problem-solving\n\nWhat would you like to explore today?".to_string()
        } else if query_lower.contains("translate") {
            format!("For translation of '{}', I would need to know the source and target languages. Translation involves understanding context, idioms, and cultural nuances beyond literal word conversion.", input)
        } else {
            // Fallback: no topic keyword matched; echo a generic analysis.
            format!("Language analysis of '{}': This text can be examined for grammar, syntax, semantics, and pragmatic meaning. I can help with translation, writing improvement, or linguistic analysis.", input)
        }
    }
578
    /// Generate a mathematics-domain reply for `input`.
    ///
    /// Keyword-matches the lowercased query against a fixed list of math
    /// topics (calculus, statistics, linear algebra, probability, a few
    /// hard-coded arithmetic/derivative/integral examples, the quadratic
    /// formula, the Pythagorean theorem, and one worked linear equation)
    /// and returns the first matching canned answer; otherwise falls back
    /// to a generic templated string. Branch order matters: first match wins.
    ///
    /// `_confidence` and `_patterns` are unused here; they keep the
    /// signature uniform with the sibling `generate_*_response` methods.
    fn generate_math_response(&self, input: &str, _confidence: f32, _patterns: &[usize]) -> String {
        let query_lower = input.to_lowercase();

        if query_lower.contains("calculus") && (query_lower.contains("what") || query_lower.contains("explain")) {
            "Calculus is the mathematical study of continuous change, divided into two main branches:\n\n**1. Differential Calculus**: Studies rates of change and slopes\n• Derivatives: Instantaneous rate of change\n• Applications: Velocity, acceleration, optimization\n• Key rules: Power rule, chain rule, product rule\n\n**2. Integral Calculus**: Studies accumulation and areas\n• Integrals: Area under curves, total accumulation\n• Applications: Area, volume, work, probability\n• Fundamental Theorem: Links derivatives and integrals\n\nCalculus is essential for physics, engineering, economics, and data science.".to_string()
        } else if query_lower.contains("statistics") && (query_lower.contains("what") || query_lower.contains("explain")) {
            "Statistics is the science of collecting, analyzing, and interpreting data. Key concepts:\n\n**Descriptive Statistics**: Summarize data\n• Mean: Average value\n• Median: Middle value\n• Mode: Most frequent value\n• Standard Deviation: Measure of spread\n\n**Inferential Statistics**: Draw conclusions\n• Hypothesis Testing: Test claims about populations\n• Confidence Intervals: Estimate population parameters\n• p-values: Probability of results by chance\n• Regression: Model relationships between variables\n\nApplications include research, business analytics, machine learning, and quality control.".to_string()
        } else if query_lower.contains("linear algebra") {
            "Linear Algebra is the branch of mathematics concerning linear equations, linear transformations, and vector spaces. Core concepts:\n\n**Vectors**: Quantities with magnitude and direction\n• Operations: Addition, scalar multiplication, dot product\n\n**Matrices**: Rectangular arrays of numbers\n• Operations: Multiplication, transpose, inverse\n• Applications: Systems of equations, transformations\n\n**Key Topics**:\n• Eigenvalues & Eigenvectors: Special vectors unchanged by transformations\n• Determinants: Scalar value describing matrix properties\n• Vector Spaces: Sets closed under vector operations\n• Linear Independence: Vectors not expressible as combinations of others\n\nEssential for computer graphics, machine learning, and physics.".to_string()
        } else if query_lower.contains("probability") && (query_lower.contains("what") || query_lower.contains("explain")) {
            "Probability is the mathematical framework for quantifying uncertainty. Key concepts:\n\n**Basic Probability**: P(Event) = Favorable outcomes / Total outcomes\n• Range: 0 (impossible) to 1 (certain)\n• Complement: P(not A) = 1 - P(A)\n\n**Rules**:\n• Addition: P(A or B) = P(A) + P(B) - P(A and B)\n• Multiplication: P(A and B) = P(A) × P(B|A)\n• Conditional: P(A|B) = P(A and B) / P(B)\n\n**Distributions**:\n• Discrete: Binomial, Poisson\n• Continuous: Normal, Exponential\n\n**Bayes' Theorem**: P(A|B) = P(B|A) × P(A) / P(B)\n\nApplications: Risk assessment, machine learning, quantum mechanics.".to_string()
        } else if query_lower.contains("2+2") || query_lower.contains("2 + 2") {
            "2 + 2 = 4\n\nThis is a basic addition problem. When you add 2 and 2, you get 4.".to_string()
        } else if query_lower.contains("derivative") && query_lower.contains("x^2") {
            "The derivative of x² is 2x.\n\nUsing the power rule: d/dx(x^n) = n·x^(n-1)\nFor x²: d/dx(x²) = 2·x^(2-1) = 2x".to_string()
        } else if query_lower.contains("derivative") && query_lower.contains("x^3") {
            "The derivative of x³ is 3x².\n\nUsing the power rule: d/dx(x^n) = n·x^(n-1)\nFor x³: d/dx(x³) = 3·x^(3-1) = 3x²".to_string()
        } else if query_lower.contains("integral") && query_lower.contains("sin") {
            "The integral of sin(x) is -cos(x) + C.\n\n∫sin(x)dx = -cos(x) + C\n\nWhere C is the constant of integration.".to_string()
        } else if query_lower.contains("quadratic") && (query_lower.contains("formula") || query_lower.contains("equation")) {
            "The quadratic formula solves ax² + bx + c = 0:\n\nx = (-b ± √(b² - 4ac)) / 2a\n\n**Components**:\n• a, b, c: Coefficients of the quadratic equation\n• Discriminant: b² - 4ac\n - If > 0: Two real solutions\n - If = 0: One real solution\n - If < 0: Two complex solutions\n\n**Example**: For x² - 5x + 6 = 0\na = 1, b = -5, c = 6\nx = (5 ± √(25 - 24)) / 2 = (5 ± 1) / 2\nx = 3 or x = 2".to_string()
        } else if query_lower.contains("pythagorean") || (query_lower.contains("pythagoras") && query_lower.contains("theorem")) {
            "The Pythagorean Theorem relates the sides of a right triangle:\n\na² + b² = c²\n\nWhere:\n• a, b = lengths of the two shorter sides (legs)\n• c = length of the longest side (hypotenuse)\n\n**Applications**:\n• Finding distances in coordinate geometry\n• Checking if a triangle is right-angled\n• 3D distance formula extension\n\n**Common Pythagorean triples**:\n• 3, 4, 5\n• 5, 12, 13\n• 8, 15, 17".to_string()
        } else if query_lower.contains("solve") && query_lower.contains("2x") {
            // NOTE(review): this answers the specific equation 2x + 5 = 15 for
            // ANY query containing "solve" and "2x" — confirm that is intended.
            "To solve 2x + 5 = 15:\n\nStep 1: Subtract 5 from both sides\n2x + 5 - 5 = 15 - 5\n2x = 10\n\nStep 2: Divide both sides by 2\n2x ÷ 2 = 10 ÷ 2\nx = 5\n\nTherefore, x = 5".to_string()
        } else if query_lower.contains("calculate") || query_lower.contains("math") {
            format!("For the mathematical problem '{}', I'd need to break it down step by step. Please provide the specific calculation or equation you'd like me to solve.", input)
        } else {
            // Fallback: no topic keyword matched; echo a generic analysis.
            format!("Mathematical analysis of '{}': This involves applying appropriate mathematical principles, formulas, and step-by-step problem-solving techniques to reach the solution.", input)
        }
    }
611
612 fn generate_tool_response(&self, input: &str, confidence: f32, patterns: &[usize]) -> String {
614 if confidence > 0.6 {
615 format!("Tool analysis of '{}' identifies {} executable pathways with clear operational steps and robust error handling.", input, patterns.len())
616 } else if confidence > 0.3 {
617 format!("Processing the functional request '{}' reveals {} operational approaches requiring careful tool orchestration.", input, patterns.len())
618 } else {
619 format!("The operational task '{}' presents {} implementation strategies requiring systematic execution and validation.", input, patterns.len())
620 }
621 }
622
623 fn generate_context_response(&self, input: &str, confidence: f32, patterns: &[usize]) -> String {
625 if confidence > 0.6 {
626 format!("Contextual analysis of '{}' maintains {} coherent narrative threads with strong continuity and conversational flow.", input, patterns.len())
627 } else if confidence > 0.3 {
628 format!("Processing '{}' in context reveals {} relationship patterns connecting to established discussion themes.", input, patterns.len())
629 } else {
630 format!("The contextual elements in '{}' suggest {} potential connections requiring careful tracking for coherence.", input, patterns.len())
631 }
632 }
633
634 fn enhanced_pattern_processing(&self, input: &str) -> String {
636 let domain_patterns = self.domain.domain_patterns();
637 let matches: Vec<&str> = domain_patterns.iter()
638 .filter(|&&pattern| input.to_lowercase().contains(pattern))
639 .cloned()
640 .collect();
641
642 let match_score = matches.len() as f32 / domain_patterns.len() as f32;
643 let word_count = input.split_whitespace().count();
644 let complexity = if word_count > 20 { "high" } else if word_count > 10 { "medium" } else { "low" };
645
646 let intelligent_response = match self.domain {
647 ExpertDomain::Reasoning => {
648 format!("Applying logical reasoning to '{}': I detect {} domain indicators with {:.2} relevance. This {} complexity problem requires systematic analysis.",
649 input, matches.len(), match_score, complexity)
650 },
651 ExpertDomain::Coding => {
652 format!("Code analysis of '{}': Found {} programming patterns with {:.2} confidence. This {} complexity task needs structured implementation.",
653 input, matches.len(), match_score, complexity)
654 },
655 ExpertDomain::Language => {
656 format!("Linguistic processing of '{}': Identified {} language markers with {:.2} strength. This {} complexity text requires contextual understanding.",
657 input, matches.len(), match_score, complexity)
658 },
659 ExpertDomain::Mathematics => {
660 format!("Mathematical evaluation of '{}': Located {} quantitative elements with {:.2} precision. This {} complexity problem needs computational analysis.",
661 input, matches.len(), match_score, complexity)
662 },
663 ExpertDomain::ToolUse => {
664 format!("Operational analysis of '{}': Detected {} functional patterns with {:.2} clarity. This {} complexity task requires systematic execution.",
665 input, matches.len(), match_score, complexity)
666 },
667 ExpertDomain::Context => {
668 format!("Contextual processing of '{}': Maintaining {} reference points with {:.2} continuity. This {} complexity discussion builds on established themes.",
669 input, matches.len(), match_score, complexity)
670 },
671 };
672
673 format!("{} [Pattern-based processing with {} training cycles]", intelligent_response, self.training_iterations)
674 }
675}
676
/// Routes incoming requests to registered `MicroExpert`s, either picking a
/// single best expert or synthesizing a multi-expert consensus answer.
#[wasm_bindgen]
pub struct ExpertRouter {
    // Registered experts; typically one per `ExpertDomain`.
    experts: Vec<MicroExpert>,
    // Log of (request, chosen domain) pairs; recent entries bias routing
    // toward recently-used domains (see `select_best_expert`).
    routing_history: Vec<(String, ExpertDomain)>,
    // Minimum relevance score an expert must reach to contribute to a
    // consensus response.
    consensus_threshold: f32,
}
684
685#[wasm_bindgen]
686impl ExpertRouter {
687 #[wasm_bindgen(constructor)]
689 pub fn new() -> ExpertRouter {
690 ExpertRouter {
691 experts: Vec::new(),
692 routing_history: Vec::new(),
693 consensus_threshold: 0.7,
694 }
695 }
696
697 pub fn add_expert(&mut self, expert: MicroExpert) {
699 self.experts.push(expert);
700 }
701
702 pub fn route(&mut self, request: &str) -> String {
704 if self.experts.is_empty() {
705 return "No experts available for routing".to_string();
706 }
707
708 let best_expert_idx = self.select_best_expert(request);
710 let expert = &self.experts[best_expert_idx];
711
712 self.routing_history.push((request.to_string(), expert.domain));
714
715 let result = expert.process(request);
717
718 format!("{} [Routed to {:?} expert based on neural content analysis]", result, expert.domain)
720 }
721
722 pub fn get_consensus(&self, request: &str) -> String {
724 if self.experts.len() < 2 {
725 return self.route_single_expert(request);
726 }
727
728 let mut expert_responses = Vec::new();
730 let mut expert_scores = Vec::new();
731
732 for expert in &self.experts {
733 let relevance_score = self.calculate_relevance_score(request, expert);
734 expert_scores.push((expert, relevance_score));
735 }
736
737 expert_scores.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
739
740 for (expert, score) in expert_scores.iter().take(3) {
741 if *score > self.consensus_threshold {
742 let response = expert.process(request);
743 expert_responses.push((expert.domain, response, *score));
744 }
745 }
746
747 self.synthesize_consensus_response(request, expert_responses)
749 }
750}
751
752impl ExpertRouter {
753 fn select_best_expert(&self, request: &str) -> usize {
755 let mut best_score = 0.0;
756 let mut best_idx = 0;
757
758 for (idx, expert) in self.experts.iter().enumerate() {
759 let score = self.calculate_relevance_score(request, expert);
760
761 let recent_success = self.routing_history.iter().rev().take(5)
763 .filter(|(_, domain)| *domain == expert.domain)
764 .count() as f32 * 0.1;
765
766 let total_score = score + recent_success;
767
768 if total_score > best_score {
769 best_score = total_score;
770 best_idx = idx;
771 }
772 }
773
774 best_idx
775 }
776
777 fn calculate_relevance_score(&self, request: &str, expert: &MicroExpert) -> f32 {
779 let mut matcher = OptimizedPatternMatcher::new();
781 let domain_scores = matcher.calculate_domain_scores(request);
782
783 domain_scores.get(&expert.domain).copied().unwrap_or(0.0)
785 }
786
787 fn route_single_expert(&self, request: &str) -> String {
789 if let Some(expert) = self.experts.first() {
790 format!("{} [Single expert routing]", expert.process(request))
791 } else {
792 "No experts available".to_string()
793 }
794 }
795
796 fn synthesize_consensus_response(&self, request: &str, responses: Vec<(ExpertDomain, String, f32)>) -> String {
798 if responses.is_empty() {
799 return "No experts met consensus threshold".to_string();
800 }
801
802 if responses.len() == 1 {
803 return format!("{} [Single expert consensus]", responses[0].1);
804 }
805
806 let total_weight: f32 = responses.iter().map(|(_, _, score)| score).sum();
808 let primary_domain = responses[0].0;
809
810 let mut consensus = format!(
812 "Multi-expert consensus for '{}' (Primary: {:?}):\n",
813 request, primary_domain
814 );
815
816 for (domain, response, score) in &responses {
817 let weight_percent = (score / total_weight * 100.0) as u32;
818 consensus.push_str(&format!(
819 "• {:?} ({}% confidence): {}\n",
820 domain, weight_percent,
821 if response.len() > 100 {
823 format!("{}...", &response[..97])
824 } else {
825 response.clone()
826 }
827 ));
828 }
829
830 consensus.push_str(&format!(
831 "\nConsensus: Based on {} expert perspectives, this query best aligns with {:?} domain processing.",
832 responses.len(), primary_domain
833 ));
834
835 consensus
836 }
837}
838
/// Tunable knobs for a `KimiRuntime` processing session.
#[wasm_bindgen]
#[derive(Debug, Clone)]
pub struct ProcessingConfig {
    // Upper bound on experts reported as active (echoed in output tags).
    pub max_experts: usize,
    // Per-query processing budget, in milliseconds.
    pub timeout_ms: u32,
    // When false, processing falls back to pattern-based heuristics.
    pub neural_inference_enabled: bool,
    // Minimum relevance score for an expert to join a consensus.
    pub consensus_threshold: f32,
}
848
849#[wasm_bindgen]
850impl ProcessingConfig {
851 #[wasm_bindgen(constructor)]
853 pub fn new() -> ProcessingConfig {
854 ProcessingConfig {
855 max_experts: 6,
856 timeout_ms: 8000,
857 neural_inference_enabled: true,
858 consensus_threshold: 0.7,
859 }
860 }
861
862 pub fn new_neural_optimized() -> ProcessingConfig {
864 ProcessingConfig {
865 max_experts: 6,
866 timeout_ms: 12000,
867 neural_inference_enabled: true,
868 consensus_threshold: 0.8,
869 }
870 }
871
872 pub fn new_pattern_optimized() -> ProcessingConfig {
874 ProcessingConfig {
875 max_experts: 3,
876 timeout_ms: 3000,
877 neural_inference_enabled: false,
878 consensus_threshold: 0.6,
879 }
880 }
881}
882
/// Top-level runtime: owns the expert router and per-session state, and
/// decides per query whether to use single-expert or consensus routing.
#[wasm_bindgen]
pub struct KimiRuntime {
    // Session configuration (expert count is echoed into responses).
    config: ProcessingConfig,
    // Router pre-populated with one expert per domain (see `new`).
    router: ExpertRouter,
    // Monotonic count of queries processed by this runtime instance.
    query_count: u32,
    // When true, every query uses consensus regardless of heuristics.
    consensus_mode: bool,
}
891
892#[wasm_bindgen]
893impl KimiRuntime {
894 #[wasm_bindgen(constructor)]
896 pub fn new(config: ProcessingConfig) -> KimiRuntime {
897 let mut router = ExpertRouter::new();
898
899 router.add_expert(MicroExpert::new(ExpertDomain::Reasoning));
901 router.add_expert(MicroExpert::new(ExpertDomain::Coding));
902 router.add_expert(MicroExpert::new(ExpertDomain::Language));
903 router.add_expert(MicroExpert::new(ExpertDomain::Mathematics));
904 router.add_expert(MicroExpert::new(ExpertDomain::ToolUse));
905 router.add_expert(MicroExpert::new(ExpertDomain::Context));
906
907 KimiRuntime {
908 config,
909 router,
910 query_count: 0,
911 consensus_mode: false,
912 }
913 }
914
915 pub fn process(&mut self, query: &str) -> String {
917 self.query_count += 1;
918
919 let use_consensus = self.should_use_consensus(query);
921
922 let result = if use_consensus {
923 self.router.get_consensus(query)
924 } else {
925 self.router.route(query)
926 };
927
928 format!("{} [Runtime: Query #{}, Mode: {}, {} experts active]",
930 result, self.query_count,
931 if use_consensus { "Consensus" } else { "Single" },
932 self.config.max_experts)
933 }
934
935 pub fn set_consensus_mode(&mut self, enabled: bool) {
937 self.consensus_mode = enabled;
938 }
939}
940
941impl KimiRuntime {
942 fn should_use_consensus(&self, query: &str) -> bool {
944 if self.consensus_mode {
945 return true;
946 }
947
948 let word_count = query.split_whitespace().count();
950 let has_multiple_domains = self.count_domain_indicators(query) > 1;
951 let is_complex = query.to_lowercase().contains("complex") ||
952 query.to_lowercase().contains("analyze") ||
953 query.to_lowercase().contains("comprehensive");
954
955 word_count > 20 || has_multiple_domains || is_complex
956 }
957
958 fn count_domain_indicators(&self, query: &str) -> usize {
960 let text = query.to_lowercase();
961 let mut count = 0;
962
963 let domain_keywords = [
964 vec!["analyze", "logic", "reason"], vec!["code", "function", "program"], vec!["translate", "language", "text"], vec!["calculate", "math", "equation"], vec!["tool", "api", "execute"], vec!["context", "previous", "remember"], ];
971
972 for keywords in domain_keywords.iter() {
973 if keywords.iter().any(|&keyword| text.contains(keyword)) {
974 count += 1;
975 }
976 }
977
978 count
979 }
980}
981
/// WASM module entry point; wasm-bindgen runs this once at instantiation.
#[wasm_bindgen(start)]
pub fn init() {
    // Reset the shared vocabulary to a fresh embedding table. If the mutex
    // is poisoned the reset is silently skipped (best-effort init).
    if let Ok(mut vocab) = GLOBAL_VOCAB.lock() {
        *vocab = TokenEmbedding::new();
    }

    web_sys::console::log_1(&"Kimi-FANN Core initialized with neural networks".into());
}
993
/// Serializable snapshot of network-wide runtime statistics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NetworkStats {
    // Number of peers currently connected.
    pub active_peers: usize,
    // Total queries processed across the network.
    pub total_queries: u64,
    // Mean query latency in milliseconds.
    pub average_latency_ms: f64,
    // Per-domain utilization; presumably a 0.0–1.0 fraction per expert
    // domain — TODO confirm against the producer of these stats.
    pub expert_utilization: HashMap<ExpertDomain, f64>,
    // Presumably accuracy of neural inference in 0.0–1.0 — confirm units.
    pub neural_accuracy: f64,
}