eryon_actors/vnode/
impl_vnode.rs

1/*
2    Appellation: impl_vnode <module>
3    Contrib: @FL03
4*/
5use super::VNode;
6
7use crate::ctx::ActorContext;
8use crate::drivers::{Driver, RawDriver, TriadDriver};
9use crate::mem::prelude::{MemoryStatistics, RelationshipType};
10use crate::operators::Operator;
11use crate::surface::{Predict, SurfaceModel, SurfaceNetwork, Train};
12use crate::traits::{Actor, ActorExt};
13use crate::types::StabilityPattern;
14
15use ndarray::{Array1, Array2, ScalarOperand};
16use num_traits::{Float, FromPrimitive, NumAssign, ToPrimitive};
17use rshyper::EdgeId;
18use rstmt::nrt::{LPR, Triad, Triads};
19use rstmt::{Aspn, PitchMod};
20use std::collections::HashMap;
21use std::time::Duration;
22
23impl<D, T> VNode<D, T>
24where
25    D: RawDriver<Triad>,
26    T: Float
27        + FromPrimitive
28        + ToPrimitive
29        + NumAssign
30        + ScalarOperand
31        + core::iter::Sum
32        + core::str::FromStr,
33{
    /// Apply learned stability patterns from another triad class to the current one.
    ///
    /// NOTE(review): this method is currently a stub. It builds the stability
    /// pattern sets for both the source class and the node's own class, then
    /// unconditionally panics via `todo!` — the learning step it is meant to
    /// perform is preserved below as commented-out code until the pending
    /// network modifications it depends on are finalized.
    pub fn adapt_stability_patterns(&mut self, source_class: Triads) -> crate::Result<()> {
        // Create patterns for the source class (currently unused — see stub note)
        let _source = self.create_stability_patterns_for_class(source_class);

        // Create patterns for the current class (currently unused — see stub note)
        let _target = self.create_stability_patterns_for_class(self.class());

        // Learn the appropriate patterns with adaptation

        todo!("update the method to reflect modifications to the network");
        // // First learn the original patterns to understand the source
        // let mut all_inputs = Vec::new();
        // let mut all_targets = Vec::new();

        // // Use source patterns to understand the origin
        // for (input, target) in &source_patterns {
        //     all_inputs.push(*input);
        //     all_targets.push(*target);
        // }

        // // Then add target patterns for adaptation
        // for (input, target) in &target_patterns {
        //     all_inputs.push(*input);
        //     all_targets.push(*target);
        // }

        // // Learn with prioritizing target patterns (more iterations)
        // self.learn_pattern(&all_inputs, &all_targets, 500)?;

        // Ok(())
    }
66    /// Adapt the surface network weights based on patterns from another node
67    pub fn adapt_surface_to_external_patterns<I>(&mut self, patterns: I) -> crate::Result<()>
68    where
69        I: IntoIterator<Item = Vec<usize>>,
70    {
71        // Ensure we have a surface network
72        if let Some(surface) = &mut self.surface {
73            // We need to reconstruct meaningful weight adjustments from discrete patterns
74            let mut adaptation_count = 0;
75            let scale = T::from_usize(1000).unwrap(); // Same scale used in extract_knowledge_patterns
76
77            for pattern in patterns {
78                // Skip patterns that don't have our marker or are too short
79                if pattern.len() < 4 || pattern[0] != 999 {
80                    continue;
81                }
82
83                // Check pattern type (1 = primary, 2 = secondary)
84                let is_primary = pattern[1] == 1;
85
86                if is_primary {
87                    // Process primary weights starting at index 2
88                    for i in (2..pattern.len()).step_by(2) {
89                        if i + 1 >= pattern.len() {
90                            continue;
91                        }
92
93                        let position = pattern[i];
94                        let quantized_value = pattern[i + 1];
95
96                        // Calculate row and column from position
97                        let (in_rows, in_cols) = surface.model().features().dim_input();
98                        let row = position / in_cols;
99                        let col = position % in_cols;
100
101                        // Check bounds
102                        if row < in_rows && col < in_cols {
103                            // Dequantize value
104                            let weight_value = T::from_usize(quantized_value).unwrap() / scale;
105
106                            // Adaptive integration - more conservative than full replacement
107                            let current = surface.input().weights()[[row, col]];
108                            let new_value = current * T::from_f32(0.85).unwrap()
109                                + weight_value * T::from_f32(0.15).unwrap();
110                            surface.input_mut().weights_mut()[[row, col]] = new_value;
111
112                            adaptation_count += 1;
113                        }
114                    }
115                } else {
116                    // Process secondary weights
117                    for i in (2..pattern.len()).step_by(2) {
118                        if i + 1 >= pattern.len() {
119                            continue;
120                        }
121
122                        let col = pattern[i];
123                        let quantized_value = pattern[i + 1];
124
125                        // Check bounds
126                        if col < surface.output().ncols() {
127                            // Dequantize value
128                            let weight_value = T::from_usize(quantized_value).unwrap() / scale;
129
130                            // Adaptive integration - more conservative than full replacement
131                            let current = surface.output().weights()[[0, col]];
132                            let new_value = current * T::from_f32(0.85).unwrap()
133                                + weight_value * T::from_f32(0.15).unwrap();
134                            surface.output_mut().weights_mut()[[0, col]] = new_value;
135
136                            adaptation_count += 1;
137                        }
138                    }
139                }
140            }
141
142            // If we made adaptations, record this in memory
143            if adaptation_count > 0 {
144                self.store
145                    .record_event("adapted_surface", Some(vec![adaptation_count]));
146            }
147        }
148
149        Ok(())
150    }
151    /// Adjust surface parameters towards target parameters
152    pub fn adjust_surface_parameters(&mut self, target: &(T, T, T), rate: T) -> crate::Result<()> {
153        if let Some(surface) = &mut self.surface {
154            // Interpolate between current and target parameters
155            let lr = *surface.model().learning_rate();
156            let decay = *surface.model().decay();
157            let momentum = *surface.model().momentum();
158            let new_gamma = lr + rate * (target.2 - lr);
159            let new_decay = decay + rate * (target.1 - decay);
160            let new_momentum = momentum + rate * (target.0 - momentum);
161
162            // Apply adjusted parameters
163            let config =
164                crate::surface::SurfaceModelConfig::new(new_gamma, new_momentum, new_decay);
165            surface.model_mut().set_config(config);
166        }
167
168        Ok(())
169    }
    /// Apply federated gradients to the local surface model.
    ///
    /// Expects `averaged_gradients` keyed by `"primary"` (input-layer) and
    /// `"secondary"` (output-layer); each entry holds one flattened gradient
    /// vector per peer. Missing keys are skipped, as is the entire update
    /// when no surface network exists.
    ///
    /// NOTE(review): the flattened vectors are assumed to be in row-major
    /// order matching `extract_learning_gradients` — confirm when peers are
    /// not this same implementation.
    pub fn apply_federated_gradients(
        &mut self,
        averaged_gradients: &HashMap<String, Vec<Vec<T>>>,
    ) -> crate::Result<()> {
        if let Some(surface) = &mut self.surface {
            // Apply averaged primary gradients if available
            if let Some(primary_grads) = averaged_gradients.get("primary") {
                if !primary_grads.is_empty() {
                    // Element-wise mean across all peers' vectors; entries
                    // beyond the first vector's length are ignored.
                    let mut avg_grad = vec![T::zero(); primary_grads[0].len()];

                    for grad in primary_grads {
                        for (i, &value) in grad.iter().enumerate() {
                            if i < avg_grad.len() {
                                avg_grad[i] += value / T::from_usize(primary_grads.len()).unwrap();
                            }
                        }
                    }

                    // Blend into the primary weights: 90% current weight plus
                    // 10% averaged gradient (0.1 written as 10.recip()).
                    let mut idx = 0;
                    for i in 0..surface.input().nrows() {
                        for j in 0..surface.input().ncols() {
                            if idx < avg_grad.len() {
                                surface.input_mut().weights_mut()[[i, j]] =
                                    T::from_f32(0.9).unwrap() * surface.input().weights()[[i, j]]
                                        + T::from_usize(10).unwrap().recip() * avg_grad[idx];
                                idx += 1;
                            }
                        }
                    }
                }
            }

            // Apply averaged secondary gradients if available
            if let Some(secondary_grads) = averaged_gradients.get("secondary") {
                if !secondary_grads.is_empty() {
                    // Element-wise mean across peers, as above.
                    let mut avg_grad = vec![T::zero(); secondary_grads[0].len()];

                    for grad in secondary_grads {
                        for (i, &value) in grad.iter().enumerate() {
                            if i < avg_grad.len() {
                                avg_grad[i] +=
                                    value / T::from_usize(secondary_grads.len()).unwrap();
                            }
                        }
                    }

                    // Blend into the secondary weights with the same 0.9/0.1 mix.
                    let mut idx = 0;
                    for i in 0..surface.output().nrows() {
                        for j in 0..surface.output().ncols() {
                            if idx < avg_grad.len() {
                                surface.output_mut().weights_mut()[[i, j]] =
                                    T::from_f32(0.9).unwrap() * surface.output().weights()[[i, j]]
                                        + T::from_usize(10).unwrap().recip() * avg_grad[idx];
                                idx += 1;
                            }
                        }
                    }
                }
            }
        }

        Ok(())
    }
238    /// Calculate adaptive target for maximum feature count
239    pub fn calculate_adaptive_feature_target(&self) -> usize
240    where
241        T: core::str::FromStr,
242    {
243        // Base target depends on the node's role
244        let base_target = match self.operator() {
245            Operator::Observer(_) => 2000, // Observers can store more features
246            Operator::Agent(_) => 1500,    // Agents need to be more efficient
247        };
248        let base_target = T::from_usize(base_target).unwrap();
249
250        // Adjust based on critical point count (more critical points -> more memory needed)
251        let critical_point_factor =
252            T::from_f32(1.0 + (self.critical_points().len() as f32 * 0.1)).unwrap();
253
254        // Adjust based on learning quality
255        let learning_accuracy = self.get_learning_accuracy();
256        let learning_factor = if learning_accuracy > T::from_f32(0.8).unwrap() {
257            // Good learning - allow more memory to capture nuances
258            T::from_f32(1.2).unwrap()
259        } else if learning_accuracy > T::from_f32(0.5).unwrap() {
260            // Average learning - normal memory allocation
261            T::one()
262        } else {
263            // Poor learning - restrict memory to force better generalization
264            T::from_f32(0.8).unwrap()
265        };
266
267        // Final target calculation
268        let adaptive_target = (base_target * critical_point_factor * learning_factor)
269            .to_usize()
270            .unwrap();
271
272        // Ensure reasonable bounds
273        adaptive_target.clamp(500, 5000)
274    }
275    /// Consolidate similar patterns to reduce memory usage
276    pub fn consolidate_similar_patterns(
277        &mut self,
278        similarity_threshold: T,
279    ) -> crate::Result<usize> {
280        // Create a memory buffer to avoid modifying while iterating
281        let feature_ids: Vec<_> = self
282            .store
283            .features()
284            .iter()
285            .filter(|f| f.dimension() == 2 && f.death().is_none())
286            .map(|f| f.id())
287            .collect();
288
289        // Track how many features were consolidated
290        let mut consolidated_count = 0;
291
292        // Find similar patterns and merge them
293        for i in 0..feature_ids.len() {
294            // Skip already processed features
295            if i >= feature_ids.len() {
296                continue;
297            }
298
299            let id1 = feature_ids[i];
300
301            // Get the first feature, skip if already deleted
302            let f1 = match self.store().find_feature_by_id(id1).cloned() {
303                Some(f) if f.death().is_none() => f,
304                _ => continue,
305            };
306
307            // Compare with all subsequent features
308            for j in i + 1..feature_ids.len() {
309                let id2 = feature_ids[j];
310
311                // Get the second feature, skip if already deleted
312                let f2 = match self.store().find_feature_by_id(id2).cloned() {
313                    Some(f) if f.death().is_none() => f,
314                    _ => continue,
315                };
316
317                // Only compare features of the same dimension
318                if f1.dimension() != f2.dimension() {
319                    continue;
320                }
321
322                // Calculate similarity between the features
323                let similarity = self
324                    .store()
325                    .content_similarity(f1.content().as_slice(), f2.content().as_slice());
326
327                // If similarity is above threshold, merge the features
328                if similarity >= similarity_threshold {
329                    // Keep the more important feature
330                    let (keep_id, remove_id) = if f1.importance() >= f2.importance() {
331                        (id1, id2)
332                    } else {
333                        (id2, id1)
334                    };
335
336                    // Merge the features
337                    self.store.merge_features(keep_id, remove_id);
338                    consolidated_count += 1;
339                }
340            }
341        }
342
343        Ok(consolidated_count)
344    }
345    /// Get contextual information about the current state
346    pub fn contextualize(&mut self) -> crate::Result<ActorContext<T>> {
347        // Check if we have a recent cached context
348        if let Some((context, timestamp)) = self.last_context() {
349            // Use cached context if it's recent enough (within 100ms)
350            if timestamp.elapsed() < Duration::from_millis(100) {
351                return Ok(context.clone());
352            }
353        }
354
355        // Generate new context
356        let ctx = self.generate_context()?;
357        self.set_last_context(ctx);
358        self.last_context().map_or_else(
359            || Err(crate::ActorError::GenerativeError("".to_string())),
360            |(c, _)| Ok(c.clone()),
361        )
362    }
363    /// Create stability patterns specific to the current headspace
364    pub fn create_stability_patterns_for_class(&self, class: Triads) -> Vec<StabilityPattern<T>> {
365        match class {
366            Triads::Major => {
367                vec![
368                    (
369                        [
370                            T::from_f32(0.8).unwrap(),
371                            T::from_f32(0.1).unwrap(),
372                            T::from_f32(0.1).unwrap(),
373                        ],
374                        Array1::ones(3),
375                    ), // Root emphasis - high stability
376                    (
377                        [
378                            T::from_f32(0.1).unwrap(),
379                            T::from_f32(0.8).unwrap(),
380                            T::from_f32(0.1).unwrap(),
381                        ],
382                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
383                    ), // Third emphasis - medium stability
384                    (
385                        [
386                            T::from_f32(0.1).unwrap(),
387                            T::from_f32(0.1).unwrap(),
388                            T::from_f32(0.8).unwrap(),
389                        ],
390                        Array1::zeros(3),
391                    ), // Fifth emphasis - low stability
392                    (
393                        [T::from_f32(3.0.recip()).unwrap(); 3],
394                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
395                    ), // Balanced - medium stability
396                ]
397            }
398            Triads::Minor => {
399                vec![
400                    (
401                        [
402                            T::from_f32(0.8).unwrap(),
403                            T::from_f32(0.1).unwrap(),
404                            T::from_f32(0.1).unwrap(),
405                        ],
406                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
407                    ), // Root emphasis - medium stability
408                    (
409                        [
410                            T::from_f32(0.1).unwrap(),
411                            T::from_f32(0.8).unwrap(),
412                            T::from_f32(0.1).unwrap(),
413                        ],
414                        Array1::zeros(3),
415                    ), // Third emphasis - low stability
416                    (
417                        [
418                            T::from_f32(0.1).unwrap(),
419                            T::from_f32(0.1).unwrap(),
420                            T::from_f32(0.8).unwrap(),
421                        ],
422                        Array1::ones(3),
423                    ), // Fifth emphasis - high stability
424                    (
425                        [T::from_f32(3.0.recip()).unwrap(); 3],
426                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
427                    ), // Balanced - medium stability
428                ]
429            }
430            Triads::Diminished => {
431                vec![
432                    (
433                        [
434                            T::from_f32(0.8).unwrap(),
435                            T::from_f32(0.1).unwrap(),
436                            T::from_f32(0.1).unwrap(),
437                        ],
438                        Array1::from_elem(3, T::from_f32(0.4).unwrap()),
439                    ), // Root emphasis - moderate stability
440                    (
441                        [
442                            T::from_f32(0.1).unwrap(),
443                            T::from_f32(0.8).unwrap(),
444                            T::from_f32(0.1).unwrap(),
445                        ],
446                        Array1::from_elem(3, T::from_f32(0.3).unwrap()),
447                    ), // Third emphasis - lower stability
448                    (
449                        [
450                            T::from_f32(0.1).unwrap(),
451                            T::from_f32(0.1).unwrap(),
452                            T::from_f32(0.8).unwrap(),
453                        ],
454                        Array1::from_elem(3, T::from_f32(0.2).unwrap()),
455                    ), // Fifth emphasis - least stability
456                    (
457                        [T::from_f32(3.0.recip()).unwrap(); 3],
458                        Array1::from_elem(3, T::from_f32(0.3).unwrap()),
459                    ), // Balanced - moderate stability
460                ]
461            }
462            Triads::Augmented => {
463                vec![
464                    (
465                        [
466                            T::from_f32(0.8).unwrap(),
467                            T::from_f32(0.1).unwrap(),
468                            T::from_f32(0.1).unwrap(),
469                        ],
470                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
471                    ), // Root emphasis - medium stability
472                    (
473                        [
474                            T::from_f32(0.1).unwrap(),
475                            T::from_f32(0.8).unwrap(),
476                            T::from_f32(0.1).unwrap(),
477                        ],
478                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
479                    ), // Third emphasis - medium stability
480                    (
481                        [
482                            T::from_f32(0.1).unwrap(),
483                            T::from_f32(0.1).unwrap(),
484                            T::from_f32(0.8).unwrap(),
485                        ],
486                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
487                    ), // Fifth emphasis - medium stability
488                    (
489                        [T::from_f32(3.0.recip()).unwrap(); 3],
490                        Array1::from_elem(3, T::from_f32(0.5).unwrap()),
491                    ), // Balanced - medium stability
492                ]
493            }
494        }
495    }
496    /// Calculate distance from headspace root to critical point
497    pub fn distance_to_critical_point(&self, name: &str) -> Option<usize> {
498        if let Some(&pitch_class) = self.critical_points.get(name) {
499            let root = self.headspace().root();
500            let distance = ((pitch_class as isize - root as isize).pmod()) as usize;
501            Some(distance)
502        } else {
503            None
504        }
505    }
506    /// Calculate distance to a point
507    pub fn distance_to_point(&self, point: usize) -> usize {
508        let current = self.driver.headspace();
509        let mut min_distance = 12;
510
511        for note in current.notes() {
512            let raw_distance = ((note as isize) - (point as isize)).abs().pmod();
513            let distance = core::cmp::min(raw_distance, 12 - raw_distance) as usize;
514            min_distance = core::cmp::min(min_distance, distance);
515        }
516
517        min_distance
518    }
519    /// Ensure the surface network is initialized
520    #[cfg(not(feature = "rand"))]
521    pub fn ensure_surface_network(&mut self) -> crate::Result<&mut SurfaceNetwork<T>> {
522        if self.surface.is_none() {
523            self.init_surface()?;
524        }
525        self.surface
526            .as_mut()
527            .ok_or(crate::ActorError::NotInitialized(
528                "Unable to initialize the surface network".to_string(),
529            ))
530    }
531    #[cfg(feature = "rand")]
532    pub fn ensure_surface_network(&mut self) -> crate::Result<&mut SurfaceNetwork<T>>
533    where
534        rand_distr::StandardNormal: rand_distr::Distribution<T>,
535    {
536        if self.surface.is_none() {
537            self.init_surface()?;
538        }
539        self.surface
540            .as_mut()
541            .ok_or(crate::ActorError::NotInitialized(
542                "Unable to initialize the surface network".to_string(),
543            ))
544    }
545    /// Extract knowledge patterns in a discrete form
546    pub fn extract_knowledge_patterns(&self) -> crate::Result<Vec<Vec<usize>>> {
547        let mut patterns = Vec::new();
548
549        // Extract from memory features
550        // Get stabilized patterns (dimension 2 features that have persisted)
551        for feature in self.store().features() {
552            if feature.dimension() == 2
553                && feature.death().is_none()
554                && feature.importance() > &T::from_f32(0.6).unwrap()
555            {
556                patterns.push(feature.content().to_vec());
557            }
558        }
559
560        // If we have a surface network, extract learned patterns
561        if let Some(surface) = &self.surface {
562            // Convert floating point weights to discrete representations
563            let inputs = surface.input();
564            let outputs = surface.output();
565
566            // Quantize weights (convert T to usize with scaling)
567            let scale = T::from_usize(1000).unwrap();
568
569            // Process primary weights
570            for i in 0..inputs.nrows() {
571                let mut pattern = Vec::with_capacity(inputs.ncols() * 2 + 2);
572                pattern.push(999); // Marker for surface weights
573                pattern.push(1); // Primary weight indicator
574
575                for j in 0..inputs.ncols() {
576                    let value = inputs.weights()[[i, j]] * scale;
577                    pattern.push(i * inputs.ncols() + j); // Position
578                    // Quantized weight
579                    if let Some(value) = value.to_usize() {
580                        pattern.push(value); // Convert to usize
581                    } else {
582                        pattern.push(0); // Fallback for non-integer values
583                    }
584                }
585                patterns.push(pattern);
586            }
587
588            // Process secondary weights
589            let mut pattern = Vec::with_capacity(outputs.ncols() * 2 + 2);
590            pattern.push(999); // Marker for surface weights
591            pattern.push(2); // Secondary weight indicator
592
593            for j in 0..outputs.ncols() {
594                let value = (outputs.weights()[[0, j]] * scale).to_usize().unwrap();
595                pattern.push(j); // Position
596                pattern.push(value); // Quantized weight
597            }
598            patterns.push(pattern);
599        }
600
601        Ok(patterns)
602    }
603    /// Extract learning gradients for federated learning
604    pub fn extract_learning_gradients(&self) -> crate::Result<HashMap<String, Vec<T>>> {
605        let mut gradients = HashMap::new();
606
607        if let Some(surface) = &self.surface {
608            // Extract gradients from surface network
609            // Pattern ID -> Gradient vector
610            let input_weights = surface.input().weights();
611            let output_weights = surface.output().weights();
612
613            // Flatten weights into gradient vectors
614            let mut input_delta = Vec::with_capacity(input_weights.len());
615            for i in 0..input_weights.nrows() {
616                for j in 0..input_weights.ncols() {
617                    input_delta.push(input_weights[[i, j]]);
618                }
619            }
620
621            let mut output_delta = Vec::with_capacity(output_weights.len());
622            for i in 0..output_weights.nrows() {
623                for j in 0..output_weights.ncols() {
624                    output_delta.push(output_weights[[i, j]]);
625                }
626            }
627
628            // Store gradients with pattern IDs
629            gradients.insert("primary".to_string(), input_delta);
630            gradients.insert("secondary".to_string(), output_delta);
631        }
632
633        Ok(gradients)
634    }
635    /// Find features related to the current triad
636    pub fn find_related_triads(&self) -> Vec<Triad> {
637        // Get current triad feature
638        let current_notes = self.get_notes().to_vec();
639
640        // Find features with the same content
641        let current_features = self.store.find_by_content_prefix(&current_notes);
642
643        if current_features.is_empty() {
644            return Vec::new();
645        }
646
647        // Get related features through relationships
648        let mut related_features = Vec::new();
649        for feature in current_features {
650            let related = self
651                .store
652                .find_related_features(feature.id(), Some(RelationshipType::Transformation));
653            related_features.extend(related);
654        }
655
656        // Convert features to triads
657        related_features
658            .into_iter()
659            .filter(|f| f.dimension() == 2 && f.content().len() == 3)
660            .map(|f| {
661                // Determine class by checking if its major or minor
662                let notes = [f[0], f[1], f[2]];
663                let root = notes[0];
664                let third_interval = (notes[1] + 12 - root).pmod();
665                let class = if third_interval == 4 {
666                    Triads::Major
667                } else {
668                    Triads::Minor
669                };
670
671                Triad::new(notes, class)
672            })
673            .collect()
674    }
675    /// Find paths to a triad containing the target pitch
676    pub fn find_paths_to_point(&self, target: usize) -> Vec<Vec<LPR>> {
677        // use the pathfinder to find paths to the target pitch
678        self.driver()
679            .headspace()
680            .path_finder()
681            .find_paths_to_target(target)
682            .into_iter()
683            .map(|tgt| tgt.path().clone())
684            .collect::<Vec<_>>()
685    }
686    /// Get a list of triads that have been frequently visited
687    pub fn get_frequently_visited_triads(&self, min_occurrences: usize) -> Vec<Triad> {
688        // Get all triad features (dimension 2)
689        let triad_features = self.store.find_by_dimension(2);
690
691        // Count the occurrences of each triad
692        let mut triad_counts = HashMap::new();
693        for feature in triad_features {
694            if feature.content_len() == 3 {
695                let notes = [feature[0], feature[1], feature[2]];
696                *triad_counts.entry(notes).or_insert(0) += 1;
697            }
698        }
699
700        // Filter by minimum occurrences and convert to Triads
701        triad_counts
702            .into_iter()
703            .filter(|(_, count)| *count >= min_occurrences)
704            .map(|(notes, _)| {
705                // Determine class by checking if its major or minor
706                let root = notes[0];
707                let third_interval = (notes[1] + 12 - root).pmod();
708                let class = if third_interval == 4 {
709                    Triads::Major
710                } else {
711                    Triads::Minor
712                };
713
714                Triad::new(notes, class)
715            })
716            .collect()
717    }
718    /// Get convergence rate for learning
719    pub fn get_convergence_rate(&self) -> T {
720        // Check if we have convergence metrics
721        if let Some(convergence) = self.store.get_property_float("convergence_rate") {
722            return convergence;
723        }
724
725        // Default value if no metrics available
726        T::from_f32(0.5).unwrap()
727    }
728    /// Get feature quality metric
729    pub fn get_feature_quality(&self) -> T {
730        // Use feature count as a proxy for quality
731        let feature_count = T::from_usize(self.total_features()).unwrap();
732        (feature_count / T::from_usize(100).unwrap()).min(T::one())
733    }
734    /// Get the current learning accuracy
735    pub fn get_learning_accuracy(&self) -> T
736    where
737        T: core::str::FromStr,
738    {
739        self.store()
740            .get_property("learning_accuracy")
741            .and_then(|i| i.parse().ok())
742            .unwrap_or(T::from_f32(0.5).unwrap())
743    }
    /// Get statistics about the memory state.
    ///
    /// Thin delegation to the underlying feature store's
    /// `MemoryStatistics` snapshot.
    pub fn get_memory_statistics(&self) -> MemoryStatistics<T> {
        self.store().get_statistics()
    }
748    /// Get learned stability patterns from this node's surface network
749    pub fn get_stability_patterns(&self) -> Option<Vec<StabilityPattern<T>>>
750    where
751        SurfaceModel<T>: Predict<Array1<T>, Output = Array1<T>>,
752    {
753        if !self.has_surface_network() {
754            return None;
755        }
756
757        // Generate representative patterns based on current headspace
758        let mut patterns = Vec::new();
759        let p1 = T::from_usize(10).unwrap().recip();
760        let p5 = T::from_f32(0.5).unwrap();
761        let p8 = T::from_f32(0.8).unwrap();
762        let p118 = [p1, p1, p8];
763        let p181 = [p1, p8, p1];
764        let p811 = [p8, p1, p1];
765
766        // Add standard patterns based on chord type
767        match self.class() {
768            Triads::Major => {
769                // In major triads:
770                // - Root emphasis creates stability
771                // - Fifth emphasis creates tension
772                // - Third emphasis creates moderate stability
773                patterns.push((
774                    p811,
775                    self.process_surface(&Array1::from_iter(p811))
776                        .unwrap_or(Array1::ones(3)),
777                ));
778                patterns.push((
779                    p181,
780                    self.process_surface(&Array1::from_iter(p181))
781                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.5).unwrap())),
782                ));
783                patterns.push((
784                    p118,
785                    self.process_surface(&Array1::from_iter(p118))
786                        .unwrap_or(Array1::zeros(3)),
787                ));
788            }
789            Triads::Minor => {
790                // In minor triads:
791                // - Fifth emphasis creates stability
792                // - Third emphasis creates tension
793                // - Root emphasis creates moderate stability
794                patterns.push((
795                    p118,
796                    self.process_surface(&Array1::from_iter(p118))
797                        .unwrap_or(Array1::ones(3)),
798                ));
799                patterns.push((
800                    p811,
801                    self.process_surface(&Array1::from_iter(p811))
802                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.5).unwrap())),
803                ));
804                patterns.push((
805                    p181,
806                    self.process_surface(&Array1::from_iter(p181))
807                        .unwrap_or(Array1::zeros(3)),
808                ));
809            }
810            Triads::Diminished => {
811                // In diminished triads, more subtle differences
812                patterns.push((
813                    p811,
814                    self.process_surface(&Array1::from_iter(p811))
815                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.4).unwrap())),
816                ));
817                patterns.push((
818                    p181,
819                    self.process_surface(&Array1::from_iter(p181))
820                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.3).unwrap())),
821                ));
822                patterns.push((
823                    p118,
824                    self.process_surface(&Array1::from_iter(p118))
825                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.2).unwrap())),
826                ));
827            }
828            Triads::Augmented => {
829                // Augmented triads have symmetric stability
830                patterns.push((
831                    p811,
832                    self.process_surface(&Array1::from_iter(p811))
833                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.5).unwrap())),
834                ));
835                patterns.push((
836                    p181,
837                    self.process_surface(&Array1::from_iter(p181))
838                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.5).unwrap())),
839                ));
840                patterns.push((
841                    p118,
842                    self.process_surface(&Array1::from_iter(p118))
843                        .unwrap_or(Array1::from_elem(3, T::from_f32(0.5).unwrap())),
844                ));
845            }
846        }
847
848        // Add balanced distribution
849        patterns.push((
850            [T::from_f32(1.0 / 3.0).unwrap(); 3],
851            self.process_surface(&Array1::<T>::from_elem(3, T::from_f32(1.0 / 3.0).unwrap()))
852                .unwrap_or(Array1::from_elem(3, p5)),
853        ));
854
855        Some(patterns)
856    }
857    /// Get surface network parameters
858    pub fn get_surface_parameters(&self) -> (T, T, T) {
859        let config = if let Some(surface) = &self.surface {
860            *surface.model().config()
861        } else {
862            crate::surface::SurfaceModelConfig::default() // Default parameters
863        };
864        (*config.momentum(), *config.decay(), *config.learning_rate())
865    }
    /// Identify a critical point in the tonal space
    ///
    /// Stores `pitch` reduced via `pmod()` (presumably its pitch class mod 12
    /// — TODO confirm against `PitchMod`) under `name`, replacing any
    /// previous entry with the same name.
    pub fn identify_critical_point(&mut self, name: &str, pitch: usize) {
        self.critical_points.insert(name.to_string(), pitch.pmod());
    }
870    /// Initialize the surface network for behavior learning
871    #[cfg(not(feature = "rand"))]
872    pub fn init_surface(&mut self) -> crate::Result<()> {
873        if self.surface.is_none() {
874            let headspace = *self.driver.headspace();
875            let surface = SurfaceNetwork::new(headspace).init();
876            self.surface = Some(surface);
877            self.prev_weight_changes = Some((
878                Array2::zeros((5, 3)), // Default size for primary weights
879                Array2::zeros((1, 5)), // Default size for secondary weights
880            ));
881        }
882        Ok(())
883    }
884    #[cfg(feature = "rand")]
885    pub fn init_surface(&mut self) -> crate::Result<()>
886    where
887        rand_distr::StandardNormal: rand_distr::Distribution<T>,
888    {
889        if self.surface.is_none() {
890            let headspace = self.driver.headspace().clone();
891            let surface = SurfaceNetwork::new(headspace).init();
892            self.surface = Some(surface);
893            self.prev_weight_changes = Some((
894                Array2::zeros((5, 3)), // Default size for primary weights
895                Array2::zeros((1, 5)), // Default size for secondary weights
896            ));
897        }
898        Ok(())
899    }
900    /// Integrate knowledge from another node
901    pub fn integrate_external_knowledge(
902        &mut self,
903        patterns: &[Vec<usize>],
904        source_id: &EdgeId,
905    ) -> crate::Result<()>
906    where
907        T: core::fmt::Display + core::str::FromStr,
908    {
909        // 1. Store patterns in topological memory
910        let mut adaptation_count = 0;
911        for pattern in patterns {
912            // Skip empty patterns
913            if pattern.is_empty() {
914                continue;
915            }
916
917            // For surface weights, integrate them into our surface network
918            if pattern.len() >= 3 && pattern[0] == 999 {
919                // This is a weight pattern, process it with adapt_surface_to_external_patterns
920                adaptation_count += 1;
921            } else {
922                // Regular pattern, store in memory
923                // Add source information
924                let mut content = Vec::with_capacity(pattern.len() + 2);
925                content.push(998); // Marker for external pattern
926                content.push(**source_id); // Source ID
927                content.extend_from_slice(pattern);
928
929                self.store.create_feature(2, content);
930            }
931        }
932
933        // 2. If we have enough surface patterns and a surface network, apply them
934        if adaptation_count > 0 && self.has_surface_network() {
935            // Filter out just the surface patterns
936            let surface_pattern_iter = patterns.iter().cloned().filter_map(|p| {
937                if p.len() >= 3 && p[0] == 999 {
938                    return Some(p);
939                }
940                None
941            });
942
943            self.adapt_surface_to_external_patterns(surface_pattern_iter)?;
944
945            // Update learning metrics
946            let current_accuracy = self.get_learning_accuracy();
947            self.store.set_property(
948                "learning_accuracy",
949                format!(
950                    "{}",
951                    (current_accuracy * T::from_f32(0.9).unwrap()
952                        + T::from_f32(0.6).unwrap() * T::from_f32(0.1).unwrap())
953                ),
954            );
955        }
956
957        Ok(())
958    }
959    /// Learn headspace based on its structure and critical points
960    pub fn learn_headspace(&mut self) -> crate::Result<()> {
961        let triad = self.driver().headspace();
962        let notes = triad.notes();
963
964        // Define relationships between notes in the triad
965        // (index1, index2, strength)
966        let mut relationships = Vec::new();
967
968        // Root-third relationship
969        let root_third_strength = match triad.class() {
970            Triads::Major => 0.8, // Strong in major
971            Triads::Minor => 0.7, // Slightly less strong in minor
972            _ => 0.6,
973        };
974        relationships.push((0, 1, T::from_f32(root_third_strength).unwrap()));
975
976        // Third-fifth relationship
977        let third_fifth_strength = match triad.class() {
978            Triads::Diminished => 0.6, // Weaker in diminished
979            _ => 0.75,
980        };
981        relationships.push((1, 2, T::from_f32(third_fifth_strength).unwrap()));
982
983        // Fifth-root relationship (complete the cycle)
984        let fifth_root_strength = match triad.class() {
985            Triads::Augmented => 0.65,  // Weaker in augmented
986            Triads::Diminished => 0.65, // Weaker in diminished
987            _ => 0.7,
988        };
989        relationships.push((2, 0, T::from_f32(fifth_root_strength).unwrap()));
990
991        // Learn these relationships
992        self.store
993            .learn_headspace_relationships(&notes, &relationships);
994
995        Ok(())
996    }
997    /// Learn a pattern from inputs within current headspace
998    pub fn learn_pattern<X, Y, Z>(
999        &mut self,
1000        inputs: &X,
1001        targets: &Y,
1002        iterations: usize,
1003    ) -> crate::Result<()>
1004    where
1005        SurfaceModel<T>: Train<X, Y, Output = Z>,
1006    {
1007        // Train surface network with ndarray types
1008        let surface = self
1009            .surface
1010            .as_mut()
1011            .ok_or(crate::ActorError::NotInitialized(
1012                "surface network hasn't been initialized yet...".to_string(),
1013            ))?;
1014        surface.train_for(inputs, targets, iterations)?;
1015        Ok(())
1016    }
    /// Learn stability characteristics specific to the current chord class
    ///
    /// WORK IN PROGRESS: currently builds the class-specific patterns and
    /// then panics via `todo!` — the training step below is commented out
    /// pending a rework of the surface network API.
    pub fn learn_stability_characteristics(&mut self) -> crate::Result<()>
    where
        D: TriadDriver,
    {
        // Create patterns appropriate for the current chord class
        let _patterns = self.create_stability_patterns_for_class(self.class());
        todo!("update stability learning");

        // // Extract inputs and targets for training
        // let mut training_inputs = Array2::zeros((patterns.len(), 3));
        // let mut training_targets = Array1::zeros(patterns.len());
        // for (i, (input, target)) in patterns.iter().enumerate() {
        //     training_inputs.row_mut(i).assign(&Array1::from_vec(input.to_vec()));
        //     training_targets[[i]] = *target;
        // }
        // // Learn the stability patterns with multiple iterations for reinforcement
        // if !training_inputs.is_empty() {
        //     self.learn_pattern(&training_inputs, &training_targets.insert_axis(Axis(1)), 200)?;
        // }

        // // Record that this node has learned its own stability characteristics
        // // Note: This is useful for tracking which nodes have been trained
        // let chord_class_code = match self.class() {
        //     TriadClass::Major => 0,
        //     TriadClass::Minor => 1,
        //     TriadClass::Diminished => 2,
        //     TriadClass::Augmented => 3,
        // };
        // self.store
        //     .record_event("learned_stability", Some(vec![chord_class_code]));

        // Ok(())
    }
    /// Learn stability patterns from other nodes
    ///
    /// WORK IN PROGRESS: unconditionally panics via `todo!`; the `_patterns`
    /// argument is accepted but not yet consumed.
    pub fn learn_stability_patterns(
        &mut self,
        _patterns: &[StabilityPattern],
    ) -> crate::Result<()> {
        todo!("update pattern learning ");
    }
1058    /// Learn the surface defined by critical points
1059    pub fn learn_surface(&mut self) -> crate::Result<()> {
1060        let tonic = self.get_tonic();
1061
1062        // Learn the surface based on critical points
1063        self.store
1064            .learn_surface(&self.critical_points, tonic.class());
1065
1066        // Now additionally learn parameters between critical points
1067        self.interpolate_surface_between_critical_points();
1068
1069        Ok(())
1070    }
1071    /// Learn a transformation sequence
1072    pub fn learn_transformation_sequence(&mut self, transforms: &[LPR]) -> crate::Result<()> {
1073        // Convert LPR sequence to usize values
1074        let sequence: Vec<usize> = transforms.iter().map(|&lpr| lpr.into()).collect();
1075
1076        // Record in memory with medium importance
1077        self.store.record_pattern(&sequence);
1078
1079        Ok(())
1080    }
1081    /// Navigate to satisfy a specific critical point
1082    pub fn navigate_to_critical_point(&mut self, point_name: &str) -> crate::Result<Vec<LPR>>
1083    where
1084        D: TriadDriver,
1085    {
1086        // Get critical points
1087        let target_point = match self.critical_points.get(point_name) {
1088            Some(&point) => point,
1089            None => return Err(crate::ActorError::InvalidCriticalPoint),
1090        };
1091
1092        // Current position
1093        let _headspace = *self.driver().headspace();
1094
1095        // Try to find existing navigation path in memory
1096        let path = self.find_path_to_point(target_point);
1097
1098        if let Some(transforms) = path {
1099            // Execute the transforms
1100            self.transform_batch(&transforms)?;
1101
1102            // Learn from successful navigation
1103            // self.learn_from_navigation(current_triad.clone(), &transforms, target_point);
1104
1105            return Ok(transforms);
1106        }
1107
1108        // If no path found, use motion planning to get there
1109        let transform_path = self.plan_motion_to_point(target_point)?;
1110
1111        if !transform_path.is_empty() {
1112            // Execute the transforms
1113            self.transform_batch(&transform_path)?;
1114
1115            // Learn from this navigation
1116            // self.learn_from_navigation(current_triad.clone(), &transform_path, target_point);
1117            todo!("learn from navigation");
1118        }
1119
1120        Ok(transform_path)
1121    }
1122    /// Optimize memory usage with intelligent feature management
1123    pub fn optimize_memory(
1124        &mut self,
1125        max_features: usize,
1126    ) -> crate::Result<super::VirtualMemoryAnalysis<T>> {
1127        // Get memory statistics before optimization
1128        let stats_before = self.store.get_statistics();
1129
1130        // STEP 1: Identify knowledge importance by usage and recency
1131        // Calculate importance thresholds based on memory pressure
1132        let current_feature_count = self.total_features();
1133        let memory_pressure = (current_feature_count as f32) / (max_features as f32);
1134
1135        // Adjust pruning threshold based on memory pressure
1136        let importance_threshold = if memory_pressure > 1.5 {
1137            // High memory pressure - be more aggressive (keep only very important features)
1138            0.6
1139        } else if memory_pressure > 1.0 {
1140            // Moderate pressure - normal pruning
1141            0.4
1142        } else {
1143            // Low pressure - conservative pruning (just remove clearly unimportant features)
1144            0.2
1145        };
1146        let memory_pressure = T::from_f32(memory_pressure).unwrap();
1147        let importance_threshold = T::from_f32(importance_threshold).unwrap();
1148
1149        // STEP 2: Consolidate similar features to reduce redundancy
1150        let similarity_threshold = T::from_f32(0.75).unwrap(); // Features with 75% similarity can be merged
1151        self.consolidate_similar_patterns(similarity_threshold)?;
1152
1153        // STEP 3: Protect features related to critical points
1154        let mut protected_features = std::collections::HashSet::new();
1155
1156        // Protect features related to critical points in tonal space
1157        for &pitch in self.critical_points.values() {
1158            // Find features containing this pitch class
1159            for feature in self.store.features() {
1160                if feature.contains(&pitch) && feature.importance() >= &T::from_f32(0.3).unwrap() {
1161                    protected_features.insert(feature.id());
1162                }
1163            }
1164        }
1165
1166        // STEP 4: Prune low-importance features, respecting protected features
1167        let feature_ids_to_prune: Vec<_> = self
1168            .store
1169            .features()
1170            .iter()
1171            .filter(|f| {
1172                f.importance() < &importance_threshold
1173                    && !protected_features.contains(&f.id())
1174                    && f.death().is_none()
1175            })
1176            .map(|f| f.id())
1177            .collect();
1178        let pruned_count = feature_ids_to_prune.len();
1179        for id in feature_ids_to_prune {
1180            self.store.prune_feature(id);
1181        }
1182
1183        // STEP 5: Compact memory to remove dead features
1184        self.store.compact();
1185
1186        // STEP 6: Rebuild any specialized indices not covered by compact()
1187        // If we have any custom indexing needs:
1188        if current_feature_count > 1000 {
1189            // For large memory stores, ensure topological relationships are optimized
1190            self.store.rebuild_indices();
1191        }
1192
1193        // STEP 7: Optimize relationships between remaining features
1194        self.store.optimize();
1195
1196        // Get memory statistics after optimization
1197        let stats_after = self.store.get_statistics();
1198
1199        // Return statistics about the optimization process
1200        Ok(super::VirtualMemoryAnalysis {
1201            features_before: stats_before.total_features(),
1202            features_after: stats_after.total_features(),
1203            pruned_count,
1204            memory_pressure,
1205            importance_threshold,
1206        })
1207    }
1208    /// Predict the next likely transformation based on patterns in memory
1209    pub fn predict_next_transformation(&self) -> Option<LPR> {
1210        let discovery = self.store.find_by_dimension(1);
1211        // Get recent transformation features (dimension 1)
1212        let recent_transforms = discovery
1213            .iter()
1214            .filter(|f| f.is_alive())
1215            .collect::<Vec<_>>();
1216
1217        if recent_transforms.len() < 2 {
1218            return None;
1219        }
1220
1221        // Extract transformation types (LPR values)
1222        let transform_sequence = recent_transforms
1223            .iter()
1224            .rev() // Most recent first
1225            .take(5) // Look at last 5 transformations
1226            .filter_map(|f| {
1227                if f.content_len() > 6 {
1228                    // Last element is the transform type
1229                    Some(f[6])
1230                } else {
1231                    None
1232                }
1233            })
1234            .collect::<Vec<_>>();
1235
1236        // Find patterns matching this sequence
1237        let patterns = self.store.find_matching_patterns(&transform_sequence);
1238
1239        if patterns.is_empty() {
1240            return None;
1241        }
1242
1243        // Sort by importance
1244        let mut sorted_patterns = patterns.clone();
1245        sorted_patterns.sort_by(|a, b| {
1246            b.importance()
1247                .partial_cmp(a.importance())
1248                .unwrap_or(core::cmp::Ordering::Equal)
1249        });
1250
1251        // Get the most important pattern
1252        let best_pattern = &sorted_patterns[0];
1253
1254        // If the pattern suggests a next move
1255        if best_pattern.sequence_len() > transform_sequence.len() {
1256            // Get the predicted next move
1257            let next_move = best_pattern[transform_sequence.len()];
1258
1259            // Convert usize back to LPR
1260            match next_move {
1261                0 => Some(LPR::Leading),
1262                1 => Some(LPR::Parallel),
1263                2 => Some(LPR::Relative),
1264                _ => None,
1265            }
1266        } else {
1267            None
1268        }
1269    }
    /// Process a message from another node
    ///
    /// Delegates to the active operator, which interprets `message` from the
    /// `source` edge and may update this node's memory store.
    pub fn process_message(&mut self, source: EdgeId, message: &[u8]) -> crate::Result<()> {
        self.operator
            .active_operator_mut::<D>()
            .process_message(source, message, &mut self.store)
    }
1276    /// Process an input through the surface network to predict behavior
1277    pub fn process_surface<X, Y>(&self, input: &X) -> crate::Result<Y>
1278    where
1279        SurfaceModel<T>: Predict<X, Output = Y>,
1280    {
1281        // Ensure the surface network is initialized
1282
1283        if let Some(surface) = &self.surface {
1284            // Process the input through the surface network
1285            let output = surface.predict(input)?;
1286            Ok(output)
1287        } else {
1288            Err(crate::ActorError::NotInitialized(
1289                "Surface network not initialized".to_string(),
1290            ))
1291        }
1292    }
1293    /// Have the active operator propose a transformation (if supported)
1294    pub fn propose_transformation(&self) -> Option<LPR> {
1295        match &self.operator {
1296            Operator::Agent(agent) => {
1297                Some(agent.propose_transformation(&self.driver, &self.store)?)
1298            }
1299            _ => None, // Observer doesn't propose transformations
1300        }
1301    }
1302    /// Get the resource requirements for this node
1303    pub fn resource_requirements(&self) -> (usize, usize) {
1304        let (actor_mem, actor_compute) = self
1305            .operator()
1306            .active_operator::<D>()
1307            .resource_requirements();
1308
1309        // Add memory system requirements
1310        let memory_usage = self.store.count_features() * 10; // 10 units per feature
1311
1312        // Add plant requirements
1313        let plant_memory = 50; // Base memory for plant
1314
1315        // Total resources (memory, compute)
1316        (actor_mem + memory_usage + plant_memory, actor_compute)
1317    }
1318    /// Prune low-importance features
1319    pub fn prune_low_importance_features(&mut self, threshold: T) -> crate::Result<()> {
1320        // Get IDs of features to prune
1321        let to_prune: Vec<_> = self
1322            .store()
1323            .features()
1324            .iter()
1325            .filter_map(|f| {
1326                if f.death().is_none() && f.importance() < &threshold {
1327                    return Some(f.id());
1328                }
1329                None
1330            })
1331            .collect();
1332
1333        // Prune features
1334        for id in to_prune {
1335            self.store_mut().prune_feature(id);
1336        }
1337
1338        Ok(())
1339    }
    /// Set the name of the node
    ///
    /// The name is persisted as the `"name"` property in the memory store
    /// rather than kept as a struct field.
    pub fn set_name(&mut self, name: &str) {
        // Store name in memory as a property
        self.store.set_property("name", name);
    }
1345    /// Share patterns with another node
1346    pub fn share_patterns<E>(&self, target: &mut VNode<E, T>) -> crate::Result<()>
1347    where
1348        E: Driver<Triad>,
1349    {
1350        // Only share if both nodes allow it
1351        let self_allows = self
1352            .operator()
1353            .active_operator::<D>()
1354            .allows_pattern_sharing();
1355
1356        let target_allows = target
1357            .operator()
1358            .active_operator::<E>()
1359            .allows_pattern_sharing();
1360
1361        if !self_allows || !target_allows {
1362            return Ok(());
1363        }
1364
1365        // Get patterns from this node's memory
1366        for pattern in self.store.patterns() {
1367            // Only share significant patterns
1368            if pattern.importance() > &T::from_f32(0.5).unwrap() {
1369                target.store_mut().record_pattern_with_importance(
1370                    pattern.sequence(),
1371                    *pattern.importance() * T::from_f32(0.9).unwrap(), // Slight decay when sharing
1372                );
1373            }
1374        }
1375
1376        Ok(())
1377    }
1378    /// Train the surface network on a pattern
1379    #[cfg(not(feature = "rand"))]
1380    pub fn train_surface<X, Y, Z>(&mut self, inputs: &X, targets: &Y) -> crate::Result<Z>
1381    where
1382        T: core::fmt::Debug + Send + Sync + num_traits::Signed + rustfft::FftNum,
1383        SurfaceModel<T>: Train<X, Y, Output = Z>,
1384    {
1385        // Ensure the surface network is initialized
1386        self.ensure_surface_network()?;
1387
1388        if let Some(surface) = &mut self.surface {
1389            // Train the surface network with the provided inputs and targets
1390            let output = surface.train(inputs, targets)?;
1391            Ok(output)
1392        } else {
1393            Err(crate::ActorError::NotInitialized(
1394                "Surface network not initialized".to_string(),
1395            ))
1396        }
1397    }
1398    /// Train the surface network on a pattern
1399    #[cfg(feature = "rand")]
1400    pub fn train_surface<X, Y, Z>(&mut self, inputs: &X, targets: &Y) -> crate::Result<Z>
1401    where
1402        T: core::fmt::Debug + Send + Sync + num_traits::Signed + rustfft::FftNum,
1403        SurfaceModel<T>: Train<X, Y, Output = Z>,
1404        rand_distr::StandardNormal: rand_distr::Distribution<T>,
1405    {
1406        // Ensure the surface network is initialized
1407        self.ensure_surface_network()?;
1408
1409        if let Some(surface) = &mut self.surface {
1410            // Train the surface network with the provided inputs and targets
1411            let output = surface.train(inputs, targets)?;
1412            Ok(output)
1413        } else {
1414            Err(crate::ActorError::NotInitialized(
1415                "Surface network not initialized".to_string(),
1416            ))
1417        }
1418    }
1419    /// Apply a transformation to the plant and record in memory
1420    pub fn transform(&mut self, transform: LPR) -> crate::Result<()>
1421    where
1422        D: TriadDriver,
1423    {
1424        // First check if the operator accepts this transformation
1425        let accepted = match &mut self.operator {
1426            Operator::Observer(observer) => {
1427                observer.process_transform(transform, &mut self.driver, &mut self.store)?
1428            }
1429            Operator::Agent(agent) => {
1430                agent.process_transform(transform, &mut self.driver, &mut self.store)?
1431            }
1432        };
1433
1434        if !accepted {
1435            return Err(crate::ActorError::TransformationError);
1436        }
1437
1438        // Capture current state
1439        let from_notes = self.get_notes();
1440        let from_class = self.class();
1441
1442        // Apply transformation
1443        self.driver.transform(transform)?;
1444
1445        // Record transformation in memory
1446        self.store.record_transformation(
1447            from_notes,
1448            from_class,
1449            transform,
1450            self.get_notes(),
1451            self.class(),
1452        );
1453
1454        // Update tonic history
1455        let tonic = self.get_tonic();
1456        self.tonic_history.push(tonic.class());
1457
1458        // Advance time in memory
1459        self.store.next_epoch();
1460
1461        // Invalidate cached context
1462        self.last_context = None;
1463
1464        Ok(())
1465    }
    /// Efficiently apply multiple transformations in batch
    ///
    /// Workflow:
    /// 1. Validate every transform with the active operator — any rejection
    ///    aborts the whole batch before the driver is walked.
    /// 2. Simulate the sequence locally to collect per-step
    ///    `(from_notes, from_class, transform, to_notes, to_class)` records
    ///    and the tonic after each step.
    /// 3. Apply all transforms to the driver in one `walk` call.
    /// 4. Record the batch in memory, extend the tonic history (skipping the
    ///    pre-batch entry), advance the epoch, and invalidate the context.
    ///
    /// # Errors
    ///
    /// Returns `TransformationError` if the operator rejects any transform;
    /// propagates operator and driver failures.
    pub fn transform_batch(&mut self, transforms: &[LPR]) -> crate::Result<()>
    where
        D: TriadDriver,
    {
        // Pre-allocate with capacity instead of growing dynamically
        let mut transform_data = Vec::with_capacity(transforms.len());
        let mut tonic_history = Vec::with_capacity(transforms.len() + 1);

        if transforms.is_empty() {
            return Ok(());
        }

        // Check each transform with the operator; a single rejection aborts
        // the batch before the driver's state is walked
        for &transform in transforms {
            let accepted = match &mut self.operator {
                Operator::Observer(observer) => {
                    observer.process_transform(transform, &mut self.driver, &mut self.store)?
                }
                Operator::Agent(agent) => {
                    agent.process_transform(transform, &mut self.driver, &mut self.store)?
                }
            };

            if !accepted {
                return Err(crate::ActorError::TransformationError);
            }
        }

        // Collect state information before transforms
        let original_notes = self.get_notes();
        let original_class = self.class();

        // Seed the tonic trace with the pre-batch tonic; it is skipped below
        // when extending `self.tonic_history`
        tonic_history.push(self.get_tonic());

        // Current state tracking for the simulation
        let mut current_notes = original_notes;
        let mut current_class = original_class;
        let mut current_triad = *self.driver().headspace();

        // Simulate each transformation without touching the driver yet
        for &transform in transforms {
            // Apply the transformation to our tracking variables
            let next_triad = current_triad.transform(transform);
            let next_notes = next_triad.notes();
            let next_class = next_triad.class();
            let octave = next_triad.octave();

            // Record transformation data (before/after pair + the transform)
            transform_data.push((
                current_notes,
                current_class,
                transform,
                next_notes,
                next_class,
            ));

            // Update current state
            current_notes = next_notes;
            current_class = next_class;
            current_triad = next_triad;

            // Calculate the tonic for the new state: the post-transform
            // triad's root at its octave
            let root = Aspn::new(current_triad.root(), octave);
            tonic_history.push(root);
        }

        // Actually apply all transformations to the plant
        self.driver_mut().walk(transforms)?;

        // Record all transformations in memory at once
        self.store_mut()
            .record_transformations_batch(transform_data);

        // Update tonic history, skipping the pre-batch entry pushed above
        self.tonic_history
            .extend(tonic_history.into_iter().skip(1).map(|t| t.class()));

        // Advance time in memory
        self.store_mut().next_epoch();

        // Invalidate cached context
        self.last_context = None;

        Ok(())
    }
1553    /// Update the surface network when the headspace changes
1554    pub fn update_surface_headspace(&mut self) -> crate::Result<()> {
1555        let headspace = *self.driver().headspace();
1556        if let Some(surface) = &mut self.surface {
1557            surface.reconfigure(headspace);
1558        }
1559        Ok(())
1560    }
1561}
1562
1563impl<D, T> VNode<D, T>
1564where
1565    D: RawDriver<Triad>,
1566    T: Float + FromPrimitive + ScalarOperand,
1567{
1568    /// Find a path to a point using memory and learning
1569    pub(crate) fn find_path_to_point(&self, target_point: usize) -> Option<Vec<LPR>> {
1570        // Check for direct paths in memory
1571        let current_notes = self.driver().headspace().notes();
1572        // TODO: integrate with pre-existing methods to reduce the amount of excess code
1573        // let _path_to_target = self
1574        //     .driver()
1575        //     .headspace()
1576        //     .path_finder()
1577        //     .find_paths_to_target(target_point);
1578
1579        // Look for navigation features that lead to target
1580        let nav_features = self
1581            .store
1582            .features()
1583            .iter()
1584            .filter(|f| {
1585                f.dimension() == 2 && // Navigation features
1586                f.is_alive() &&
1587                f.content().len() > current_notes.len() &&
1588                f.content()[0..current_notes.len()] == current_notes &&
1589                f.content()[current_notes.len()..].contains(&target_point)
1590            })
1591            .collect::<Vec<_>>();
1592
1593        if nav_features.is_empty() {
1594            return None;
1595        }
1596
1597        // Sort by importance
1598        let mut sorted_nav = nav_features.clone();
1599        sorted_nav.sort_by(|a, b| {
1600            b.importance()
1601                .partial_cmp(a.importance())
1602                .unwrap_or(core::cmp::Ordering::Equal)
1603        });
1604
1605        // Extract the navigation path from the best feature
1606        let best_nav = &sorted_nav[0];
1607
1608        // Try to reconstruct the transform sequence
1609        // This is simplified - in a full implementation we'd need to store the actual transforms
1610        let path_hash = best_nav.content().last().cloned()?;
1611        let mut transforms = Vec::new();
1612        let mut hash = path_hash;
1613
1614        while hash > 0 {
1615            let transform_id = hash % 3;
1616            hash /= 3;
1617
1618            let transform = LPR::from(transform_id);
1619
1620            transforms.push(transform);
1621        }
1622
1623        transforms.reverse();
1624
1625        if transforms.is_empty() {
1626            None
1627        } else {
1628            Some(transforms)
1629        }
1630    }
1631    /// generate context for the node
1632    pub(crate) fn generate_context(&self) -> crate::Result<ActorContext<T>>
1633    where
1634        T: core::iter::Sum + NumAssign,
1635    {
1636        self.operator()
1637            .active_operator()
1638            .contextualize(&self.driver, &self.store)
1639    }
1640    /// Interpolate parameters between critical points
1641    pub(crate) fn interpolate_surface_between_critical_points(&mut self) {
1642        // For each pair of critical points, create intermediate parameters
1643        for (name_i, point_i) in &self.critical_points {
1644            for (name_j, point_j) in &self.critical_points {
1645                if name_i >= name_j {
1646                    continue;
1647                } // Skip duplicates
1648
1649                // Calculate distance between points (in pitch class space)
1650                let raw_dist = ((*point_j as isize) - (*point_i as isize)).pmod();
1651                let distance = core::cmp::min(raw_dist, 12 - raw_dist) as usize;
1652
1653                // Skip if too far apart
1654                if distance > 3 {
1655                    continue;
1656                }
1657
1658                // Create interpolated parameters
1659                for k in 1..distance {
1660                    // Calculate interpolated position
1661                    let position = (*point_i + k).pmod();
1662
1663                    // Calculate interpolated value
1664                    let t = k as f32 / distance as f32;
1665                    let value = 0.5 - (t - 0.5).abs(); // Peaked at midpoint
1666
1667                    // Store the interpolated parameter
1668                    self.store
1669                        .learn_surface_parameter(vec![position], T::from_f32(value).unwrap());
1670                }
1671            }
1672        }
1673    }
1674    /// Plan motion to a point using transformations
1675    pub(crate) fn plan_motion_to_point(&self, target_point: usize) -> crate::Result<Vec<LPR>> {
1676        let paths = self
1677            .driver()
1678            .headspace()
1679            .path_finder()
1680            .find_paths_to_target(target_point);
1681
1682        match paths.first() {
1683            Some(chain) => {
1684                // If we found a path, return it
1685                Ok(chain.path().clone())
1686            }
1687            None => {
1688                // If no path found, return an empty vector
1689                Err(crate::ActorError::no_path_found(
1690                    self.driver().headspace(),
1691                    target_point,
1692                ))
1693            }
1694        }
1695    }
1696
1697    // Learn from a successful navigation
1698    // fn learn_from_navigation(&mut self, start_triad: Triad, path: &[LPR], target_point: usize) {
1699    //     // Record the navigation path
1700    //     self.store
1701    //         .record_navigation(start_triad.notes(), self.get_notes(), path);
1702    //     // Calculate reward based on how close we got to target
1703    //     let distance = self.distance_to_point(target_point);
1704    //     // Get the driver to learn from this
1705    //     let driver = &mut self.plant;
1706    //     let reward = if distance == 0 {
1707    //         1.0 // Perfect match
1708    //     } else {
1709    //         T::from_f32(0.8).unwrap() - (distance as f32 * T::from_f32(0.1).unwrap()) // Diminishing reward with distance
1710    //     };
1711
1712    //     // Learn from this execution sequence
1713    //     driver.learn_from_execution(&mut self.store, path.len(), reward);
1714    // }
1715}