eryon 0.0.3

eryon is a harmonic, topological framework for managing computational entities at scale.
It models musical triads as cognitive nodes that learn, transform, and share patterns.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
/*
    Appellation: cognitive_composer <example>
    Contrib: @FL03
*/
//! This example demonstrates a cognitive music composition system that:
//!     1. Uses a network of triads to create a musical space
//!     2. Learns patterns and transformations between triads
//!     3. Transfers knowledge between similar triadic contexts
//!     4. Predicts and generates novel musical progressions
//!     5. Demonstrates the emergent cognitive capabilities of the substrate
use eryon::prelude::{NeuralPlant, Runtime, SurfaceNetwork, TaskType, VNode};
use rstmt::prelude::{LPR, PitchMod, Triad, Triads};
use tracing::info;

#[allow(unused_variables)]
/// Entry point for the cognitive-composer example.
///
/// Builds a small harmonic space of eight triads inside a `Runtime<NeuralPlant>`,
/// establishes LPR transformation patterns by repetition, consolidates memory,
/// generates a novel progression from learned predictions, and finally prints
/// memory/efficiency reports. Returns `Err` if any runtime task fails.
fn main() -> anyhow::Result<()> {
    // Initialize tracing for better logging
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();

    info!("=== Cognitive Composer ===");
    info!("Initializing system...");

    // Create a runtime to manage our cognitive system
    let mut runtime = Runtime::<NeuralPlant>::new();

    // Step 1: Create a network of triads representing a musical space
    info!("Creating harmonic space with multiple triads...");

    // Create a set of foundational triads covering different harmonic functions.
    // The integer argument is the root pitch class (0 = C, 2 = D, ... 11 = B).
    let c_major_id = runtime.add_triad(Triad::major(0))?; // C major (tonic)
    let a_minor_id = runtime.add_triad(Triad::minor(9))?; // A minor (relative minor)
    let f_major_id = runtime.add_triad(Triad::major(5))?; // F major (subdominant)
    let g_major_id = runtime.add_triad(Triad::major(7))?; // G major (dominant)
    let d_minor_id = runtime.add_triad(Triad::minor(2))?; // D minor
    let e_minor_id = runtime.add_triad(Triad::minor(4))?; // E minor

    // Add some extended harmony chords
    let b_dim_id = runtime.add_triad(Triad::diminished(11))?; // B diminished
    let c_aug_id = runtime.add_triad(Triad::augmented(0))?; // C augmented

    println!("Created harmonic space with 8 triads.");

    // Step 2: Initialize surface networks for all nodes
    info!("Initializing surface networks...");
    runtime.init_node_surfaces()?;

    // Enhance surface networks with domain knowledge by seeding each node's
    // surface weights (see `initialize_surface` below).
    for node_id in &[
        c_major_id, a_minor_id, f_major_id, g_major_id, d_minor_id, e_minor_id, b_dim_id, c_aug_id,
    ] {
        if let Some(node) = runtime.fragment_mut().get_vnode_mut(node_id) {
            if let Some(surface) = node.surface_mut() {
                initialize_surface(surface);
            }
        }
    }

    // Step 3: Train each node on patterns specific to its triad class
    info!("Training nodes with class-specific patterns...");

    // NOTE(review): the actual training call below is commented out, so
    // `inputs`/`targets` are currently unused (covered by #[allow(unused_variables)]).
    for node_id in &[c_major_id, a_minor_id, f_major_id, g_major_id] {
        if let Some(node) = runtime.fragment_mut().get_vnode_mut(node_id) {
            let class = node.class();
            let (inputs, targets) = generate_training_data(class);

            // Train the node
            // node.learn_pattern(&inputs, &targets, 500)?;
            // println!("Trained {} node with {} patterns", class, inputs.len());
        }
    }

    // Step 4: Establish patterns through repetition
    info!("Establishing transformation patterns...");

    // Create a common progression starting from C major
    let c_major_pattern = vec![LPR::Relative, LPR::Parallel, LPR::Relative];

    // Apply the pattern multiple times so the runtime can recognize it as a
    // recurring sequence.
    for _ in 0..3 {
        let batch_task = runtime.schedule_task(
            TaskType::BatchTransform(vec![(c_major_id, c_major_pattern.clone())]),
            8, // High priority
        );
        runtime.execute_task(batch_task)?;
    }

    // Create a different pattern starting from A minor
    let a_minor_pattern = vec![LPR::Parallel, LPR::Relative, LPR::Leading];

    // Apply the pattern multiple times to establish it
    for _ in 0..3 {
        let batch_task = runtime.schedule_task(
            TaskType::BatchTransform(vec![(a_minor_id, a_minor_pattern.clone())]),
            8, // High priority
        );
        runtime.execute_task(batch_task)?;
    }

    // Step 5: Detect patterns and optimize memory
    info!("Detecting and consolidating patterns...");

    // Schedule pattern detection
    // NOTE(review): pattern detection is currently disabled; only memory
    // optimization below actually runs in this step.
    // let pattern_task = runtime.schedule_task(
    //     TaskType::DetectPatterns,
    //     7, // Medium-high priority
    // );
    // runtime.execute_task(pattern_task)?;

    // Schedule memory optimization
    let optimize_task = runtime.schedule_task(
        TaskType::OptimizeMemory { max_features: 200 },
        6, // Medium priority
    );
    runtime.execute_task(optimize_task)?;

    // Step 6: Coordinate knowledge across nodes
    info!("Coordinating knowledge across nodes...");

    // Schedule knowledge coordination
    let coord_task = runtime.schedule_task(
        TaskType::CoordinateLearning,
        9, // High priority
    );
    runtime.execute_task(coord_task)?;

    // Step 7: Analyze current patterns and memory state
    info!("Analyzing memory state...");

    // Display memory analysis for key nodes
    if let Some(c_node) = runtime.fragment().get_vnode(&c_major_id) {
        println!("{}", analyze_memory(c_node));
    }

    if let Some(a_node) = runtime.fragment().get_vnode(&a_minor_id) {
        println!("{}", analyze_memory(a_node));
    }

    // Step 8: Generate novel progression based on learned patterns
    info!("Generating novel progressions based on learned patterns...");

    // Start from C major and generate a progression.
    // NOTE(review): `current_id` is never reassigned — each transform task
    // presumably mutates the node in place, so the progression tracks the same
    // node's evolving headspace. Confirm against Runtime::execute_task.
    let current_id = c_major_id;
    let mut progression = Vec::new();

    // Add the starting triad
    if let Some(node) = runtime.get_vnode(&current_id) {
        progression.push(*node.headspace());
    }

    // Generate a sequence of transformations based on learned patterns
    for _ in 0..6 {
        if let Some(node) = runtime.get_vnode_mut(&current_id) {
            // Try to predict next transformation based on patterns
            if let Some(next_transform) = node.predict_next_transformation() {
                // Apply the predicted transformation
                let transform_task = runtime.schedule_task(
                    TaskType::transform(current_id, next_transform),
                    10, // Highest priority
                );
                runtime.execute_task(transform_task)?;

                // Add the resulting triad to our progression
                if let Some(updated_node) = runtime.fragment().get_vnode(&current_id) {
                    progression.push(*updated_node.headspace());
                }
            } else {
                // If no prediction available, use a reasonable default
                // transformation per triad class.
                let default_transform = match node.class() {
                    Triads::Major => LPR::Relative,
                    Triads::Minor => LPR::Parallel,
                    Triads::Augmented => LPR::Leading,
                    Triads::Diminished => LPR::Relative,
                };

                let transform_task = runtime.schedule_task(
                    TaskType::transform(current_id, default_transform),
                    10, // Highest priority
                );
                runtime.execute_task(transform_task)?;

                // Add the resulting triad to our progression
                if let Some(updated_node) = runtime.fragment().get_vnode(&current_id) {
                    progression.push(*updated_node.headspace());
                }
            }
        }
    }

    // Visualize the generated progression
    info!("Generated progression based on learned patterns:");
    visualize_progression(&progression);

    // Step 9: Demonstrate cross-class learning
    info!("Demonstrating cross-class pattern adaptation...");

    // Transfer learning between major and minor contexts
    if let Some(c_node) = runtime.get_vnode_mut(&c_major_id) {
        println!("\nAdapting patterns from Minor to Major context...");
        c_node.adapt_stability_patterns(Triads::Minor)?;

        // Test how the adaptation affects prediction
        println!("Testing stability prediction after adaptation:");

        // Test with root emphasis (should be influenced by minor pattern)
        // NOTE(review): the prediction call is commented out, so `_input` is
        // currently unused.
        let _input = [0.7, 0.2, 0.1]; // Root emphasis
        // if let Some(stability) = c_node.process_surface(&input) {
        //     println!("  Root emphasis stability in C Major: {:.2}", stability);
        // }
    }
    // let target = runtime.find_node_mut(c_major_id);
    // let source = runtime.find_node(a_minor_id);
    // if let (Some(c_node), Some(a_node)) = (target, source) {
    //     println!("\nAdapting patterns from Minor to Major context...");
    //     c_node.adapt_stability_patterns(TriadClass::Minor)?;

    //     // Test how the adaptation affects prediction
    //     println!("Testing stability prediction after adaptation:");

    //     // Test with root emphasis (should be influenced by minor pattern)
    //     let input = [0.7, 0.2, 0.1]; // Root emphasis
    //     if let Some(stability) = c_node.process_surface(&input) {
    //         println!("  Root emphasis stability in C Major: {:.2}", stability);
    //     }
    // }

    // Step 10: Final analysis
    info!("Analyzing and reporting on the cognitive system...");

    // Display efficiency statistics
    println!("\n--- System Efficiency Statistics ---");
    println!("Total tasks processed: {}", runtime.completed_tasks().len());

    // Extract and display transformation frequency across chord classes
    if let Some(c_node) = runtime.fragment().get_vnode(&c_major_id) {
        let transform_stats = c_node.store().analyze_transformation_by_class();

        println!("\n--- Transformation Frequency Analysis ---");
        for (class, transforms) in transform_stats {
            println!("{:?}:", class);
            for (transform, freq) in transforms {
                // Only report transformations that actually occurred.
                if freq > 0.0 {
                    println!("  {:?}: {:.1}%", transform, freq * 100.0);
                }
            }
        }
    }

    info!("Cognitive Composer example completed successfully");
    Ok(())
}

/// Initialize a surface network with domain-specific knowledge.
///
/// Seeds the surface's input-layer weights with hand-chosen values encoding
/// tonal-stability heuristics (root emphasis → stability, third → character,
/// fifth → tension), so the network starts from musically meaningful priors
/// rather than random weights.
fn initialize_surface(surface: &mut SurfaceNetwork<f32>) {
    use ndarray::{array, s};
    // Set initial weights to represent tonal stability relationships
    let initial_weights = ndarray::array![
        // Primary weights (input → critical points)
        [0.8, 0.3, 0.2], // Root emphasis → stability
        [0.2, 0.7, 0.1], // Third emphasis → color/character
        [0.3, 0.1, 0.6], // Fifth emphasis → tension
        // Additional critical points as needed
        [0.5, 0.5, 0.2], // Balanced input → moderate stability
        [0.2, 0.2, 0.8], // Fifth emphasis → dominant function
    ];

    // Secondary weights (critical points → output)
    let secondary_weights = array![
        0.7, 0.3, -0.2, 0.4, 0.1, // Weights to tonic stability output
    ];
    // Write the 5x3 primary weight block into the input layer.
    surface
        .input_mut()
        .weights_mut()
        .slice_mut(s![0..5, 0..3])
        .assign(&initial_weights);

    // NOTE(review): this second assignment also targets `input_mut()`, writing
    // the 5 secondary weights into row 0 (cols 0..5) of the *input* layer —
    // overwriting the first three values of row 0 assigned just above, despite
    // the comment saying these are "critical points → output" weights. This
    // looks like it was meant to target an output layer (e.g. `output_mut()`)
    // — TODO confirm against the SurfaceNetwork API. It also implies the input
    // weight matrix is at least 5x5; verify.
    surface
        .input_mut()
        .weights_mut()
        .slice_mut(s![0, 0..5])
        .assign(&secondary_weights);
}

/// Generate training data for a specific triad class.
///
/// Returns parallel vectors of `(input, target)` samples, where each input is
/// a 3-element emphasis profile `[root, third, fifth]` and each target is the
/// expected stability value for that profile. The first three samples are
/// shared across all classes; the final two mixed-emphasis probes carry
/// class-specific stability targets.
fn generate_training_data(triad_class: Triads) -> (Vec<[f32; 3]>, Vec<f32>) {
    // Common stability profile shared by every triad class:
    // strong root → very stable, third → moderate, fifth → weaker.
    let mut samples: Vec<([f32; 3], f32)> = vec![
        ([0.8, 0.1, 0.1], 0.9), // Root emphasis - high stability
        ([0.1, 0.8, 0.1], 0.6), // Third emphasis - moderate stability
        ([0.1, 0.1, 0.8], 0.4), // Fifth emphasis - lower stability
    ];

    // Class-specific targets for the root-third and root-fifth probes:
    // major triads are most stable, minor slightly less, diminished tense,
    // augmented least stable of all.
    let (root_third_target, root_fifth_target) = match triad_class {
        Triads::Major => (0.8, 0.7),
        Triads::Minor => (0.7, 0.6),
        Triads::Augmented => (0.3, 0.2),
        Triads::Diminished => (0.4, 0.3),
    };
    samples.push(([0.6, 0.3, 0.1], root_third_target)); // Root-third emphasis
    samples.push(([0.5, 0.1, 0.4], root_fifth_target)); // Root-fifth emphasis

    // Split the paired samples back into the (inputs, targets) shape callers expect.
    samples.into_iter().unzip()
}

/// Create a progression of transformations that forms a coherent pattern
fn _create_transformation_pattern(start_from: &Triad) -> Vec<LPR> {
    // Create some common musical patterns like:
    // - Circle progression (R→P→R→P)
    // - Deceptive cadence (P→L)
    // - Modal mixture (P→P→R)

    match start_from.class() {
        Triads::Major => {
            // Major to relative minor to dominant to tonic (common progression)
            vec![LPR::Relative, LPR::Leading, LPR::Relative]
        }
        Triads::Minor => {
            // Minor to parallel major to subdominant (modal mixture)
            vec![LPR::Parallel, LPR::Relative, LPR::Parallel]
        }
        Triads::Augmented => {
            // Augmented triads often resolve inward
            vec![LPR::Leading, LPR::Parallel]
        }
        Triads::Diminished => {
            // Diminished triads often resolve outward
            vec![LPR::Relative, LPR::Leading]
        }
    }
}

/// Visualize a progression of triads as a box-drawn table.
///
/// Prints one row per triad with its class, notes, and a short role
/// description inferred from its position in the progression (tonic start,
/// final resolution, relative minor, etc.).
fn visualize_progression(triads: &[Triad]) {
    println!("\n┌─────────────────────────────────────┐");
    println!("│       Harmonic Progression          │");
    println!("├─────────┬──────────┬────────────────┤");
    println!("│  Class  │  Notes   │  Description   │");
    println!("├─────────┼──────────┼────────────────┤");

    for (i, triad) in triads.iter().enumerate() {
        let class_str = format!("{:?}", triad.class());
        let notes_str = format!("{:?}", triad.notes());

        // Generate a simple description
        let desc = match triad.class() {
            Triads::Major if i == 0 => "Start (tonic)",
            Triads::Major if i == triads.len() - 1 => "Resolution",
            Triads::Major => "Major harmony",
            // Relative minor sits a major sixth (9 semitones, mod 12) above
            // the progression's starting root.
            Triads::Minor if triad.root() == (triads[0].root() + 9_usize).pmod() => {
                "Relative minor"
            }
            Triads::Minor => "Minor color",
            Triads::Augmented => "Tension",
            Triads::Diminished => "Passing chord",
        };

        // BUG FIX: the row previously printed as "{:<7}{:<8} │ ..." — missing
        // the leading "│ " border and the Class/Notes column separator, so
        // data rows did not line up with the table header and borders. The
        // widths (7, 8, 14) plus padding now match the 9/10/16-char columns.
        println!("│ {:<7} │ {:<8} │ {:<14} │", class_str, notes_str, desc);
    }

    println!("└─────────┴──────────┴────────────────┘");
}

/// Analyze the memory state of a node.
///
/// Builds a human-readable report containing the node's class, headline
/// memory counters (active features, pattern count, relationship count), and
/// up to three of its most common transformation patterns decoded into
/// L/P/R letters.
fn analyze_memory(node: &VNode<NeuralPlant>) -> String {
    let stats = node.get_memory_statistics();

    // Header plus the three headline counters.
    let mut report = format!("\n--- Memory Analysis for {:?} ---\n", node.class());
    report += &format!("Active features: {}\n", stats.active_features());
    report += &format!("Total patterns: {}\n", stats.pattern_count());
    report += &format!(
        "Relationship count: {}\n",
        stats.relationship_count()
    );

    let common = stats.most_common_patterns();
    if !common.is_empty() {
        report += "\nCommon transformation patterns:\n";
        // Show at most the three most frequent patterns.
        for (rank, (pattern, occurrences)) in common.iter().enumerate().take(3) {
            // Decode stored transformation ids back into LPR letters.
            let decoded: String = pattern
                .iter()
                .map(|&id| match id {
                    0 => "L",
                    1 => "P",
                    2 => "R",
                    _ => "?",
                })
                .collect();

            report += &format!(
                "  {}: {} (occurrences: {})\n",
                rank + 1,
                decoded,
                occurrences
            );
        }
    }

    report
}