// pc_rl_core/serializer.rs
1// Author: Julian Bolivar
2// Version: 1.0.0
3// Date: 2026-03-25
4
5//! JSON-based weight persistence for the PC-Actor-Critic agent.
6//!
7//! Provides save/load for complete agent state (weights, config, metadata)
8//! and checkpoint support with auto-named files.
9//!
10//! Serialization always goes through CPU types (`CpuLinAlg`). Generic agents
11//! convert to/from CPU weights via `to_weights()` / `from_weights()`.
12
13use std::path::{Path, PathBuf};
14
15use chrono::Utc;
16use serde::{Deserialize, Serialize};
17
18use crate::error::PcError;
19use crate::layer::Layer;
20use crate::linalg::LinAlg;
21use crate::mlp_critic::MlpCritic;
22use crate::pc_actor::PcActor;
23use crate::pc_actor_critic::{PcActorCritic, PcActorCriticConfig};
24
25/// Metadata embedded in every save file.
26///
27/// Tracks version, creation timestamp, episode count, and optional
28/// training metrics for provenance.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentMetadata {
    /// Crate version string (populated from `CARGO_PKG_VERSION` at save time).
    pub version: String,
    /// UTC timestamp of when the file was created, in RFC 3339 format.
    pub created: String,
    /// Episode number at time of save.
    pub episode: usize,
    /// Optional training statistics snapshot; `None` when the caller
    /// saved without metrics.
    pub metrics: Option<TrainingMetrics>,
}
40
41/// Training statistics snapshot for inclusion in save files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrainingMetrics {
    /// Fraction of games won.
    pub win_rate: f64,
    /// Fraction of games lost.
    pub loss_rate: f64,
    /// Fraction of games drawn.
    pub draw_rate: f64,
    /// Average surprise score over recent episodes.
    pub avg_surprise: f64,
    /// Current curriculum depth level.
    pub curriculum_depth: usize,
}
55
56/// Serializable weight snapshot for the PC actor.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PcActorWeights {
    /// Layer snapshots in order (hidden layers + output layer).
    pub layers: Vec<Layer>,
    /// ReZero scaling factors for residual skip connections.
    /// `#[serde(default)]` so older save files without this field
    /// deserialize to an empty vec (non-residual agents).
    #[serde(default)]
    pub rezero_alpha: Vec<f64>,
    /// Projection matrices for heterogeneous skip connections
    /// (`None` entries where layer sizes match and no projection is
    /// needed). Defaults to empty for older save files.
    #[serde(default)]
    pub skip_projections: Vec<Option<crate::matrix::Matrix>>,
}
68
69/// Complete save file containing agent state and metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SaveFile {
    /// File metadata (version, timestamp, episode).
    pub metadata: AgentMetadata,
    /// Agent configuration, used to validate topology on load.
    pub config: PcActorCriticConfig,
    /// Actor network weights (CPU snapshot).
    pub actor_weights: PcActorWeights,
    /// Critic network weights (CPU snapshot).
    pub critic_weights: crate::mlp_critic::MlpCriticWeights,
}
81
82/// Saves the agent's full state to a JSON file.
83///
84/// Creates parent directories if they don't exist. Extracts weights
85/// from both actor and critic via `to_weights()`, bundles with config
86/// and metadata, and writes as pretty-printed JSON.
87///
88/// # Arguments
89///
90/// * `agent` - The agent to save (any `LinAlg` backend).
91/// * `path` - File path for the JSON output.
92/// * `episode` - Current episode number.
93/// * `metrics` - Optional training metrics snapshot.
94///
95/// # Errors
96///
97/// Returns `PcError::Io` on file system errors, `PcError::Serialization`
98/// on JSON encoding errors.
99pub fn save_agent<L: LinAlg>(
100    agent: &PcActorCritic<L>,
101    path: &str,
102    episode: usize,
103    metrics: Option<TrainingMetrics>,
104) -> Result<(), PcError> {
105    let save_file = SaveFile {
106        metadata: AgentMetadata {
107            version: env!("CARGO_PKG_VERSION").to_string(),
108            created: Utc::now().to_rfc3339(),
109            episode,
110            metrics,
111        },
112        config: agent.config.clone(),
113        actor_weights: agent.actor.to_weights(),
114        critic_weights: agent.critic.to_weights(),
115    };
116
117    let json = serde_json::to_string_pretty(&save_file)?;
118
119    // Create parent directories if needed
120    let path = Path::new(path);
121    if let Some(parent) = path.parent() {
122        if !parent.as_os_str().is_empty() {
123            std::fs::create_dir_all(parent)?;
124        }
125    }
126
127    std::fs::write(path, json)?;
128    Ok(())
129}
130
131/// Loads an agent from a JSON save file (CPU backend).
132///
133/// Reads the file, deserializes the `SaveFile`, validates that the
134/// topology matches the config, then reconstructs the agent using
135/// `CpuLinAlg` (the default backend).
136///
137/// # Arguments
138///
139/// * `path` - Path to the JSON save file.
140///
141/// # Errors
142///
143/// Returns `PcError::Io` if the file doesn't exist, `PcError::Serialization`
144/// for invalid JSON, or `PcError::DimensionMismatch` if the saved weights
145/// don't match the config topology.
146pub fn load_agent(path: &str) -> Result<(PcActorCritic, AgentMetadata), PcError> {
147    let json = std::fs::read_to_string(path)?;
148    let save_file: SaveFile = serde_json::from_str(&json)?;
149
150    let actor = PcActor::from_weights(save_file.config.actor.clone(), save_file.actor_weights)?;
151    let critic =
152        MlpCritic::from_weights(save_file.config.critic.clone(), save_file.critic_weights)?;
153
154    use rand::SeedableRng;
155    let rng = rand::rngs::StdRng::from_entropy();
156
157    let agent = PcActorCritic::from_parts(save_file.config, actor, critic, rng);
158
159    Ok((agent, save_file.metadata))
160}
161
162/// Loads an agent from a JSON save file with a specific `LinAlg` backend.
163///
164/// Same as [`load_agent`] but reconstructs the agent using the specified
165/// backend type `L`. Weights are deserialized as CPU types and then
166/// converted via `PcActor::<L>::from_weights()` and
167/// `MlpCritic::<L>::from_weights()`.
168///
169/// # Arguments
170///
171/// * `path` - Path to the JSON save file.
172///
173/// # Errors
174///
175/// Returns `PcError::Io` if the file doesn't exist, `PcError::Serialization`
176/// for invalid JSON, or `PcError::DimensionMismatch` if the saved weights
177/// don't match the config topology.
178pub fn load_agent_generic<L: LinAlg>(
179    path: &str,
180) -> Result<(PcActorCritic<L>, AgentMetadata), PcError> {
181    let json = std::fs::read_to_string(path)?;
182    let save_file: SaveFile = serde_json::from_str(&json)?;
183
184    let actor =
185        PcActor::<L>::from_weights(save_file.config.actor.clone(), save_file.actor_weights)?;
186    let critic =
187        MlpCritic::<L>::from_weights(save_file.config.critic.clone(), save_file.critic_weights)?;
188
189    use rand::SeedableRng;
190    let rng = rand::rngs::StdRng::from_entropy();
191
192    let agent = PcActorCritic::from_parts(save_file.config, actor, critic, rng);
193
194    Ok((agent, save_file.metadata))
195}
196
197/// Generates a checkpoint filename with no colons (filesystem-safe).
198///
199/// Format: `checkpoint_ep{N}_{YYYYMMDD_HHMMSS}.json`
200///
201/// # Arguments
202///
203/// * `episode` - Episode number to embed in the filename.
204///
205/// # Examples
206///
207/// ```
208/// use pc_rl_core::serializer::checkpoint_filename;
209///
210/// let name = checkpoint_filename(100);
211/// assert!(name.starts_with("checkpoint_ep100_"));
212/// assert!(name.ends_with(".json"));
213/// assert!(!name.contains(':'));
214/// ```
215pub fn checkpoint_filename(episode: usize) -> String {
216    let now = Utc::now().format("%Y%m%d_%H%M%S");
217    format!("checkpoint_ep{episode}_{now}.json")
218}
219
220/// Saves a checkpoint to a directory with an auto-generated filename.
221///
222/// # Arguments
223///
224/// * `agent` - The agent to checkpoint (any `LinAlg` backend).
225/// * `dir` - Directory where the checkpoint file will be created.
226/// * `episode` - Current episode number.
227/// * `metrics` - Optional training metrics snapshot.
228///
229/// # Returns
230///
231/// The full path to the created checkpoint file.
232///
233/// # Errors
234///
235/// Returns `PcError` on I/O or serialization failures.
236pub fn save_checkpoint<L: LinAlg>(
237    agent: &PcActorCritic<L>,
238    dir: &str,
239    episode: usize,
240    metrics: Option<TrainingMetrics>,
241) -> Result<PathBuf, PcError> {
242    let filename = checkpoint_filename(episode);
243    let path = Path::new(dir).join(filename);
244    let path_str = path.to_string_lossy().to_string();
245    save_agent(agent, &path_str, episode, metrics)?;
246    Ok(path)
247}
248
#[cfg(test)]
mod tests {
    use super::*;
    use crate::activation::Activation;
    use crate::layer::LayerDef;
    use crate::mlp_critic::MlpCriticConfig;
    use crate::pc_actor::PcActorConfig;
    use std::fs;

    /// Baseline config shared by most tests: 9→18→9 tanh actor,
    /// 27-input/36-hidden tanh critic, non-residual.
    fn default_config() -> PcActorCriticConfig {
        PcActorCriticConfig {
            actor: PcActorConfig {
                input_size: 9,
                hidden_layers: vec![LayerDef {
                    size: 18,
                    activation: Activation::Tanh,
                }],
                output_size: 9,
                output_activation: Activation::Tanh,
                alpha: 0.1,
                tol: 0.01,
                min_steps: 1,
                max_steps: 20,
                lr_weights: 0.01,
                synchronous: true,
                temperature: 1.0,
                local_lambda: 1.0,
                residual: false,
                rezero_init: 0.001,
            },
            critic: MlpCriticConfig {
                input_size: 27,
                hidden_layers: vec![LayerDef {
                    size: 36,
                    activation: Activation::Tanh,
                }],
                output_activation: Activation::Linear,
                lr: 0.005,
            },
            gamma: 0.95,
            surprise_low: 0.02,
            surprise_high: 0.15,
            adaptive_surprise: false,
            surprise_buffer_size: 100,
            entropy_coeff: 0.01,
        }
    }

    /// Builds a default-backend agent from the baseline config, seed 42.
    fn make_agent() -> PcActorCritic {
        let agent: PcActorCritic = PcActorCritic::new(default_config(), 42).unwrap();
        agent
    }

    /// Returns a path under a shared temp directory, creating the
    /// directory if needed.
    fn temp_path(name: &str) -> String {
        let dir = std::env::temp_dir().join("pc_core_tests");
        fs::create_dir_all(&dir).unwrap();
        dir.join(name).to_string_lossy().to_string()
    }

    /// Asserts two f64 slices are approximately equal (within 1e-15).
    fn assert_vecs_approx_eq(a: &[f64], b: &[f64]) {
        assert_eq!(
            a.len(),
            b.len(),
            "Lengths differ: {} vs {}",
            a.len(),
            b.len()
        );
        for (i, (va, vb)) in a.iter().zip(b.iter()).enumerate() {
            assert!((va - vb).abs() < 1e-15, "Element {i} differs: {va} vs {vb}");
        }
    }

    /// Save → load must reproduce actor layer weights and biases.
    #[test]
    fn test_roundtrip_preserves_actor_weights() {
        let agent = make_agent();
        let path = temp_path("test_actor_roundtrip.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();
        for (orig, loaded_layer) in agent.actor.layers.iter().zip(loaded.actor.layers.iter()) {
            assert_vecs_approx_eq(&orig.weights.data, &loaded_layer.weights.data);
            assert_vecs_approx_eq(&orig.bias, &loaded_layer.bias);
        }
        let _ = fs::remove_file(&path);
    }

    /// Save → load must reproduce critic layer weights and biases.
    #[test]
    fn test_roundtrip_preserves_critic_weights() {
        let agent = make_agent();
        let path = temp_path("test_critic_roundtrip.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();
        for (orig, loaded_layer) in agent.critic.layers.iter().zip(loaded.critic.layers.iter()) {
            assert_vecs_approx_eq(&orig.weights.data, &loaded_layer.weights.data);
            assert_vecs_approx_eq(&orig.bias, &loaded_layer.bias);
        }
        let _ = fs::remove_file(&path);
    }

    /// Save → load must reproduce the agent configuration fields.
    #[test]
    fn test_roundtrip_preserves_config() {
        let agent = make_agent();
        let path = temp_path("test_config_roundtrip.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();
        assert_eq!(loaded.config.gamma, agent.config.gamma);
        assert_eq!(
            loaded.config.actor.input_size,
            agent.config.actor.input_size
        );
        assert_eq!(
            loaded.config.critic.input_size,
            agent.config.critic.input_size
        );
        assert_eq!(loaded.config.entropy_coeff, agent.config.entropy_coeff);
        let _ = fs::remove_file(&path);
    }

    /// Metadata in the save file must carry version, episode, timestamp.
    #[test]
    fn test_metadata_includes_version_and_episode() {
        let agent = make_agent();
        let path = temp_path("test_metadata.json");
        save_agent(&agent, &path, 42, None).unwrap();
        let (_, metadata) = load_agent(&path).unwrap();
        assert!(!metadata.version.is_empty());
        assert_eq!(metadata.episode, 42);
        assert!(!metadata.created.is_empty());
        let _ = fs::remove_file(&path);
    }

    /// Checkpoint names must avoid ':' so they're valid on all file systems.
    #[test]
    fn test_checkpoint_filename_no_colons() {
        let name = checkpoint_filename(100);
        assert!(!name.contains(':'), "Filename contains colons: {name}");
    }

    /// Checkpoint names must embed the episode and end in `.json`.
    #[test]
    fn test_checkpoint_filename_contains_episode_number() {
        let name = checkpoint_filename(42);
        assert!(
            name.contains("ep42"),
            "Filename doesn't contain episode number: {name}"
        );
        assert!(name.ends_with(".json"));
    }

    /// Loading a missing file must surface `PcError::Io`.
    #[test]
    fn test_load_nonexistent_returns_error() {
        let result = load_agent("/nonexistent/path/agent.json");
        assert!(result.is_err());
        let err = result.err().unwrap();
        assert!(
            matches!(err, PcError::Io(_)),
            "Expected PcError::Io, got: {err}"
        );
    }

    /// Loading malformed JSON must surface `PcError::Serialization`.
    #[test]
    fn test_load_invalid_json_returns_error() {
        let path = temp_path("test_invalid.json");
        fs::write(&path, "not valid json {{{").unwrap();
        let result = load_agent(&path);
        assert!(result.is_err());
        let err = result.err().unwrap();
        assert!(
            matches!(err, PcError::Serialization(_)),
            "Expected PcError::Serialization, got: {err}"
        );
        let _ = fs::remove_file(&path);
    }

    /// A config/weights topology mismatch must surface `DimensionMismatch`.
    #[test]
    fn test_load_topology_mismatch_returns_error() {
        let agent = make_agent();
        let path = temp_path("test_mismatch.json");
        save_agent(&agent, &path, 0, None).unwrap();

        // Tamper: read JSON, change actor layer count in config
        let json = fs::read_to_string(&path).unwrap();
        let mut save_file: SaveFile = serde_json::from_str(&json).unwrap();
        // Add an extra hidden layer to config (but not weights)
        save_file.config.actor.hidden_layers.push(LayerDef {
            size: 10,
            activation: Activation::Relu,
        });
        let tampered = serde_json::to_string_pretty(&save_file).unwrap();
        fs::write(&path, tampered).unwrap();

        let result = load_agent(&path);
        assert!(result.is_err());
        let err = result.err().unwrap();
        assert!(
            matches!(err, PcError::DimensionMismatch { .. }),
            "Expected PcError::DimensionMismatch, got: {err}"
        );
        let _ = fs::remove_file(&path);
    }

    /// Loading must seed the RNG from entropy, not a fixed value, so two
    /// loads of the same file explore differently.
    #[test]
    fn test_load_agent_uses_entropy_seed_not_fixed() {
        let agent = make_agent();
        let path = temp_path("test_seed_entropy.json");
        save_agent(&agent, &path, 10, None).unwrap();

        let (mut loaded1, _) = load_agent(&path).unwrap();
        let (mut loaded2, _) = load_agent(&path).unwrap();

        // Both agents should produce different action sequences
        // because they use entropy-based RNG seeding
        let input = vec![0.5; 9];
        let valid: Vec<usize> = (0..9).collect();

        let mut actions1 = Vec::new();
        let mut actions2 = Vec::new();
        for _ in 0..20 {
            let (a1, _) = loaded1.act(&input, &valid, crate::pc_actor::SelectionMode::Training);
            let (a2, _) = loaded2.act(&input, &valid, crate::pc_actor::SelectionMode::Training);
            actions1.push(a1);
            actions2.push(a2);
        }

        assert_ne!(
            actions1, actions2,
            "Two loaded agents should have different exploration due to entropy seeding"
        );
        let _ = fs::remove_file(&path);
    }

    /// Deterministic inference must be bit-for-bit preserved across
    /// a save/load round trip.
    #[test]
    fn test_loaded_agent_produces_identical_inference() {
        let agent = make_agent();
        let path = temp_path("test_identical_infer.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();

        let input = vec![0.5, -0.5, 1.0, -1.0, 0.0, 0.5, -0.5, 1.0, -1.0];
        let orig_result = agent.infer(&input);
        let loaded_result = loaded.infer(&input);

        // y_conv must be identical
        assert_eq!(orig_result.y_conv.len(), loaded_result.y_conv.len());
        for (a, b) in orig_result.y_conv.iter().zip(loaded_result.y_conv.iter()) {
            assert!((a - b).abs() < 1e-12, "y_conv differs: {a} vs {b}");
        }
        // latent_concat must be identical
        for (a, b) in orig_result
            .latent_concat
            .iter()
            .zip(loaded_result.latent_concat.iter())
        {
            assert!((a - b).abs() < 1e-12, "latent_concat differs: {a} vs {b}");
        }
        let _ = fs::remove_file(&path);
    }

    /// `save_agent` must create missing parent directories itself.
    #[test]
    fn test_save_creates_parent_directories() {
        let dir = std::env::temp_dir()
            .join("pc_core_tests")
            .join("nested")
            .join("deep");
        let path = dir.join("agent.json").to_string_lossy().to_string();

        // Remove if exists from prior run
        let _ = fs::remove_dir_all(&dir);

        let agent = make_agent();
        save_agent(&agent, &path, 0, None).unwrap();
        assert!(Path::new(&path).exists());

        // Cleanup
        let _ = fs::remove_dir_all(std::env::temp_dir().join("pc_core_tests").join("nested"));
    }

    /// Trained (non-initial) rezero_alpha values must survive a round trip,
    /// not be reset to `rezero_init`.
    #[test]
    fn test_roundtrip_preserves_modified_rezero_alpha() {
        use crate::pc_actor::SelectionMode;
        let config = PcActorCriticConfig {
            actor: PcActorConfig {
                residual: true,
                rezero_init: 0.005,
                hidden_layers: vec![
                    LayerDef {
                        size: 27,
                        activation: Activation::Tanh,
                    },
                    LayerDef {
                        size: 27,
                        activation: Activation::Tanh,
                    },
                ],
                ..default_config().actor
            },
            critic: MlpCriticConfig {
                input_size: 63,
                ..default_config().critic
            },
            ..default_config()
        };
        let mut agent: PcActorCritic = PcActorCritic::new(config, 42).unwrap();
        // Train one step to modify rezero_alpha
        let input = vec![0.5; 9];
        let valid: Vec<usize> = (0..9).collect();
        let (action, infer) = agent.act(&input, &valid, SelectionMode::Training);
        let trajectory = vec![crate::pc_actor_critic::TrajectoryStep {
            input: input.clone(),
            latent_concat: infer.latent_concat,
            y_conv: infer.y_conv,
            hidden_states: infer.hidden_states,
            prediction_errors: infer.prediction_errors,
            tanh_components: infer.tanh_components,
            action,
            valid_actions: valid,
            reward: 1.0,
            surprise_score: infer.surprise_score,
            steps_used: infer.steps_used,
        }];
        agent.learn(&trajectory);
        let alpha_after_train = agent.actor.rezero_alpha.clone();
        // Alpha should have changed from init
        assert_ne!(alpha_after_train, vec![0.005]);

        let path = temp_path("test_rezero_roundtrip.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();
        assert_eq!(
            loaded.actor.rezero_alpha, alpha_after_train,
            "Loaded rezero_alpha should match trained value, not rezero_init"
        );
        let _ = fs::remove_file(&path);
    }

    /// Non-residual agents (empty rezero_alpha) must round-trip cleanly,
    /// exercising the `#[serde(default)]` back-compat path.
    #[test]
    fn test_roundtrip_non_residual_backward_compat() {
        let agent = make_agent();
        assert!(agent.actor.rezero_alpha.is_empty());

        let path = temp_path("test_nonresidual_compat.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();
        assert!(loaded.actor.rezero_alpha.is_empty());
        let _ = fs::remove_file(&path);
    }

    /// `load_agent_generic::<CpuLinAlg>` must behave exactly like
    /// `load_agent` for the same file.
    #[test]
    fn test_load_agent_generic_matches_load_agent() {
        let agent = make_agent();
        let path = temp_path("test_generic_load.json");
        save_agent(&agent, &path, 10, None).unwrap();

        let (loaded_default, _) = load_agent(&path).unwrap();
        let (loaded_generic, _) =
            load_agent_generic::<crate::linalg::cpu::CpuLinAlg>(&path).unwrap();

        let input = vec![0.5, -0.5, 1.0, -1.0, 0.0, 0.5, -0.5, 1.0, -1.0];
        let r1 = loaded_default.infer(&input);
        let r2 = loaded_generic.infer(&input);

        for (a, b) in r1.y_conv.iter().zip(r2.y_conv.iter()) {
            assert!((a - b).abs() < 1e-15, "y_conv differs: {a} vs {b}");
        }
        let _ = fs::remove_file(&path);
    }

    /// Trained skip-projection matrices (needed when adjacent layer sizes
    /// differ, here 27→18) must survive a round trip element-for-element.
    #[test]
    fn test_roundtrip_preserves_skip_projections_directly() {
        use crate::pc_actor::SelectionMode;
        let config = PcActorCriticConfig {
            actor: PcActorConfig {
                residual: true,
                rezero_init: 0.005,
                hidden_layers: vec![
                    LayerDef {
                        size: 27,
                        activation: Activation::Tanh,
                    },
                    LayerDef {
                        size: 18,
                        activation: Activation::Tanh,
                    },
                ],
                ..default_config().actor
            },
            critic: MlpCriticConfig {
                input_size: 54,
                ..default_config().critic
            },
            ..default_config()
        };
        let mut agent: PcActorCritic = PcActorCritic::new(config, 42).unwrap();
        // Train to modify projection weights
        let input = vec![0.5; 9];
        let valid: Vec<usize> = (0..9).collect();
        let (action, infer) = agent.act(&input, &valid, SelectionMode::Training);
        let trajectory = vec![crate::pc_actor_critic::TrajectoryStep {
            input: input.clone(),
            latent_concat: infer.latent_concat,
            y_conv: infer.y_conv,
            hidden_states: infer.hidden_states,
            prediction_errors: infer.prediction_errors,
            tanh_components: infer.tanh_components,
            action,
            valid_actions: valid,
            reward: 1.0,
            surprise_score: infer.surprise_score,
            steps_used: infer.steps_used,
        }];
        agent.learn(&trajectory);

        // Verify projection exists (27→18 requires projection)
        assert!(agent.actor.skip_projections[0].is_some());
        let orig_proj = agent.actor.skip_projections[0].as_ref().unwrap();
        let orig_data = orig_proj.data.clone();

        let path = temp_path("test_skip_proj_roundtrip.json");
        save_agent(&agent, &path, 10, None).unwrap();
        let (loaded, _) = load_agent(&path).unwrap();

        let loaded_proj = loaded.actor.skip_projections[0].as_ref().unwrap();
        assert_eq!(orig_data.len(), loaded_proj.data.len());
        for (i, (a, b)) in orig_data.iter().zip(loaded_proj.data.iter()).enumerate() {
            assert!(
                (a - b).abs() < 1e-15,
                "skip_projection element {i} differs: {a} vs {b}"
            );
        }
        let _ = fs::remove_file(&path);
    }
}