1pub mod ann;
67pub mod auto_discovery;
68pub mod features;
69pub mod gpu_acceleration;
70pub mod license;
71pub mod lichess_loader;
72pub mod lsh;
73pub mod manifold_learner;
74pub mod nnue;
75pub mod opening_book;
76pub mod persistence;
77pub mod position_encoder;
78pub mod similarity_search;
79pub mod streaming_loader;
80pub mod tactical_search;
81pub mod training;
82pub mod ultra_fast_loader;
83pub mod variational_autoencoder;
84pub mod uci;
86
87pub use auto_discovery::{AutoDiscovery, FormatPriority, TrainingFile};
88pub use features::{FeatureChecker, FeatureError, FeatureRegistry, FeatureTier};
89pub use gpu_acceleration::{DeviceType, GPUAccelerator};
90pub use license::{
91 LicenseError, LicenseKey, LicenseStatus, LicenseVerifier, LicensedFeatureChecker,
92};
93pub use lichess_loader::{load_lichess_puzzles_basic, load_lichess_puzzles_premium, LichessLoader};
94pub use lsh::LSH;
95pub use manifold_learner::ManifoldLearner;
96pub use nnue::{BlendStrategy, EvalStats, HybridEvaluator, NNUEConfig, NNUE};
97pub use opening_book::{OpeningBook, OpeningBookStats, OpeningEntry};
98pub use persistence::{Database, LSHTableData, PositionData};
99pub use position_encoder::PositionEncoder;
100pub use similarity_search::SimilaritySearch;
101pub use streaming_loader::StreamingLoader;
102pub use tactical_search::{TacticalConfig, TacticalResult, TacticalSearch};
103pub use training::{
104 EngineEvaluator, GameExtractor, SelfPlayConfig, SelfPlayTrainer, TacticalPuzzle,
105 TacticalPuzzleParser, TacticalTrainingData, TrainingData, TrainingDataset,
106};
107pub use ultra_fast_loader::{LoadingStats, UltraFastLoader};
108pub use variational_autoencoder::{VAEConfig, VariationalAutoencoder};
109pub use uci::{run_uci_engine, run_uci_engine_with_config, UCIConfig, UCIEngine};
111
112use chess::{Board, ChessMove};
113use ndarray::{Array1, Array2};
114use serde_json::Value;
115use std::collections::HashMap;
116use std::path::Path;
117use std::str::FromStr;
118
119fn move_centrality(chess_move: &ChessMove) -> f32 {
122 let dest_square = chess_move.get_dest();
123 let rank = dest_square.get_rank().to_index() as f32;
124 let file = dest_square.get_file().to_index() as f32;
125
126 let center_rank = 3.5;
128 let center_file = 3.5;
129
130 let rank_distance = (rank - center_rank).abs();
131 let file_distance = (file - center_file).abs();
132
133 let max_distance = 3.5; let distance = (rank_distance + file_distance) / 2.0;
136 max_distance - distance
137}
138
/// A move suggested on the basis of similar known positions.
#[derive(Debug, Clone)]
pub struct MoveRecommendation {
    /// The recommended move.
    pub chess_move: ChessMove,
    /// Confidence score for this recommendation (semantics set by the producer).
    pub confidence: f32,
    /// How many similar positions contributed to the recommendation.
    pub from_similar_position_count: usize,
    /// Mean outcome/evaluation observed across the contributing positions.
    pub average_outcome: f32,
}
147
/// Snapshot of the engine's knowledge-base state, as reported by
/// `ChessVectorEngine::training_stats`.
#[derive(Debug, Clone)]
pub struct TrainingStats {
    /// Positions stored in the similarity search index.
    pub total_positions: usize,
    /// Boards tracked in the engine's board list.
    pub unique_positions: usize,
    /// True when at least one position has associated move data.
    pub has_move_data: bool,
    /// Number of positions with move data entries.
    pub move_data_entries: usize,
    /// Whether approximate (LSH) search is active.
    pub lsh_enabled: bool,
    /// Whether manifold-compressed search is active.
    pub manifold_enabled: bool,
    /// Whether an opening book is loaded.
    pub opening_book_enabled: bool,
}
159
/// Tuning knobs for blending pattern-based and tactical evaluation
/// in `ChessVectorEngine::evaluate_position`.
#[derive(Debug, Clone)]
pub struct HybridConfig {
    /// Below this pattern confidence, tactical refinement kicks in.
    pub pattern_confidence_threshold: f32,
    /// Master switch for tactical refinement.
    pub enable_tactical_refinement: bool,
    /// Configuration forwarded to the tactical searcher.
    pub tactical_config: TacticalConfig,
    /// Base weight given to the pattern evaluation when blending.
    pub pattern_weight: f32,
    /// Similar-position count at which confidence scaling saturates.
    pub min_similar_positions: usize,
}
174
175impl Default for HybridConfig {
176 fn default() -> Self {
177 Self {
178 pattern_confidence_threshold: 0.8,
179 enable_tactical_refinement: true,
180 tactical_config: TacticalConfig::default(),
181 pattern_weight: 0.7, min_similar_positions: 3,
183 }
184 }
185}
186
/// Core engine combining vector-similarity position retrieval with optional
/// LSH indexing, manifold compression, an opening book, persistence, and
/// tactical search.
pub struct ChessVectorEngine {
    // Converts boards into fixed-size feature vectors.
    encoder: PositionEncoder,
    // Exact (linear) similarity search over all stored vectors.
    similarity_search: SimilaritySearch,
    // Optional approximate index; consulted when `use_lsh` is true.
    lsh_index: Option<LSH>,
    // Optional dimensionality-reduction model; used when `use_manifold` is true.
    manifold_learner: Option<ManifoldLearner>,
    use_lsh: bool,
    use_manifold: bool,
    // Move data per position — presumably keyed by position index; verify against producers.
    position_moves: HashMap<usize, Vec<(ChessMove, f32)>>,
    // Similarity search over manifold-compressed vectors.
    manifold_similarity_search: Option<SimilaritySearch>,
    // LSH index over manifold-compressed vectors.
    manifold_lsh_index: Option<LSH>,
    // Gates functionality by feature tier.
    feature_checker: FeatureChecker,
    // Present only when a license has been applied.
    licensed_feature_checker: Option<LicensedFeatureChecker>,
    // Parallel arrays: vector, board, and evaluation at index i describe the same position.
    position_vectors: Vec<Array1<f32>>,
    position_boards: Vec<Board>,
    position_evaluations: Vec<f32>,
    // Optional opening book consulted before any similarity search.
    opening_book: Option<OpeningBook>,
    // Optional on-disk persistence backend.
    database: Option<Database>,
    // Optional tactical searcher used for hybrid evaluation.
    tactical_search: Option<TacticalSearch>,
    // Tuning for blending pattern vs tactical evaluation.
    hybrid_config: HybridConfig,
}
274
impl Clone for ChessVectorEngine {
    /// Manual clone: most state is deep-copied, but some resources are
    /// deliberately dropped — the clone gets no manifold learner (and
    /// `use_manifold` is reset to false), no license checker, and no
    /// database connection. NOTE(review): presumably these fields are not
    /// clonable or are per-instance; confirm before relying on a clone
    /// for manifold/persistence work.
    fn clone(&self) -> Self {
        Self {
            encoder: self.encoder.clone(),
            similarity_search: self.similarity_search.clone(),
            lsh_index: self.lsh_index.clone(),
            // Manifold model is not carried over; manifold mode is disabled to match.
            manifold_learner: None, use_lsh: self.use_lsh,
            use_manifold: false, position_moves: self.position_moves.clone(),
            manifold_similarity_search: self.manifold_similarity_search.clone(),
            manifold_lsh_index: self.manifold_lsh_index.clone(),
            feature_checker: self.feature_checker.clone(),
            // License state starts fresh in the clone.
            licensed_feature_checker: None, position_vectors: self.position_vectors.clone(),
            position_boards: self.position_boards.clone(),
            position_evaluations: self.position_evaluations.clone(),
            opening_book: self.opening_book.clone(),
            // The clone starts without a database handle.
            database: None, tactical_search: self.tactical_search.clone(),
            hybrid_config: self.hybrid_config.clone(),
        }
    }
}
300
301impl ChessVectorEngine {
    /// Creates a baseline engine at the open-source tier: no LSH, no manifold
    /// learning, no opening book, no persistence, no tactical search.
    pub fn new(vector_size: usize) -> Self {
        Self {
            encoder: PositionEncoder::new(vector_size),
            similarity_search: SimilaritySearch::new(vector_size),
            lsh_index: None,
            manifold_learner: None,
            use_lsh: false,
            use_manifold: false,
            position_moves: HashMap::new(),
            manifold_similarity_search: None,
            manifold_lsh_index: None,
            // Free tier by default; no license loaded.
            feature_checker: FeatureChecker::new(FeatureTier::OpenSource), licensed_feature_checker: None,
            position_vectors: Vec::new(),
            position_boards: Vec::new(),
            position_evaluations: Vec::new(),
            opening_book: None,
            database: None,
            tactical_search: None,
            hybrid_config: HybridConfig::default(),
        }
    }
326
327 pub fn new_with_tier(vector_size: usize, tier: FeatureTier) -> Self {
329 let mut engine = Self::new(vector_size);
330 engine.feature_checker = FeatureChecker::new(tier);
331 engine
332 }
333
    /// Returns the engine's current feature tier.
    pub fn get_feature_tier(&self) -> &FeatureTier {
        self.feature_checker.get_current_tier()
    }
338
    /// Switches the engine to `new_tier`, delegating any tier-transition
    /// rules to the feature checker.
    pub fn upgrade_tier(&mut self, new_tier: FeatureTier) {
        self.feature_checker.upgrade_tier(new_tier);
    }
343
    /// Returns true when `feature` is available at the current tier.
    pub fn is_feature_available(&self, feature: &str) -> bool {
        self.feature_checker.check_feature(feature).is_ok()
    }
348
    /// Returns `Err(FeatureError)` when `feature` is not available at the
    /// current tier; `Ok(())` otherwise.
    pub fn require_feature(&self, feature: &str) -> Result<(), FeatureError> {
        self.feature_checker.require_feature(feature)
    }
353
354 pub fn new_adaptive(vector_size: usize, expected_positions: usize, use_case: &str) -> Self {
357 match use_case {
358 "training" => {
359 if expected_positions > 10000 {
360 Self::new_with_lsh(vector_size, 12, 20)
362 } else {
363 Self::new(vector_size)
364 }
365 }
366 "gameplay" => {
367 if expected_positions > 15000 {
368 Self::new_with_lsh(vector_size, 10, 18)
370 } else {
371 Self::new(vector_size)
372 }
373 }
374 "analysis" => {
375 if expected_positions > 10000 {
376 Self::new_with_lsh(vector_size, 14, 22)
378 } else {
379 Self::new(vector_size)
380 }
381 }
382 _ => Self::new(vector_size), }
384 }
385
    /// Creates an empty engine with an LSH index active from the start,
    /// using `num_tables` hash tables of `hash_size` bits each.
    pub fn new_with_lsh(vector_size: usize, num_tables: usize, hash_size: usize) -> Self {
        Self {
            encoder: PositionEncoder::new(vector_size),
            similarity_search: SimilaritySearch::new(vector_size),
            // LSH is pre-built and enabled, unlike `new`.
            lsh_index: Some(LSH::new(vector_size, num_tables, hash_size)),
            manifold_learner: None,
            use_lsh: true,
            use_manifold: false,
            position_moves: HashMap::new(),
            manifold_similarity_search: None,
            manifold_lsh_index: None,
            feature_checker: FeatureChecker::new(FeatureTier::OpenSource),
            licensed_feature_checker: None,
            position_vectors: Vec::new(),
            position_boards: Vec::new(),
            position_evaluations: Vec::new(),
            opening_book: None,
            database: None,
            tactical_search: None,
            hybrid_config: HybridConfig::default(),
        }
    }
410
    /// Builds a fresh LSH index (replacing any existing one) and switches
    /// similarity queries over to it.
    pub fn enable_lsh(&mut self, num_tables: usize, hash_size: usize) {
        self.lsh_index = Some(LSH::new(self.encoder.vector_size(), num_tables, hash_size));
        self.use_lsh = true;

        // Back-fill the new index with every position already stored.
        if let Some(ref mut lsh) = self.lsh_index {
            for (vector, evaluation) in self.similarity_search.get_all_positions() {
                lsh.add_vector(vector, evaluation);
            }
        }
    }
423
    /// Encodes `board` and stores it (with `evaluation`) in every active
    /// index: the linear search, the parallel position arrays, the LSH index,
    /// and — when manifold mode is on — the compressed-space indexes.
    ///
    /// Positions rejected by `is_position_safe` are silently ignored.
    pub fn add_position(&mut self, board: &Board, evaluation: f32) {
        if !self.is_position_safe(board) {
            return; }

        let vector = self.encoder.encode(board);
        self.similarity_search
            .add_position(vector.clone(), evaluation);

        // Keep the parallel arrays in lock-step (same index = same position).
        self.position_vectors.push(vector.clone());
        self.position_boards.push(*board);
        self.position_evaluations.push(evaluation);

        if let Some(ref mut lsh) = self.lsh_index {
            lsh.add_vector(vector.clone(), evaluation);
        }

        // Mirror the position into the manifold-compressed indexes, if active.
        if self.use_manifold {
            if let Some(ref learner) = self.manifold_learner {
                let compressed = learner.encode(&vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), evaluation);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, evaluation);
                }
            }
        }
    }
460
    /// Finds up to `k` positions similar to `board`, returning
    /// `(vector, evaluation, similarity)` tuples.
    ///
    /// Backend precedence: manifold LSH, then manifold linear search (when
    /// manifold mode is on), then plain LSH (when enabled), then exact
    /// linear search.
    pub fn find_similar_positions(&self, board: &Board, k: usize) -> Vec<(Array1<f32>, f32, f32)> {
        let query_vector = self.encoder.encode(board);

        if self.use_manifold {
            if let Some(ref manifold_learner) = self.manifold_learner {
                // Search in compressed space when a manifold model is loaded.
                let compressed_query = manifold_learner.encode(&query_vector);

                if let Some(ref lsh) = self.manifold_lsh_index {
                    return lsh.query(&compressed_query, k);
                }

                if let Some(ref search) = self.manifold_similarity_search {
                    return search.search(&compressed_query, k);
                }
            }
        }

        if self.use_lsh {
            if let Some(ref lsh_index) = self.lsh_index {
                return lsh_index.query(&query_vector, k);
            }
        }

        // Fallback: exact linear scan.
        self.similarity_search.search(&query_vector, k)
    }
492
493 pub fn find_similar_positions_with_indices(
495 &self,
496 board: &Board,
497 k: usize,
498 ) -> Vec<(usize, f32, f32)> {
499 let query_vector = self.encoder.encode(board);
500
501 let mut results = Vec::new();
504
505 for (i, stored_vector) in self.position_vectors.iter().enumerate() {
506 let similarity = self.encoder.similarity(&query_vector, stored_vector);
507 let eval = self.position_evaluations.get(i).copied().unwrap_or(0.0);
508 results.push((i, eval, similarity));
509 }
510
511 results.sort_by(|a, b| b.2.partial_cmp(&a.2).unwrap_or(std::cmp::Ordering::Equal));
513 results.truncate(k);
514
515 results
516 }
517
518 pub fn evaluate_position(&mut self, board: &Board) -> Option<f32> {
520 if let Some(entry) = self.get_opening_entry(board) {
529 return Some(entry.evaluation);
530 }
531
532 let similar_positions = self.find_similar_positions(board, 5);
534
535 if similar_positions.is_empty() {
536 if let Some(ref mut tactical_search) = self.tactical_search {
538 let result = tactical_search.search(board);
539 return Some(result.evaluation);
540 }
541 return None;
542 }
543
544 let mut weighted_sum = 0.0;
546 let mut weight_sum = 0.0;
547 let mut similarity_scores = Vec::new();
548
549 for (_, evaluation, similarity) in &similar_positions {
550 let weight = *similarity;
551 weighted_sum += evaluation * weight;
552 weight_sum += weight;
553 similarity_scores.push(*similarity);
554 }
555
556 let pattern_evaluation = weighted_sum / weight_sum;
557
558 let avg_similarity = similarity_scores.iter().sum::<f32>() / similarity_scores.len() as f32;
560 let count_factor = (similar_positions.len() as f32
561 / self.hybrid_config.min_similar_positions as f32)
562 .min(1.0);
563 let pattern_confidence = avg_similarity * count_factor;
564
565 let use_tactical = self.hybrid_config.enable_tactical_refinement
567 && pattern_confidence < self.hybrid_config.pattern_confidence_threshold
568 && self.tactical_search.is_some();
569
570 if use_tactical {
571 if let Some(ref mut tactical_search) = self.tactical_search {
573 let tactical_result = if tactical_search.config.enable_parallel_search {
574 tactical_search.search_parallel(board)
575 } else {
576 tactical_search.search(board)
577 };
578
579 let pattern_weight = self.hybrid_config.pattern_weight * pattern_confidence;
581 let tactical_weight = 1.0 - pattern_weight;
582
583 let hybrid_evaluation = (pattern_evaluation * pattern_weight)
584 + (tactical_result.evaluation * tactical_weight);
585
586 Some(hybrid_evaluation)
587 } else {
588 Some(pattern_evaluation)
590 }
591 } else {
592 Some(pattern_evaluation)
594 }
595 }
596
    /// Encodes `board` into the engine's fixed-size feature vector.
    pub fn encode_position(&self, board: &Board) -> Array1<f32> {
        self.encoder.encode(board)
    }
601
602 pub fn calculate_similarity(&self, board1: &Board, board2: &Board) -> f32 {
604 let vec1 = self.encoder.encode(board1);
605 let vec2 = self.encoder.encode(board2);
606 self.encoder.similarity(&vec1, &vec2)
607 }
608
    /// Number of positions stored in the similarity search index.
    pub fn knowledge_base_size(&self) -> usize {
        self.similarity_search.size()
    }
613
614 pub fn save_training_data<P: AsRef<std::path::Path>>(
616 &self,
617 path: P,
618 ) -> Result<(), Box<dyn std::error::Error>> {
619 use crate::training::{TrainingData, TrainingDataset};
620
621 let mut dataset = TrainingDataset::new();
622
623 for (i, board) in self.position_boards.iter().enumerate() {
625 if i < self.position_evaluations.len() {
626 dataset.data.push(TrainingData {
627 board: *board,
628 evaluation: self.position_evaluations[i],
629 depth: 15, game_id: i, });
632 }
633 }
634
635 dataset.save_incremental(path)?;
636 println!("Saved {} positions to training data", dataset.data.len());
637 Ok(())
638 }
639
    /// Loads a training dataset from `path`, skipping positions the engine
    /// already knows, with progress reporting.
    ///
    /// If a sibling file with a `.bin` extension exists it is preferred and
    /// loaded via `load_training_data_binary` instead.
    ///
    /// # Errors
    /// Propagates dataset-load, progress-template, and binary-load errors.
    pub fn load_training_data_incremental<P: AsRef<std::path::Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use crate::training::TrainingDataset;
        use indicatif::{ProgressBar, ProgressStyle};
        use std::collections::HashSet;

        let existing_size = self.knowledge_base_size();

        // Prefer the optimized binary sibling file when present.
        let path_ref = path.as_ref();
        let binary_path = path_ref.with_extension("bin");
        if binary_path.exists() {
            println!("š Loading optimized binary format...");
            return self.load_training_data_binary(binary_path);
        }

        println!("š Loading training data from {}...", path_ref.display());
        let dataset = TrainingDataset::load(path)?;

        let total_positions = dataset.data.len();
        if total_positions == 0 {
            println!("ā ļø No positions found in dataset");
            return Ok(());
        }

        let dedup_pb = ProgressBar::new(total_positions as u64);
        dedup_pb.set_style(
            ProgressStyle::default_bar()
                .template("š Checking duplicates [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        // Dedup pass: collect only boards not already stored in the engine.
        let mut existing_boards: HashSet<_> = self.position_boards.iter().cloned().collect();
        let mut new_positions = Vec::new();
        let mut new_evaluations = Vec::new();

        for (i, data) in dataset.data.into_iter().enumerate() {
            if !existing_boards.contains(&data.board) {
                existing_boards.insert(data.board);
                new_positions.push(data.board);
                new_evaluations.push(data.evaluation);
            }

            // Update the bar every 1000 entries (and on the final entry).
            if i % 1000 == 0 || i == total_positions - 1 {
                dedup_pb.set_position((i + 1) as u64);
                dedup_pb.set_message(format!("{} new positions found", new_positions.len()));
            }
        }
        dedup_pb.finish_with_message(format!("ā
Found {} new positions", new_positions.len()));

        if new_positions.is_empty() {
            println!("ā¹ļø No new positions to add (all positions already exist)");
            return Ok(());
        }

        let add_pb = ProgressBar::new(new_positions.len() as u64);
        add_pb.set_style(
            ProgressStyle::default_bar()
                .template("ā Adding positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        // Insert pass: encode and index every surviving position.
        for (i, (board, evaluation)) in new_positions
            .into_iter()
            .zip(new_evaluations.into_iter())
            .enumerate()
        {
            self.add_position(&board, evaluation);

            if i % 500 == 0 || i == add_pb.length().unwrap() as usize - 1 {
                add_pb.set_position((i + 1) as u64);
                add_pb.set_message("vectors encoded".to_string());
            }
        }
        add_pb.finish_with_message("ā
All positions added");

        println!(
            "šÆ Loaded {} new positions (total: {})",
            self.knowledge_base_size() - existing_size,
            self.knowledge_base_size()
        );
        Ok(())
    }
731
    /// Saves all stored positions in an LZ4-compressed bincode container
    /// (FEN strings, evaluations, raw vectors, and a creation timestamp).
    ///
    /// # Errors
    /// Propagates clock, serialization, and file-write errors.
    pub fn save_training_data_binary<P: AsRef<std::path::Path>>(
        &self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use lz4_flex::compress_prepend_size;

        println!("š¾ Saving training data in binary format (compressed)...");

        // On-disk layout; must stay in sync with the deserializer in
        // `load_training_data_binary`.
        #[derive(serde::Serialize)]
        struct BinaryTrainingData {
            positions: Vec<String>, evaluations: Vec<f32>,
            vectors: Vec<Vec<f32>>, created_at: i64,
        }

        let current_time = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)?
            .as_secs() as i64;

        let mut positions = Vec::with_capacity(self.position_boards.len());
        let mut evaluations = Vec::with_capacity(self.position_boards.len());
        let mut vectors = Vec::with_capacity(self.position_boards.len());

        for (i, board) in self.position_boards.iter().enumerate() {
            if i < self.position_evaluations.len() {
                positions.push(board.to_string());
                evaluations.push(self.position_evaluations[i]);

                // NOTE(review): if `as_slice` ever returns None, `vectors`
                // falls out of index alignment with `positions` — harmless
                // today because the loader ignores `vectors`, but verify
                // before consuming them.
                if i < self.position_vectors.len() {
                    if let Some(vector_slice) = self.position_vectors[i].as_slice() {
                        vectors.push(vector_slice.to_vec());
                    }
                }
            }
        }

        let binary_data = BinaryTrainingData {
            positions,
            evaluations,
            vectors,
            created_at: current_time,
        };

        let serialized = bincode::serialize(&binary_data)?;

        // LZ4 with the uncompressed size prepended (read back by
        // `decompress_size_prepended`).
        let compressed = compress_prepend_size(&serialized);

        std::fs::write(path, &compressed)?;

        println!(
            "ā
Saved {} positions to binary file ({} bytes compressed)",
            binary_data.positions.len(),
            compressed.len()
        );
        Ok(())
    }
796
797 pub fn load_training_data_binary<P: AsRef<std::path::Path>>(
799 &mut self,
800 path: P,
801 ) -> Result<(), Box<dyn std::error::Error>> {
802 use indicatif::{ProgressBar, ProgressStyle};
803 use lz4_flex::decompress_size_prepended;
804
805 println!("š Loading training data from binary format...");
806
807 #[derive(serde::Deserialize)]
808 struct BinaryTrainingData {
809 positions: Vec<String>,
810 evaluations: Vec<f32>,
811 #[allow(dead_code)]
812 vectors: Vec<Vec<f32>>,
813 #[allow(dead_code)]
814 created_at: i64,
815 }
816
817 let existing_size = self.knowledge_base_size();
818
819 let file_size = std::fs::metadata(&path)?.len();
821 println!(
822 "š¦ Reading {} compressed file...",
823 Self::format_bytes(file_size)
824 );
825
826 let compressed_data = std::fs::read(path)?;
827 println!("š Decompressing data...");
828 let serialized = decompress_size_prepended(&compressed_data)?;
829
830 println!("š Deserializing binary data...");
831 let binary_data: BinaryTrainingData = bincode::deserialize(&serialized)?;
832
833 let total_positions = binary_data.positions.len();
834 if total_positions == 0 {
835 println!("ā ļø No positions found in binary file");
836 return Ok(());
837 }
838
839 println!(
840 "š Processing {} positions from binary format...",
841 total_positions
842 );
843
844 let pb = ProgressBar::new(total_positions as u64);
846 pb.set_style(
847 ProgressStyle::default_bar()
848 .template("ā” Loading positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
849 .progress_chars("āāā")
850 );
851
852 let mut added_count = 0;
853
854 for (i, fen) in binary_data.positions.iter().enumerate() {
856 if i < binary_data.evaluations.len() {
857 if let Ok(board) = fen.parse() {
858 if !self.position_boards.contains(&board) {
860 self.add_position(&board, binary_data.evaluations[i]);
861 added_count += 1;
862 }
863 }
864 }
865
866 if i % 1000 == 0 || i == total_positions - 1 {
867 pb.set_position((i + 1) as u64);
868 pb.set_message(format!("{} new positions", added_count));
869 }
870 }
871 pb.finish_with_message(format!("ā
Loaded {} new positions", added_count));
872
873 println!(
874 "šÆ Binary loading complete: {} new positions (total: {})",
875 self.knowledge_base_size() - existing_size,
876 self.knowledge_base_size()
877 );
878 Ok(())
879 }
880
    /// Loads `(FEN, evaluation)` training data via a memory-mapped file,
    /// auto-detecting MessagePack, bincode, or LZ4+bincode encodings.
    ///
    /// Requires the `memory_mapped_files` feature to be available.
    ///
    /// # Errors
    /// Returns a feature error, an I/O error, or a decode error if none of
    /// the supported formats matches.
    pub fn load_training_data_mmap<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("memory_mapped_files")?;

        use memmap2::Mmap;
        use std::fs::File;

        let path_ref = path.as_ref();
        println!(
            "š Loading training data via memory mapping: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        // SAFETY: the mapping is read-only; soundness relies on the file not
        // being modified by another process while mapped (memmap2's standard
        // caveat) — confirm this assumption for the deployment environment.
        let mmap = unsafe { Mmap::map(&file)? };

        // Format sniffing: try MessagePack first, then bincode, then
        // LZ4-compressed bincode as the last resort.
        if let Ok(data) = rmp_serde::from_slice::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected MessagePack format");
            return self.load_positions_from_tuples(data);
        }

        if let Ok(data) = bincode::deserialize::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected bincode format");
            return self.load_positions_from_tuples(data);
        }

        let decompressed = lz4_flex::decompress_size_prepended(&mmap)?;
        let data: Vec<(String, f32)> = bincode::deserialize(&decompressed)?;
        println!("š¦ Detected LZ4+bincode format");
        self.load_positions_from_tuples(data)
    }
920
    /// Loads `(FEN, evaluation)` training data from a MessagePack file.
    ///
    /// # Errors
    /// Propagates file-open and MessagePack decode errors.
    pub fn load_training_data_msgpack<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufReader;

        let path_ref = path.as_ref();
        println!(
            "š Loading MessagePack training data: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let data: Vec<(String, f32)> = rmp_serde::from_read(reader)?;

        println!("š¦ MessagePack data loaded: {} positions", data.len());
        self.load_positions_from_tuples(data)
    }
943
    /// Loads newline-delimited JSON training data, parsing lines in parallel
    /// chunks with rayon and collecting `(fen, evaluation)` pairs into a
    /// concurrent map (which also deduplicates by FEN).
    ///
    /// Lines that are not valid JSON, or lack string `"fen"` / numeric
    /// `"evaluation"` fields, are silently skipped.
    ///
    /// # Errors
    /// Propagates file-open and line-read errors.
    pub fn load_training_data_streaming_json<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use dashmap::DashMap;
        use rayon::prelude::*;
        use std::fs::File;
        use std::io::{BufRead, BufReader};
        use std::sync::Arc;

        let path_ref = path.as_ref();
        println!(
            "š Loading JSON with streaming parallel processing: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);

        // Lines are processed in chunks of this size across the rayon pool.
        let chunk_size = 10000;
        let position_map = Arc::new(DashMap::new());

        // NOTE: the whole file is buffered into memory before parsing.
        let lines: Vec<String> = reader.lines().collect::<Result<Vec<_>, _>>()?;
        let total_lines = lines.len();

        lines.par_chunks(chunk_size).for_each(|chunk| {
            for line in chunk {
                if let Ok(data) = serde_json::from_str::<serde_json::Value>(line) {
                    if let (Some(fen), Some(eval)) = (
                        data.get("fen").and_then(|v| v.as_str()),
                        data.get("evaluation").and_then(|v| v.as_f64()),
                    ) {
                        position_map.insert(fen.to_string(), eval as f32);
                    }
                }
            }
        });

        println!(
            "š¦ Parallel JSON processing complete: {} positions from {} lines",
            position_map.len(),
            total_lines
        );

        // Unwrap the Arc when we hold the last reference (avoids cloning);
        // otherwise fall back to copying the entries out.
        let data: Vec<(String, f32)> = match Arc::try_unwrap(position_map) {
            Ok(map) => map.into_iter().collect(),
            Err(arc_map) => {
                arc_map
                    .iter()
                    .map(|entry| (entry.key().clone(), *entry.value()))
                    .collect()
            }
        };
        self.load_positions_from_tuples(data)
    }
1006
    /// Loads zstd-compressed `(FEN, evaluation)` training data, trying
    /// MessagePack first and falling back to bincode.
    ///
    /// # Errors
    /// Propagates file-open, zstd, and decode errors (from the bincode
    /// fallback if MessagePack failed).
    pub fn load_training_data_compressed<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufReader;

        let path_ref = path.as_ref();
        println!(
            "š Loading zstd compressed training data: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;

        if let Ok(data) = rmp_serde::from_read::<_, Vec<(String, f32)>>(decoder) {
            println!("š¦ Zstd+MessagePack data loaded: {} positions", data.len());
            return self.load_positions_from_tuples(data);
        }

        // MessagePack failed: reopen the file and retry as zstd+bincode,
        // since the first decoder consumed part of the stream.
        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;
        let data: Vec<(String, f32)> = bincode::deserialize_from(decoder)?;

        println!("š¦ Zstd+bincode data loaded: {} positions", data.len());
        self.load_positions_from_tuples(data)
    }
1041
1042 fn load_positions_from_tuples(
1045 &mut self,
1046 data: Vec<(String, f32)>,
1047 ) -> Result<(), Box<dyn std::error::Error>> {
1048 use indicatif::{ProgressBar, ProgressStyle};
1049 use std::collections::HashSet;
1050
1051 let existing_size = self.knowledge_base_size();
1052 let mut seen_positions = HashSet::new();
1053 let mut loaded_count = 0;
1054
1055 let pb = ProgressBar::new(data.len() as u64);
1057 pb.set_style(ProgressStyle::with_template(
1058 "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({per_sec}) {msg}"
1059 )?);
1060
1061 for (fen, evaluation) in data {
1062 pb.inc(1);
1063
1064 if seen_positions.contains(&fen) {
1066 continue;
1067 }
1068 seen_positions.insert(fen.clone());
1069
1070 if let Ok(board) = Board::from_str(&fen) {
1072 self.add_position(&board, evaluation);
1073 loaded_count += 1;
1074
1075 if loaded_count % 1000 == 0 {
1076 pb.set_message(format!("Loaded {} positions", loaded_count));
1077 }
1078 }
1079 }
1080
1081 pb.finish_with_message(format!("ā
Loaded {} new positions", loaded_count));
1082
1083 println!(
1084 "šÆ Ultra-fast loading complete: {} new positions (total: {})",
1085 self.knowledge_base_size() - existing_size,
1086 self.knowledge_base_size()
1087 );
1088
1089 Ok(())
1090 }
1091
1092 fn format_bytes(bytes: u64) -> String {
1094 const UNITS: &[&str] = &["B", "KB", "MB", "GB"];
1095 let mut size = bytes as f64;
1096 let mut unit_index = 0;
1097
1098 while size >= 1024.0 && unit_index < UNITS.len() - 1 {
1099 size /= 1024.0;
1100 unit_index += 1;
1101 }
1102
1103 format!("{:.1} {}", size, UNITS[unit_index])
1104 }
1105
1106 pub fn train_from_dataset_incremental(&mut self, dataset: &crate::training::TrainingDataset) {
1108 let _existing_size = self.knowledge_base_size();
1109 let mut added = 0;
1110
1111 for data in &dataset.data {
1112 if !self.position_boards.contains(&data.board) {
1114 self.add_position(&data.board, data.evaluation);
1115 added += 1;
1116 }
1117 }
1118
1119 println!(
1120 "Added {} new positions from dataset (total: {})",
1121 added,
1122 self.knowledge_base_size()
1123 );
1124 }
1125
    /// Builds a snapshot of the engine's current knowledge-base state.
    pub fn training_stats(&self) -> TrainingStats {
        TrainingStats {
            total_positions: self.knowledge_base_size(),
            unique_positions: self.position_boards.len(),
            has_move_data: !self.position_moves.is_empty(),
            move_data_entries: self.position_moves.len(),
            lsh_enabled: self.use_lsh,
            manifold_enabled: self.use_manifold,
            opening_book_enabled: self.opening_book.is_some(),
        }
    }
1138
    /// Scans the working directory for well-known training and tactical
    /// puzzle files and loads each one it finds, returning the list of
    /// file paths that loaded successfully.
    ///
    /// Individual file failures are swallowed (loading continues); only
    /// progress-template errors abort the whole call.
    pub fn auto_load_training_data(&mut self) -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        // Conventional filenames probed for general training data.
        let common_files = vec![
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
            "my_training.json",
        ];

        // Conventional filenames probed for tactical puzzle sets.
        let tactical_files = vec![
            "tactical_puzzles.json",
            "lichess_puzzles.json",
            "my_puzzles.json",
        ];

        let mut available_files = Vec::new();
        for file_path in &common_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "training"));
            }
        }
        for file_path in &tactical_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "tactical"));
            }
        }

        if available_files.is_empty() {
            return Ok(Vec::new());
        }

        println!(
            "š Found {} training files to auto-load",
            available_files.len()
        );

        let pb = ProgressBar::new(available_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template("š Auto-loading files [{elapsed_precise}] [{bar:40.blue/cyan}] {pos}/{len} {msg}")?
                .progress_chars("āāā")
        );

        let mut loaded_files = Vec::new();

        for (i, (file_path, file_type)) in available_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            // Dispatch by file category; each loader records the path on success.
            let result = match *file_type {
                "training" => self.load_training_data_incremental(file_path).map(|_| {
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                "tactical" => crate::training::TacticalPuzzleParser::load_tactical_puzzles(
                    file_path,
                )
                .map(|puzzles| {
                    crate::training::TacticalPuzzleParser::load_into_engine_incremental(
                        &puzzles, self,
                    );
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                _ => Ok(()),
            };

            // Failures are non-fatal; move on to the next file.
            if let Err(_e) = result {
                println!("Loading complete");
            }
        }

        pb.set_position(available_files.len() as u64);
        pb.finish_with_message(format!("ā
Auto-loaded {} files", loaded_files.len()));

        Ok(loaded_files)
    }
1221
    /// Loads Lichess puzzles (with best-move data) from a CSV file using the
    /// premium loader path.
    ///
    /// Requires the `ultra_fast_loading` feature.
    ///
    /// # Errors
    /// Returns a feature error or any error from the puzzle loader.
    pub fn load_lichess_puzzles_premium<P: AsRef<std::path::Path>>(
        &mut self,
        csv_path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("ultra_fast_loading")?; println!("š„ Loading Lichess puzzles with premium performance...");
        let puzzle_entries =
            crate::lichess_loader::load_lichess_puzzles_premium_with_moves(csv_path)?;

        // Store each puzzle with its best move; the puzzle evaluation doubles
        // as the move score.
        for (board, evaluation, best_move) in puzzle_entries {
            self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
        }

        println!("ā
Premium Lichess puzzle loading complete!");
        Ok(())
    }
1240
    /// Loads up to `max_puzzles` Lichess puzzles (with best-move data) from a
    /// CSV file using the basic-tier loader.
    ///
    /// # Errors
    /// Propagates any error from the puzzle loader.
    pub fn load_lichess_puzzles_basic<P: AsRef<std::path::Path>>(
        &mut self,
        csv_path: P,
        max_puzzles: usize,
    ) -> Result<(), Box<dyn std::error::Error>> {
        println!(
            "š Loading Lichess puzzles (basic tier, limited to {} puzzles)...",
            max_puzzles
        );
        let puzzle_entries =
            crate::lichess_loader::load_lichess_puzzles_basic_with_moves(csv_path, max_puzzles)?;

        // Store each puzzle with its best move; the puzzle evaluation doubles
        // as the move score.
        for (board, evaluation, best_move) in puzzle_entries {
            self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
        }

        println!("ā
Basic Lichess puzzle loading complete!");
        Ok(())
    }
1261
    /// Creates an engine with the opening book enabled, then auto-loads any
    /// well-known training files found in the working directory.
    ///
    /// # Errors
    /// Propagates errors from `auto_load_training_data`.
    pub fn new_with_auto_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        let loaded_files = engine.auto_load_training_data()?;

        if loaded_files.is_empty() {
            println!("š¤ Created fresh engine (no training data found)");
        } else {
            println!(
                "š Created engine with auto-loaded training data from {} files",
                loaded_files.len()
            );
            let _stats = engine.training_stats();
            println!("Loading complete");
            println!("Loading complete");
        }

        Ok(engine)
    }
1284
    /// Creates an engine optimized for startup time: opening book on,
    /// persistence attempted, preferring well-known binary training files
    /// and falling back to JSON auto-loading, then trying to load any
    /// pre-trained manifold models.
    ///
    /// Persistence and manifold-model failures are non-fatal.
    ///
    /// # Errors
    /// Propagates progress-template and auto-load errors.
    pub fn new_with_fast_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Best-effort persistence; the engine works without it.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        // Conventional binary filenames checked in priority order.
        let binary_files = [
            "training_data_a100.bin", "training_data.bin",
            "tactical_training_data.bin",
            "engine_training.bin",
            "chess_training.bin",
        ];

        let existing_binary_files: Vec<_> = binary_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        let mut loaded_count = 0;

        if !existing_binary_files.is_empty() {
            println!(
                "ā” Fast loading: Found {} binary files",
                existing_binary_files.len()
            );

            let pb = ProgressBar::new(existing_binary_files.len() as u64);
            pb.set_style(
                ProgressStyle::default_bar()
                    .template("š Fast loading [{elapsed_precise}] [{bar:40.green/cyan}] {pos}/{len} {msg}")?
                    .progress_chars("āāā")
            );

            // Per-file failures are ignored; count only successful loads.
            for (i, file_path) in existing_binary_files.iter().enumerate() {
                pb.set_position(i as u64);
                pb.set_message("Processing...".to_string());

                if engine.load_training_data_binary(file_path).is_ok() {
                    loaded_count += 1;
                }
            }

            pb.set_position(existing_binary_files.len() as u64);
            pb.finish_with_message(format!("ā
Loaded {} binary files", loaded_count));
        } else {
            println!("š¦ No binary files found, falling back to JSON auto-loading...");
            let _ = engine.auto_load_training_data()?;
        }

        // Manifold models are optional; report and continue when absent.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({})", e);
            println!(" Use --rebuild-models flag to train new models");
        }

        let stats = engine.training_stats();
        println!(
            "ā” Fast engine ready with {} positions ({} binary files loaded)",
            stats.total_positions, loaded_count
        );

        Ok(engine)
    }
1359
    /// Construct an engine that auto-discovers training files in the current
    /// directory, consolidates duplicates by base name (best format wins), and
    /// loads each consolidated dataset.
    ///
    /// Old-format files eligible for cleanup are reported but not deleted.
    pub fn new_with_auto_discovery(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with AUTO-DISCOVERY and format consolidation...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence failure is tolerated; the engine still runs in-memory.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        let discovered_files = AutoDiscovery::discover_training_files(".", true)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No training data found. Use convert methods to create optimized files.");
            return Ok(engine);
        }

        // Pick the best file per base name so each dataset loads exactly once.
        let consolidated = AutoDiscovery::consolidate_by_base_name(discovered_files.clone());

        let mut total_loaded = 0;
        for (base_name, best_file) in &consolidated {
            println!("š Loading {} ({})", base_name, best_file.format);

            // Measure knowledge-base growth to count positions added by this file.
            let initial_size = engine.knowledge_base_size();
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            let loaded_count = engine.knowledge_base_size() - initial_size;
            total_loaded += loaded_count;

            // NOTE(review): mojibake'd emoji splits this literal across lines.
            println!(" ā
Loaded {} positions", loaded_count);
        }

        // Dry-run cleanup: report superseded files but do not delete them
        // (second argument `true` — dry run, per the hint printed below).
        let cleanup_candidates = AutoDiscovery::get_cleanup_candidates(&discovered_files);
        if !cleanup_candidates.is_empty() {
            println!(
                "š§¹ Found {} old format files that can be cleaned up:",
                cleanup_candidates.len()
            );
            AutoDiscovery::cleanup_old_formats(&cleanup_candidates, true)?;
            println!(" š” To actually remove old files, run: cargo run --bin cleanup_formats");
        }

        // Optional pre-trained manifold models.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({})", e);
        }

        println!(
            "šÆ Engine ready: {} positions loaded from {} datasets",
            total_loaded,
            consolidated.len()
        );
        Ok(engine)
    }
1419
    /// Construct an engine that loads at most one dataset: the first (highest
    /// priority) discovered training file, or the bundled starter dataset when
    /// nothing is found.
    pub fn new_with_instant_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with INSTANT loading...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Best-effort persistence; failure is non-fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        let discovered_files = AutoDiscovery::discover_training_files(".", false)?;

        if discovered_files.is_empty() {
            // Fall back to the small starter dataset (on-disk or embedded).
            println!("ā¹ļø No user training data found, loading starter dataset...");
            if let Err(_e) = engine.load_starter_dataset() {
                println!("Loading complete");
                println!("ā¹ļø Starting with empty engine");
            } else {
                // NOTE(review): mojibake'd emoji splits this literal across lines.
                println!(
                    "ā
Loaded starter dataset with {} positions",
                    engine.knowledge_base_size()
                );
            }
            return Ok(engine);
        }

        // Take only the first discovered file (assumed best priority ordering
        // from AutoDiscovery — confirm against discover_training_files).
        if let Some(best_file) = discovered_files.first() {
            println!(
                "ā” Loading {} format: {}",
                best_file.format,
                best_file.path.display()
            );
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            println!(
                "ā
Loaded {} positions from {} format",
                engine.knowledge_base_size(),
                best_file.format
            );
        }

        // Optional pre-trained manifold models.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({})", e);
        }

        println!(
            "šÆ Engine ready: {} positions loaded",
            engine.knowledge_base_size()
        );
        Ok(engine)
    }
1476
1477 pub fn new_with_license(vector_size: usize, license_url: String) -> Self {
1479 let mut engine = Self::new(vector_size);
1480 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new(license_url));
1481 engine
1482 }
1483
1484 pub fn new_with_offline_license(vector_size: usize) -> Self {
1486 let mut engine = Self::new(vector_size);
1487 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new_offline());
1488 engine
1489 }
1490
1491 pub async fn activate_license(&mut self, key: &str) -> Result<FeatureTier, LicenseError> {
1493 if let Some(ref mut checker) = self.licensed_feature_checker {
1494 let tier = checker.activate_license(key).await?;
1495 self.feature_checker.upgrade_tier(tier.clone());
1497 Ok(tier)
1498 } else {
1499 Err(LicenseError::InvalidFormat(
1500 "No license checker initialized".to_string(),
1501 ))
1502 }
1503 }
1504
1505 pub async fn check_licensed_feature(&mut self, feature: &str) -> Result<(), FeatureError> {
1507 if let Some(ref mut checker) = self.licensed_feature_checker {
1508 checker.check_feature(feature).await
1509 } else {
1510 self.feature_checker.check_feature(feature)
1512 }
1513 }
1514
1515 pub fn load_license_cache<P: AsRef<std::path::Path>>(
1517 &mut self,
1518 path: P,
1519 ) -> Result<(), Box<dyn std::error::Error>> {
1520 if let Some(ref mut checker) = self.licensed_feature_checker {
1521 checker.load_cache(path)?;
1522 }
1523 Ok(())
1524 }
1525
1526 pub fn save_license_cache<P: AsRef<std::path::Path>>(
1528 &self,
1529 path: P,
1530 ) -> Result<(), Box<dyn std::error::Error>> {
1531 if let Some(ref checker) = self.licensed_feature_checker {
1532 checker.save_cache(path)?;
1533 }
1534 Ok(())
1535 }
1536
1537 fn is_position_safe(&self, board: &Board) -> bool {
1542 match std::panic::catch_unwind(|| {
1544 use chess::MoveGen;
1545 let _legal_moves: Vec<ChessMove> = MoveGen::new_legal(board).collect();
1546 true
1547 }) {
1548 Ok(_) => true,
1549 Err(_) => {
1550 false
1552 }
1553 }
1554 }
1555
    /// Verify that GPU acceleration is both licensed and actually available.
    ///
    /// Returns `Err` when the feature tier forbids it or no GPU backend can be
    /// initialized on this machine.
    pub fn check_gpu_acceleration(&self) -> Result<(), Box<dyn std::error::Error>> {
        // Feature gate first: cheaper than probing hardware.
        self.feature_checker.check_feature("gpu_acceleration")?;

        // Constructing the accelerator doubles as the availability probe.
        match crate::gpu_acceleration::GPUAccelerator::new() {
            Ok(_) => {
                println!("š„ GPU acceleration available and ready");
                Ok(())
            }
            // NOTE(review): "Processing..." reads like placeholder text for a
            // real "GPU unavailable" error message — confirm intended wording.
            Err(_e) => Err("Processing...".to_string().into()),
        }
    }
1569
1570 pub fn load_starter_dataset(&mut self) -> Result<(), Box<dyn std::error::Error>> {
1572 let starter_data = if let Ok(file_content) =
1574 std::fs::read_to_string("training_data/starter_dataset.json")
1575 {
1576 file_content
1577 } else {
1578 r#"[
1580 {
1581 "fen": "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1",
1582 "evaluation": 0.0,
1583 "best_move": null,
1584 "depth": 0
1585 },
1586 {
1587 "fen": "rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1",
1588 "evaluation": 0.1,
1589 "best_move": "e7e5",
1590 "depth": 2
1591 },
1592 {
1593 "fen": "rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq e6 0 2",
1594 "evaluation": 0.0,
1595 "best_move": "g1f3",
1596 "depth": 2
1597 }
1598 ]"#
1599 .to_string()
1600 };
1601
1602 let training_data: Vec<serde_json::Value> = serde_json::from_str(&starter_data)?;
1603
1604 for entry in training_data {
1605 if let (Some(fen), Some(evaluation)) = (entry.get("fen"), entry.get("evaluation")) {
1606 if let (Some(fen_str), Some(eval_f64)) = (fen.as_str(), evaluation.as_f64()) {
1607 match chess::Board::from_str(fen_str) {
1608 Ok(board) => {
1609 self.add_position(&board, eval_f64 as f32);
1610 }
1611 Err(_) => {
1612 continue;
1614 }
1615 }
1616 }
1617 }
1618 }
1619
1620 Ok(())
1621 }
1622
    /// Dispatch loading of `path` to the loader matching `format`.
    ///
    /// Files larger than ~10 MB bypass the format-specific loaders and go
    /// through the ultra-fast loader regardless of format.
    fn load_file_by_format(
        &mut self,
        path: &std::path::Path,
        format: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let file_size = std::fs::metadata(path)?.len();

        // Large-file fast path: 10 MB threshold.
        if file_size > 10_000_000 {
            println!(
                "š Large file detected ({:.1} MB) - using ultra-fast loader",
                file_size as f64 / 1_000_000.0
            );
            return self.ultra_fast_load_any_format(path);
        }

        match format {
            "MMAP" => self.load_training_data_mmap(path),
            "MSGPACK" => self.load_training_data_msgpack(path),
            "BINARY" => self.load_training_data_streaming_binary(path),
            "ZSTD" => self.load_training_data_compressed(path),
            "JSON" => self.load_training_data_streaming_json_v2(path),
            // NOTE(review): "Processing..." reads like placeholder text for an
            // "unknown format" error message — confirm intended wording.
            _ => Err("Processing...".to_string().into()),
        }
    }
1651
    /// Load a (typically large) training file through the ultra-fast loader.
    ///
    /// Gated behind the "ultra_fast_loading" feature; prints the loader's
    /// statistics on completion.
    pub fn ultra_fast_load_any_format<P: AsRef<std::path::Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("ultra_fast_loading")?;

        let mut loader = UltraFastLoader::new_for_massive_datasets();
        loader.ultra_load_binary(path, self)?;

        let stats = loader.get_stats();
        println!("š Ultra-fast loading complete:");
        // NOTE(review): mojibake'd emoji splits this literal across lines.
        println!(" ā
Loaded: {} positions", stats.loaded);
        println!("Loading complete");
        println!("Loading complete");
        println!(" š Success rate: {:.1}%", stats.success_rate() * 100.0);

        Ok(())
    }
1672
1673 pub fn load_training_data_streaming_binary<P: AsRef<std::path::Path>>(
1676 &mut self,
1677 path: P,
1678 ) -> Result<(), Box<dyn std::error::Error>> {
1679 let mut loader = StreamingLoader::new();
1680 loader.stream_load_binary(path, self)?;
1681
1682 println!("š Streaming binary load complete:");
1683 println!(" Loaded: {} new positions", loader.loaded_count);
1684 println!("Loading complete");
1685 println!("Loading complete");
1686
1687 Ok(())
1688 }
1689
1690 pub fn load_training_data_streaming_json_v2<P: AsRef<std::path::Path>>(
1693 &mut self,
1694 path: P,
1695 ) -> Result<(), Box<dyn std::error::Error>> {
1696 let mut loader = StreamingLoader::new();
1697
1698 let batch_size = if std::fs::metadata(path.as_ref())?.len() > 100_000_000 {
1700 20000 } else {
1703 5000 };
1705
1706 loader.stream_load_json(path, self, batch_size)?;
1707
1708 println!("š Streaming JSON load complete:");
1709 println!(" Loaded: {} new positions", loader.loaded_count);
1710 println!("Loading complete");
1711 println!("Loading complete");
1712
1713 Ok(())
1714 }
1715
    /// Construct an engine tuned for very large datasets: discovers training
    /// files and ultra-fast-loads only the single largest one.
    pub fn new_for_massive_datasets(
        vector_size: usize,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine for MASSIVE datasets (100k-1M+ positions)...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        let discovered_files = AutoDiscovery::discover_training_files(".", false)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No training data found");
            return Ok(engine);
        }

        // unwrap is safe: the empty case returned early above.
        let largest_file = discovered_files
            .iter()
            .max_by_key(|f| f.size_bytes)
            .unwrap();

        println!(
            "šÆ Loading largest dataset: {} ({} bytes)",
            largest_file.path.display(),
            largest_file.size_bytes
        );

        engine.ultra_fast_load_any_format(&largest_file.path)?;

        println!(
            "šÆ Engine ready: {} positions loaded",
            engine.knowledge_base_size()
        );
        Ok(engine)
    }
1754
    /// Convert known JSON training files to MessagePack (`.msgpack`) for faster
    /// loading, printing a size-reduction summary per file.
    ///
    /// The A100 binary snapshot is first materialized to JSON so it can be
    /// converted too. Accepts both tuple-array (`[["fen", eval], ...]`) and
    /// object-array (`[{"fen": .., "evaluation": ..}, ...]`) JSON layouts.
    pub fn convert_to_msgpack() -> Result<(), Box<dyn std::error::Error>> {
        use serde_json::Value;
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Ensure the A100 binary snapshot has a JSON counterpart to convert.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        let input_files = [
            "training_data.json",
            "tactical_training_data.json",
            "training_data_a100.json",
        ];

        for input_file in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            let output_file_path = input_file.replace(".json", ".msgpack");
            println!(
                "š Converting {} ā {} (MessagePack format)",
                input_file, output_file_path
            );

            let file = File::open(input_path)?;
            let reader = BufReader::new(file);
            let json_value: Value = serde_json::from_reader(reader)?;

            // Normalize either layout into (FEN, evaluation) pairs; malformed
            // elements are silently dropped by filter_map.
            let data: Vec<(String, f32)> = match json_value {
                Value::Array(arr) if !arr.is_empty() => {
                    if let Some(first) = arr.first() {
                        if first.is_array() {
                            // Tuple layout: [fen, eval, ...extras ignored].
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Array(tuple) = item {
                                        if tuple.len() >= 2 {
                                            let fen = tuple[0].as_str()?.to_string();
                                            let eval = tuple[1].as_f64()? as f32;
                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else if first.is_object() {
                            // Object layout: {"fen": .., "evaluation": ..}.
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Object(obj) = item {
                                        let fen = obj.get("fen")?.as_str()?.to_string();
                                        let eval = obj.get("evaluation")?.as_f64()? as f32;
                                        Some((fen, eval))
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else {
                            // NOTE(review): "Processing..." reads like placeholder
                            // text for an "unrecognized layout" error message.
                            return Err("Processing...".to_string().into());
                        }
                    } else {
                        Vec::new()
                    }
                }
                _ => return Err("Processing...".to_string().into()),
            };

            if data.is_empty() {
                println!("Loading complete");
                continue;
            }

            let output_file = File::create(&output_file_path)?;
            let mut writer = BufWriter::new(output_file);
            rmp_serde::encode::write(&mut writer, &data)?;

            // Report the compression ratio achieved by the conversion.
            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(&output_file_path).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            // NOTE(review): mojibake'd emoji splits this literal across lines.
            println!(
                "ā
Converted: {} ā {} ({:.1}x size reduction, {} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio,
                data.len()
            );
        }

        Ok(())
    }
1859
    /// Materialize the A100 binary training snapshot as JSON
    /// (`training_data_a100.json`) so the format converters can consume it.
    ///
    /// No-op (Ok) when the binary file is absent.
    pub fn convert_a100_binary_to_json() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufWriter;

        let binary_path = "training_data_a100.bin";
        let json_path = "training_data_a100.json";

        if !std::path::Path::new(binary_path).exists() {
            println!("Loading complete");
            return Ok(());
        }

        println!(
            "š Converting A100 binary data {} ā {} (JSON format)",
            binary_path, json_path
        );

        // Decode the binary data by loading it into a throwaway engine.
        let mut engine = ChessVectorEngine::new(1024);
        engine.load_training_data_binary(binary_path)?;

        // Emit one JSON object per (board, evaluation) pair.
        // NOTE(review): "depth": 15 is a fixed placeholder, not a real search
        // depth — confirm downstream consumers are aware.
        let mut data = Vec::new();
        for (i, board) in engine.position_boards.iter().enumerate() {
            if i < engine.position_evaluations.len() {
                data.push(serde_json::json!({
                    "fen": board.to_string(),
                    "evaluation": engine.position_evaluations[i],
                    "depth": 15,
                    "game_id": i
                }));
            }
        }

        let file = File::create(json_path)?;
        let writer = BufWriter::new(file);
        serde_json::to_writer(writer, &data)?;

        // NOTE(review): mojibake'd emoji splits this literal across lines.
        println!(
            "ā
Converted A100 data: {} positions ā {}",
            data.len(),
            json_path
        );
        Ok(())
    }
1907
    /// Compress known training files (JSON and binary) with Zstandard level 9,
    /// writing `.zst` siblings and printing the size reduction per file.
    pub fn convert_to_zstd() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Ensure the A100 binary snapshot also has a JSON counterpart.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        // (input, output) pairs; missing inputs are skipped below.
        let input_files = [
            ("training_data.json", "training_data.zst"),
            ("tactical_training_data.json", "tactical_training_data.zst"),
            ("training_data_a100.json", "training_data_a100.zst"),
            ("training_data.bin", "training_data.bin.zst"),
            (
                "tactical_training_data.bin",
                "tactical_training_data.bin.zst",
            ),
            ("training_data_a100.bin", "training_data_a100.bin.zst"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!(
                "š Converting {} ā {} (Zstd compression)",
                input_file, output_file
            );

            // Stream-copy the input through a level-9 Zstd encoder.
            let input_file = File::open(input_path)?;
            let output_file_handle = File::create(output_file)?;
            let writer = BufWriter::new(output_file_handle);
            let mut encoder = zstd::stream::Encoder::new(writer, 9)?;
            std::io::copy(&mut BufReader::new(input_file), &mut encoder)?;
            // finish() flushes the zstd frame; dropping without it would
            // truncate the output.
            encoder.finish()?;

            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            // NOTE(review): mojibake'd emoji splits this literal across lines.
            println!(
                "ā
Compressed: {} ā {} ({:.1}x size reduction)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio
            );
        }

        Ok(())
    }
1964
    /// Convert JSON/MessagePack training files to the memory-mapped format
    /// (MessagePack-encoded `.mmap` files), printing size stats per file.
    ///
    /// NOTE(review): JSON and MessagePack entries share output names (e.g.
    /// both "training_data.json" and "training_data.msgpack" map to
    /// "training_data.mmap"), so a later conversion overwrites an earlier
    /// one — confirm this is intended.
    pub fn convert_to_mmap() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Ensure the A100 binary snapshot has a JSON counterpart to convert.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        let input_files = [
            ("training_data.json", "training_data.mmap"),
            ("tactical_training_data.json", "tactical_training_data.mmap"),
            ("training_data_a100.json", "training_data_a100.mmap"),
            ("training_data.msgpack", "training_data.mmap"),
            (
                "tactical_training_data.msgpack",
                "tactical_training_data.mmap",
            ),
            ("training_data_a100.msgpack", "training_data_a100.mmap"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!(
                "š Converting {} ā {} (Memory-mapped format)",
                input_file, output_file
            );

            // Decode the input into (FEN, evaluation) pairs.
            let data: Vec<(String, f32)> = if input_file.ends_with(".json") {
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                let json_value: Value = serde_json::from_reader(reader)?;

                // Accept tuple-array or object-array JSON layouts.
                match json_value {
                    Value::Array(arr) if !arr.is_empty() => {
                        if let Some(first) = arr.first() {
                            if first.is_array() {
                                // Tuple layout: [fen, eval, ...extras ignored].
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Array(tuple) = item {
                                            if tuple.len() >= 2 {
                                                let fen = tuple[0].as_str()?.to_string();
                                                let eval = tuple[1].as_f64()? as f32;
                                                Some((fen, eval))
                                            } else {
                                                None
                                            }
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else if first.is_object() {
                                // Object layout: {"fen": .., "evaluation": ..}.
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Object(obj) = item {
                                            let fen = obj.get("fen")?.as_str()?.to_string();
                                            let eval = obj.get("evaluation")?.as_f64()? as f32;
                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else {
                                return Err("Failed to process training data".into());
                            }
                        } else {
                            Vec::new()
                        }
                    }
                    // NOTE(review): "Processing..." reads like placeholder text.
                    _ => return Err("Processing...".to_string().into()),
                }
            } else if input_file.ends_with(".msgpack") {
                // MessagePack inputs already hold (String, f32) pairs.
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                rmp_serde::from_read(reader)?
            } else {
                return Err("Unsupported input format for memory mapping".into());
            };

            // Re-encode as MessagePack into the .mmap file.
            let output_file_handle = File::create(output_file)?;
            let mut writer = BufWriter::new(output_file_handle);
            rmp_serde::encode::write(&mut writer, &data)?;

            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();

            // NOTE(review): mojibake'd emoji splits this literal across lines.
            println!(
                "ā
Memory-mapped file created: {} ā {} ({} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                data.len()
            );
        }

        Ok(())
    }
2074
    /// Convert known JSON training files to the fast binary format, returning
    /// the paths of the binary files written.
    ///
    /// Each file is round-tripped through a scratch engine; per-file failures
    /// are reported but do not abort the batch.
    pub fn convert_json_to_binary() -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let json_files = [
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
        ];

        let existing_json_files: Vec<_> = json_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        if existing_json_files.is_empty() {
            println!("ā¹ļø No JSON training files found to convert");
            return Ok(Vec::new());
        }

        println!(
            "š Converting {} JSON files to binary format...",
            existing_json_files.len()
        );

        let pb = ProgressBar::new(existing_json_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template(
                    "š¦ Converting [{elapsed_precise}] [{bar:40.yellow/blue}] {pos}/{len} {msg}",
                )?
                .progress_chars("āāā"),
        );

        let mut converted_files = Vec::new();

        for (i, json_file) in existing_json_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            // Output name: same stem with a .bin extension.
            let binary_file = std::path::Path::new(json_file).with_extension("bin");

            // Round-trip through a scratch engine: load JSON, save binary.
            let mut temp_engine = Self::new(1024);
            if temp_engine
                .load_training_data_incremental(json_file)
                .is_ok()
            {
                if temp_engine.save_training_data_binary(&binary_file).is_ok() {
                    converted_files.push(binary_file.to_string_lossy().to_string());
                    // NOTE(review): mojibake'd emoji splits this literal.
                    println!("ā
Converted {} to binary format", json_file);
                } else {
                    println!("Loading complete");
                }
            } else {
                println!("Loading complete");
            }
        }

        pb.set_position(existing_json_files.len() as u64);
        pb.finish_with_message(format!("ā
Converted {} files", converted_files.len()));

        if !converted_files.is_empty() {
            println!("š Binary conversion complete! Startup will be 5-15x faster next time.");
            println!("š Conversion summary:");
            for _conversion in &converted_files {
                println!("Loading complete");
            }
        }

        Ok(converted_files)
    }
2150
2151 pub fn is_lsh_enabled(&self) -> bool {
2153 self.use_lsh
2154 }
2155
2156 pub fn lsh_stats(&self) -> Option<crate::lsh::LSHStats> {
2158 self.lsh_index.as_ref().map(|lsh| lsh.stats())
2159 }
2160
2161 pub fn enable_manifold_learning(&mut self, compression_ratio: f32) -> Result<(), String> {
2163 let input_dim = self.encoder.vector_size();
2164 let output_dim = ((input_dim as f32) / compression_ratio) as usize;
2165
2166 if output_dim == 0 {
2167 return Err("Compression ratio too high, output dimension would be 0".to_string());
2168 }
2169
2170 let mut learner = ManifoldLearner::new(input_dim, output_dim);
2171 learner.init_network()?;
2172
2173 self.manifold_learner = Some(learner);
2174 self.manifold_similarity_search = Some(SimilaritySearch::new(output_dim));
2175 self.use_manifold = false; Ok(())
2178 }
2179
    /// Train the manifold learner on every vector currently in the knowledge
    /// base, then rebuild the compressed indices and activate manifold search.
    ///
    /// Errors when manifold learning was never enabled or the knowledge base
    /// is empty.
    pub fn train_manifold_learning(&mut self, epochs: usize) -> Result<(), String> {
        if self.manifold_learner.is_none() {
            return Err(
                "Manifold learning not enabled. Call enable_manifold_learning first.".to_string(),
            );
        }

        if self.similarity_search.size() == 0 {
            return Err("No positions in knowledge base to train on.".to_string());
        }

        // Materialize the training matrix: one row per stored position vector.
        let rows = self.similarity_search.size();
        let cols = self.encoder.vector_size();

        let training_matrix = Array2::from_shape_fn((rows, cols), |(row, col)| {
            if let Some((vector, _)) = self.similarity_search.get_position_ref(row) {
                vector[col]
            } else {
                // Out-of-range rows should not occur; zero-fill defensively.
                0.0
            }
        });

        if let Some(ref mut learner) = self.manifold_learner {
            learner.train(&training_matrix, epochs)?;
            let compression_ratio = learner.compression_ratio();

            // Explicitly end the mutable borrow of the learner before
            // rebuild_manifold_indices re-borrows `self`.
            let _ = learner;

            self.rebuild_manifold_indices()?;
            self.use_manifold = true;

            println!(
                "Manifold learning training completed. Compression ratio: {:.1}x",
                compression_ratio
            );
        }

        Ok(())
    }
2224
    /// Recreate the compressed similarity-search index (and the manifold LSH
    /// index, when present) by re-encoding every stored position through the
    /// manifold learner. No-op when no learner is configured.
    fn rebuild_manifold_indices(&mut self) -> Result<(), String> {
        if let Some(ref learner) = self.manifold_learner {
            let output_dim = learner.output_dim();
            // Reset both compressed indices to empty structures of the new size.
            if let Some(ref mut search) = self.manifold_similarity_search {
                *search = SimilaritySearch::new(output_dim);
            }
            if let Some(ref mut lsh) = self.manifold_lsh_index {
                // NOTE(review): table/hash sizes (8, 16) are hard-coded here and
                // may diverge from values passed to enable_manifold_lsh — confirm.
                *lsh = LSH::new(output_dim, 8, 16);
            }

            // Re-encode every full-size vector into the compressed space.
            for (vector, eval) in self.similarity_search.iter_positions() {
                let compressed = learner.encode(vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), eval);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, eval);
                }
            }
        }

        Ok(())
    }
2253
2254 pub fn enable_manifold_lsh(
2256 &mut self,
2257 num_tables: usize,
2258 hash_size: usize,
2259 ) -> Result<(), String> {
2260 if self.manifold_learner.is_none() {
2261 return Err("Manifold learning not enabled".to_string());
2262 }
2263
2264 let output_dim = self.manifold_learner.as_ref().unwrap().output_dim();
2265 self.manifold_lsh_index = Some(LSH::new(output_dim, num_tables, hash_size));
2266
2267 if self.use_manifold {
2269 self.rebuild_manifold_indices()?;
2270 }
2271
2272 Ok(())
2273 }
2274
2275 pub fn is_manifold_enabled(&self) -> bool {
2277 self.use_manifold && self.manifold_learner.is_some()
2278 }
2279
2280 pub fn manifold_compression_ratio(&self) -> Option<f32> {
2282 self.manifold_learner
2283 .as_ref()
2284 .map(|l| l.compression_ratio())
2285 }
2286
    /// Load a pre-trained manifold learner from the attached database and
    /// rebuild the compressed indices with it.
    ///
    /// Errors when persistence is disabled or no model is stored.
    pub fn load_manifold_models(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(ref db) = self.database {
            match crate::manifold_learner::ManifoldLearner::load_from_database(db)? {
                Some(learner) => {
                    let compression_ratio = learner.compression_ratio();
                    println!(
                        "š§ Loaded pre-trained manifold learner (compression: {:.1}x)",
                        compression_ratio
                    );

                    self.manifold_learner = Some(learner);
                    self.use_manifold = true;

                    // Re-encode existing positions with the loaded model.
                    self.rebuild_manifold_indices()?;

                    // NOTE(review): mojibake'd emoji splits this literal.
                    println!("ā
Manifold learning enabled with compressed vectors");
                    Ok(())
                }
                None => Err("No pre-trained manifold models found in database".into()),
            }
        } else {
            Err("Database not initialized - cannot load manifold models".into())
        }
    }
2315
2316 pub fn enable_opening_book(&mut self) {
2318 self.opening_book = Some(OpeningBook::with_standard_openings());
2319 }
2320
2321 pub fn set_opening_book(&mut self, book: OpeningBook) {
2323 self.opening_book = Some(book);
2324 }
2325
2326 pub fn is_opening_position(&self, board: &Board) -> bool {
2328 self.opening_book
2329 .as_ref()
2330 .map(|book| book.contains(board))
2331 .unwrap_or(false)
2332 }
2333
2334 pub fn get_opening_entry(&self, board: &Board) -> Option<&OpeningEntry> {
2336 self.opening_book.as_ref()?.lookup(board)
2337 }
2338
2339 pub fn opening_book_stats(&self) -> Option<OpeningBookStats> {
2341 self.opening_book.as_ref().map(|book| book.stats())
2342 }
2343
2344 pub fn add_position_with_move(
2346 &mut self,
2347 board: &Board,
2348 evaluation: f32,
2349 chess_move: Option<ChessMove>,
2350 move_outcome: Option<f32>,
2351 ) {
2352 let position_index = self.knowledge_base_size();
2353
2354 self.add_position(board, evaluation);
2356
2357 if let (Some(mov), Some(outcome)) = (chess_move, move_outcome) {
2359 self.position_moves
2360 .entry(position_index)
2361 .or_default()
2362 .push((mov, outcome));
2363 }
2364 }
2365
    /// Recommend up to `num_recommendations` moves for `board`.
    ///
    /// Resolution order:
    /// 1. Opening book hit: return the book's best moves directly.
    /// 2. Moves recorded on similar positions, weighted by similarity.
    /// 3. Fallbacks when no history exists: tactical search (when enabled)
    ///    plus capture/centrality move ordering, so something is always
    ///    returned for a valid position.
    pub fn recommend_moves(
        &mut self,
        board: &Board,
        num_recommendations: usize,
    ) -> Vec<MoveRecommendation> {
        // 1. Opening book: authoritative when the position is known theory.
        if let Some(entry) = self.get_opening_entry(board) {
            let mut recommendations = Vec::new();

            for (chess_move, strength) in &entry.best_moves {
                recommendations.push(MoveRecommendation {
                    chess_move: *chess_move,
                    // Slight discount so book moves don't always reach 1.0.
                    confidence: strength * 0.9,
                    from_similar_position_count: 1,
                    average_outcome: entry.evaluation,
                });
            }

            recommendations.sort_by(|a, b| {
                b.confidence
                    .partial_cmp(&a.confidence)
                    .unwrap_or(std::cmp::Ordering::Equal)
            });
            recommendations.truncate(num_recommendations);
            return recommendations;
        }

        // 2. Gather evidence from up to 20 similar positions.
        let similar_positions = self.find_similar_positions_with_indices(board, 20);

        // move -> list of (similarity weight, recorded outcome) samples.
        let mut move_data: HashMap<ChessMove, Vec<(f32, f32)>> = HashMap::new();
        use chess::MoveGen;
        // Move generation can panic on corrupt boards; return no
        // recommendations rather than crash.
        let legal_moves: Vec<ChessMove> = match std::panic::catch_unwind(|| {
            MoveGen::new_legal(board).collect::<Vec<ChessMove>>()
        }) {
            Ok(moves) => moves,
            Err(_) => {
                return Vec::new();
            }
        };

        for (position_index, _eval, similarity) in similar_positions {
            if let Some(moves) = self.position_moves.get(&position_index) {
                for &(chess_move, outcome) in moves {
                    // Only keep moves that are legal in the current position.
                    if legal_moves.contains(&chess_move) {
                        move_data
                            .entry(chess_move)
                            .or_default()
                            .push((similarity, outcome));
                    }
                }
            }
        }

        // 3. No historical data: synthesize candidate moves.
        if move_data.is_empty() {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let tactical_result = tactical_search.search(board);

                // Tactical best move gets a fixed 0.75 similarity weight.
                if let Some(best_move) = tactical_result.best_move {
                    move_data.insert(best_move, vec![(0.75, tactical_result.evaluation)]);
                }

                // Order remaining legal moves: captures first, then by
                // destination-square centrality.
                let mut ordered_moves = legal_moves.clone();

                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                // Heuristic candidates get a weaker 0.6 weight, neutral outcome;
                // or_insert_with keeps the tactical move's stronger entry.
                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data
                        .entry(chess_move)
                        .or_insert_with(|| vec![(0.6, 0.0)]);
                }
            } else {
                // No tactical search: pure heuristic ordering at 0.3 weight.
                let mut ordered_moves = legal_moves.clone();

                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data.insert(chess_move, vec![(0.3, 0.0)]);
                }
            }
        }

        // Aggregate samples per move into a recommendation.
        let mut recommendations = Vec::new();

        for (chess_move, outcomes) in move_data {
            if outcomes.is_empty() {
                continue;
            }

            // Similarity-weighted average of observed outcomes.
            let mut weighted_sum = 0.0;
            let mut weight_sum = 0.0;

            for &(similarity, outcome) in &outcomes {
                weighted_sum += similarity * outcome;
                weight_sum += similarity;
            }

            let average_outcome = if weight_sum > 0.0 {
                weighted_sum / weight_sum
            } else {
                0.0
            };

            // Confidence blends mean similarity (80%) with a log bonus for
            // sample count (20%), capped at 0.95 (and defensively at 1.0).
            let avg_similarity =
                outcomes.iter().map(|(s, _)| s).sum::<f32>() / outcomes.len() as f32;
            let position_count_bonus = (outcomes.len() as f32).ln().max(1.0) / 5.0;
            let confidence = (avg_similarity * 0.8 + position_count_bonus * 0.2).min(0.95);
            recommendations.push(MoveRecommendation {
                chess_move,
                confidence: confidence.min(1.0),
                from_similar_position_count: outcomes.len(),
                average_outcome,
            });
        }

        // Highest-confidence first.
        recommendations.sort_by(|a, b| {
            b.confidence
                .partial_cmp(&a.confidence)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        recommendations.truncate(num_recommendations);
        recommendations
    }
2557
2558 pub fn recommend_legal_moves(
2560 &mut self,
2561 board: &Board,
2562 num_recommendations: usize,
2563 ) -> Vec<MoveRecommendation> {
2564 use chess::MoveGen;
2565
2566 let legal_moves: std::collections::HashSet<ChessMove> = MoveGen::new_legal(board).collect();
2568
2569 let all_recommendations = self.recommend_moves(board, num_recommendations * 2); all_recommendations
2573 .into_iter()
2574 .filter(|rec| legal_moves.contains(&rec.chess_move))
2575 .take(num_recommendations)
2576 .collect()
2577 }
2578
2579 pub fn enable_persistence<P: AsRef<Path>>(
2581 &mut self,
2582 db_path: P,
2583 ) -> Result<(), Box<dyn std::error::Error>> {
2584 let database = Database::new(db_path)?;
2585 self.database = Some(database);
2586 println!("Persistence enabled");
2587 Ok(())
2588 }
2589
2590 pub fn save_to_database(&self) -> Result<(), Box<dyn std::error::Error>> {
2592 let db = self
2593 .database
2594 .as_ref()
2595 .ok_or("Database not enabled. Call enable_persistence() first.")?;
2596
2597 println!("š¾ Saving engine state to database (batch mode)...");
2598
2599 let current_time = std::time::SystemTime::now()
2601 .duration_since(std::time::UNIX_EPOCH)?
2602 .as_secs() as i64;
2603
2604 let mut position_data_batch = Vec::with_capacity(self.position_boards.len());
2605
2606 for (i, board) in self.position_boards.iter().enumerate() {
2607 if i < self.position_vectors.len() && i < self.position_evaluations.len() {
2608 let vector = self.position_vectors[i].as_slice().unwrap();
2609 let position_data = PositionData {
2610 fen: board.to_string(),
2611 vector: vector.iter().map(|&x| x as f64).collect(),
2612 evaluation: Some(self.position_evaluations[i] as f64),
2613 compressed_vector: None, created_at: current_time,
2615 };
2616 position_data_batch.push(position_data);
2617 }
2618 }
2619
2620 if !position_data_batch.is_empty() {
2622 let saved_count = db.save_positions_batch(&position_data_batch)?;
2623 println!("š Batch saved {} positions", saved_count);
2624 }
2625
2626 if let Some(ref lsh) = self.lsh_index {
2628 lsh.save_to_database(db)?;
2629 }
2630
2631 if let Some(ref learner) = self.manifold_learner {
2633 if learner.is_trained() {
2634 learner.save_to_database(db)?;
2635 }
2636 }
2637
2638 println!("ā
Engine state saved successfully (batch optimized)");
2639 Ok(())
2640 }
2641
    /// Reload engine state from the configured database.
    ///
    /// Positions are re-added to the in-memory similarity index, then LSH and
    /// manifold learner state are restored if present. Ordering matters: the
    /// LSH rebuild below consumes the `position_vectors` /
    /// `position_evaluations` populated by the first loop.
    ///
    /// # Errors
    /// Fails if persistence is not enabled or any database read fails.
    pub fn load_from_database(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let db = self
            .database
            .as_ref()
            .ok_or("Database not enabled. Call enable_persistence() first.")?;

        println!("Loading engine state from database...");

        let positions = db.load_all_positions()?;
        for position_data in positions {
            // Rows with unparseable FEN strings are silently skipped.
            if let Ok(board) = Board::from_str(&position_data.fen) {
                let vector: Vec<f32> = position_data.vector.iter().map(|&x| x as f32).collect();
                let vector_array = Array1::from(vector);
                let evaluation = position_data.evaluation.unwrap_or(0.0) as f32;

                self.similarity_search
                    .add_position(vector_array.clone(), evaluation);

                self.position_vectors.push(vector_array);
                self.position_boards.push(board);
                self.position_evaluations.push(evaluation);
            }
        }

        if self.use_lsh {
            // Pair every restored vector with its evaluation so the LSH index
            // can be rebuilt from the saved configuration.
            let positions_for_lsh: Vec<(Array1<f32>, f32)> = self
                .position_vectors
                .iter()
                .zip(self.position_evaluations.iter())
                .map(|(v, &e)| (v.clone(), e))
                .collect();

            match LSH::load_from_database(db, &positions_for_lsh)? {
                Some(lsh) => {
                    self.lsh_index = Some(lsh);
                    println!("Loaded LSH configuration from database");
                }
                None => {
                    println!("No LSH configuration found in database");
                }
            }
        }

        match ManifoldLearner::load_from_database(db)? {
            Some(learner) => {
                self.manifold_learner = Some(learner);
                // Manifold indices are derived state; rebuild only when
                // manifold search is actually in use.
                if self.use_manifold {
                    self.rebuild_manifold_indices()?;
                }
                println!("Loaded manifold learner from database");
            }
            None => {
                println!("No manifold learner found in database");
            }
        }

        println!(
            "Engine state loaded successfully ({} positions)",
            self.knowledge_base_size()
        );
        Ok(())
    }
2710
2711 pub fn new_with_persistence<P: AsRef<Path>>(
2713 vector_size: usize,
2714 db_path: P,
2715 ) -> Result<Self, Box<dyn std::error::Error>> {
2716 let mut engine = Self::new(vector_size);
2717 engine.enable_persistence(db_path)?;
2718
2719 match engine.load_from_database() {
2721 Ok(_) => {
2722 println!("Loaded existing engine from database");
2723 }
2724 Err(e) => {
2725 println!("Starting fresh engine (load failed: {})", e);
2726 }
2727 }
2728
2729 Ok(engine)
2730 }
2731
2732 pub fn auto_save(&self) -> Result<(), Box<dyn std::error::Error>> {
2734 if self.database.is_some() {
2735 self.save_to_database()?;
2736 }
2737 Ok(())
2738 }
2739
2740 pub fn is_persistence_enabled(&self) -> bool {
2742 self.database.is_some()
2743 }
2744
2745 pub fn database_position_count(&self) -> Result<i64, Box<dyn std::error::Error>> {
2747 let db = self.database.as_ref().ok_or("Database not enabled")?;
2748 Ok(db.get_position_count()?)
2749 }
2750
2751 pub fn enable_tactical_search(&mut self, config: TacticalConfig) {
2753 self.tactical_search = Some(TacticalSearch::new(config));
2754 }
2755
2756 pub fn enable_tactical_search_default(&mut self) {
2758 self.tactical_search = Some(TacticalSearch::new_default());
2759 }
2760
    /// Replace the hybrid evaluation configuration wholesale.
    pub fn configure_hybrid_evaluation(&mut self, config: HybridConfig) {
        self.hybrid_config = config;
    }
2765
2766 pub fn is_tactical_search_enabled(&self) -> bool {
2768 self.tactical_search.is_some()
2769 }
2770
2771 pub fn enable_parallel_search(&mut self, num_threads: usize) {
2773 if let Some(ref mut tactical_search) = self.tactical_search {
2774 tactical_search.config.enable_parallel_search = true;
2775 tactical_search.config.num_threads = num_threads;
2776 println!(
2777 "š§µ Parallel tactical search enabled with {} threads",
2778 num_threads
2779 );
2780 }
2781 }
2782
2783 pub fn is_parallel_search_enabled(&self) -> bool {
2785 self.tactical_search
2786 .as_ref()
2787 .map(|ts| ts.config.enable_parallel_search)
2788 .unwrap_or(false)
2789 }
2790
    /// Borrow the current hybrid evaluation configuration.
    pub fn hybrid_config(&self) -> &HybridConfig {
        &self.hybrid_config
    }
2814
2815 pub fn is_opening_book_enabled(&self) -> bool {
2817 self.opening_book.is_some()
2818 }
2819
2820 pub fn self_play_training(
2822 &mut self,
2823 config: training::SelfPlayConfig,
2824 ) -> Result<usize, Box<dyn std::error::Error>> {
2825 let mut trainer = training::SelfPlayTrainer::new(config);
2826 let new_data = trainer.generate_training_data(self);
2827
2828 let positions_added = new_data.data.len();
2829
2830 for data in &new_data.data {
2832 self.add_position(&data.board, data.evaluation);
2833 }
2834
2835 if self.database.is_some() {
2837 match self.save_to_database() {
2838 Ok(_) => println!("š¾ Saved {} positions to database", positions_added),
2839 Err(_e) => println!("Loading complete"),
2840 }
2841 }
2842
2843 println!(
2844 "š§ Self-play training complete: {} new positions learned",
2845 positions_added
2846 );
2847 Ok(positions_added)
2848 }
2849
2850 pub fn continuous_self_play(
2852 &mut self,
2853 config: training::SelfPlayConfig,
2854 iterations: usize,
2855 save_path: Option<&str>,
2856 ) -> Result<usize, Box<dyn std::error::Error>> {
2857 let mut total_positions = 0;
2858 let mut trainer = training::SelfPlayTrainer::new(config.clone());
2859
2860 println!(
2861 "š Starting continuous self-play training for {} iterations...",
2862 iterations
2863 );
2864
2865 for iteration in 1..=iterations {
2866 println!("\n--- Self-Play Iteration {}/{} ---", iteration, iterations);
2867
2868 let new_data = trainer.generate_training_data(self);
2870 let batch_size = new_data.data.len();
2871
2872 for data in &new_data.data {
2874 self.add_position(&data.board, data.evaluation);
2875 }
2876
2877 total_positions += batch_size;
2878
2879 println!(
2880 "ā
Iteration {}: Added {} positions (total: {})",
2881 iteration,
2882 batch_size,
2883 self.knowledge_base_size()
2884 );
2885
2886 if iteration % 5 == 0 || iteration == iterations {
2888 if let Some(path) = save_path {
2890 match self.save_training_data_binary(path) {
2891 Ok(_) => println!("š¾ Progress saved to {} (binary format)", path),
2892 Err(_e) => println!("Loading complete"),
2893 }
2894 }
2895
2896 if self.database.is_some() {
2898 match self.save_to_database() {
2899 Ok(_) => println!(
2900 "š¾ Database synchronized ({} total positions)",
2901 self.knowledge_base_size()
2902 ),
2903 Err(_e) => println!("Loading complete"),
2904 }
2905 }
2906 }
2907
2908 if iteration % 10 == 0
2910 && self.knowledge_base_size() > 5000
2911 && self.manifold_learner.is_some()
2912 {
2913 println!("š§ Retraining manifold learning with new data...");
2914 let _ = self.train_manifold_learning(5);
2915 }
2916 }
2917
2918 println!(
2919 "\nš Continuous self-play complete: {} total new positions",
2920 total_positions
2921 );
2922 Ok(total_positions)
2923 }
2924
2925 pub fn adaptive_self_play(
2927 &mut self,
2928 base_config: training::SelfPlayConfig,
2929 target_strength: f32,
2930 ) -> Result<usize, Box<dyn std::error::Error>> {
2931 let mut current_config = base_config;
2932 let mut total_positions = 0;
2933 let mut iteration = 1;
2934
2935 println!(
2936 "šÆ Starting adaptive self-play training (target strength: {:.2})...",
2937 target_strength
2938 );
2939
2940 loop {
2941 println!("\n--- Adaptive Iteration {} ---", iteration);
2942
2943 let positions_added = self.self_play_training(current_config.clone())?;
2945 total_positions += positions_added;
2946
2947 if self.database.is_some() {
2949 match self.save_to_database() {
2950 Ok(_) => println!("š¾ Adaptive training progress saved to database"),
2951 Err(_e) => println!("Loading complete"),
2952 }
2953 }
2954
2955 let current_strength = self.knowledge_base_size() as f32 / 10000.0; println!(
2959 "š Current strength estimate: {:.2} (target: {:.2})",
2960 current_strength, target_strength
2961 );
2962
2963 if current_strength >= target_strength {
2964 println!("š Target strength reached!");
2965 break;
2966 }
2967
2968 current_config.exploration_factor *= 0.95; current_config.temperature *= 0.98; current_config.games_per_iteration =
2972 (current_config.games_per_iteration as f32 * 1.1) as usize; iteration += 1;
2975
2976 if iteration > 50 {
2977 println!("ā ļø Maximum iterations reached");
2978 break;
2979 }
2980 }
2981
2982 Ok(total_positions)
2983 }
2984}
2985
#[cfg(test)]
mod tests {
    use super::*;
    use chess::Board;

    // A fresh engine starts with an empty knowledge base.
    #[test]
    fn test_engine_creation() {
        let engine = ChessVectorEngine::new(1024);
        assert_eq!(engine.knowledge_base_size(), 0);
    }

    // Adding a position makes it discoverable via similarity search.
    #[test]
    fn test_add_and_search() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.0);
        assert_eq!(engine.knowledge_base_size(), 1);

        let similar = engine.find_similar_positions(&board, 1);
        assert_eq!(similar.len(), 1);
    }

    // Evaluating a known position should round-trip the stored value.
    #[test]
    fn test_evaluation() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.5);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());
        assert!((evaluation.unwrap() - 0.5).abs() < 1e-6);
    }

    // Move data stored with a position feeds both recommendation paths.
    #[test]
    fn test_move_recommendations() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let mov = ChessMove::from_str("e2e4").unwrap();
        engine.add_position_with_move(&board, 0.0, Some(mov), Some(0.8));

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());

        let legal_recommendations = engine.recommend_legal_moves(&board, 3);
        assert!(!legal_recommendations.is_empty());
    }

    // With no training data, recommend_moves must still produce legal
    // fallback recommendations with synthetic confidence/outcome values.
    #[test]
    fn test_empty_knowledge_base_fallback() {
        let mut engine = ChessVectorEngine::new(1024);

        use std::str::FromStr;
        let board =
            Board::from_str("r1bqkbnr/pppp1ppp/2n5/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 0 1")
                .unwrap();

        let recommendations = engine.recommend_moves(&board, 5);
        assert!(
            !recommendations.is_empty(),
            "recommend_moves should not return empty even with no training data"
        );
        assert_eq!(
            recommendations.len(),
            5,
            "Should return exactly 5 recommendations"
        );

        // Fallback entries carry positive confidence, a count of 1 and a
        // neutral outcome.
        for rec in &recommendations {
            assert!(rec.confidence > 0.0, "Confidence should be greater than 0");
            assert_eq!(
                rec.from_similar_position_count, 1,
                "Should have count of 1 for fallback"
            );
            assert_eq!(rec.average_outcome, 0.0, "Should have neutral outcome");
        }

        let starting_board = Board::default();
        let starting_recommendations = engine.recommend_moves(&starting_board, 3);
        assert!(
            !starting_recommendations.is_empty(),
            "Should work for starting position too"
        );

        // Every fallback recommendation must be a legal move.
        use chess::MoveGen;
        let legal_moves: std::collections::HashSet<_> = MoveGen::new_legal(&board).collect();
        for rec in &recommendations {
            assert!(
                legal_moves.contains(&rec.chess_move),
                "All recommended moves should be legal"
            );
        }
    }

    // The opening book recognizes the starting position and yields
    // high-confidence recommendations.
    #[test]
    fn test_opening_book_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        engine.enable_opening_book();
        assert!(engine.opening_book.is_some());

        let board = Board::default();
        assert!(engine.is_opening_position(&board));

        let entry = engine.get_opening_entry(&board);
        assert!(entry.is_some());

        let stats = engine.opening_book_stats();
        assert!(stats.is_some());
        assert!(stats.unwrap().total_positions > 0);

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());
        assert!(recommendations[0].confidence > 0.7);
    }

    // Manifold learning can be enabled, reports the requested compression
    // ratio, trains, and leaves similarity search functional.
    #[test]
    fn test_manifold_learning_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..10 {
            engine.add_position(&board, i as f32 * 0.1);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());

        let ratio = engine.manifold_compression_ratio();
        assert!(ratio.is_some());
        assert!((ratio.unwrap() - 8.0).abs() < 0.1);

        assert!(engine.train_manifold_learning(5).is_ok());

        let original_similar = engine.find_similar_positions(&board, 3);
        assert!(!original_similar.is_empty());
    }

    // LSH indexing keeps search and evaluation working.
    #[test]
    fn test_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..50 {
            engine.add_position(&board, i as f32 * 0.02);
        }

        engine.enable_lsh(4, 8);

        let similar = engine.find_similar_positions(&board, 5);
        assert!(!similar.is_empty());
        assert!(similar.len() <= 5);

        let eval = engine.evaluate_position(&board);
        assert!(eval.is_some());
    }

    // Manifold learning and LSH can be combined.
    #[test]
    fn test_manifold_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..20 {
            engine.add_position(&board, i as f32 * 0.05);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());
        assert!(engine.train_manifold_learning(3).is_ok());

        assert!(engine.enable_manifold_lsh(4, 8).is_ok());

        let similar = engine.find_similar_positions(&board, 3);
        assert!(!similar.is_empty());

        let _recommendations = engine.recommend_moves(&board, 2);
    }

    // Positions stored with moves populate position_moves and feed
    // recommendations.
    #[test]
    fn test_position_with_move_storage() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let move1 = ChessMove::from_str("e2e4").unwrap();
        let move2 = ChessMove::from_str("d2d4").unwrap();

        engine.add_position_with_move(&board, 0.0, Some(move1), Some(0.7));
        engine.add_position_with_move(&board, 0.1, Some(move2), Some(0.6));

        assert_eq!(engine.position_moves.len(), 2);

        let recommendations = engine.recommend_moves(&board, 5);
        let _move_strings: Vec<String> = recommendations
            .iter()
            .map(|r| r.chess_move.to_string())
            .collect();

        assert!(!recommendations.is_empty());
    }

    // Coarse performance guard: encoding and search must stay well under
    // generous wall-clock bounds.
    #[test]
    fn test_performance_regression_basic() {
        use std::time::Instant;

        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        // Warm up with some stored positions.
        for i in 0..100 {
            engine.add_position(&board, i as f32 * 0.01);
        }

        let start = Instant::now();

        for _ in 0..100 {
            engine.add_position(&board, 0.0);
        }

        let encoding_time = start.elapsed();

        let start = Instant::now();
        for _ in 0..10 {
            engine.find_similar_positions(&board, 5);
        }
        let search_time = start.elapsed();

        assert!(
            encoding_time.as_millis() < 10000,
            "Position encoding too slow: {}ms",
            encoding_time.as_millis()
        );
        assert!(
            search_time.as_millis() < 5000,
            "Search too slow: {}ms",
            search_time.as_millis()
        );
    }

    // Knowledge base size tracks the number of added positions.
    #[test]
    fn test_memory_usage_reasonable() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        let initial_size = engine.knowledge_base_size();

        for i in 0..1000 {
            engine.add_position(&board, i as f32 * 0.001);
        }

        let final_size = engine.knowledge_base_size();
        assert_eq!(final_size, initial_size + 1000);

        assert!(final_size > initial_size);
    }

    // Incremental dataset training: size goes 2 -> 3 here, so the dataset
    // entry re-adding board1 evidently does not produce a duplicate position.
    #[test]
    fn test_incremental_training() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);
        let board1 = Board::default();
        let board2 =
            Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap();

        engine.add_position(&board1, 0.0);
        engine.add_position(&board2, 0.2);
        assert_eq!(engine.knowledge_base_size(), 2);

        let mut dataset = crate::training::TrainingDataset::new();
        dataset.add_position(board1, 0.1, 15, 1);
        dataset.add_position(
            Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
            15,
            2,
        );
        engine.train_from_dataset_incremental(&dataset);

        assert_eq!(engine.knowledge_base_size(), 3);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 3);
        assert_eq!(stats.unique_positions, 3);
        assert!(!stats.has_move_data);
    }

    // Saving from one engine and incrementally loading into another merges
    // the knowledge bases.
    #[test]
    fn test_save_load_incremental() {
        use std::str::FromStr;
        use tempfile::tempdir;

        let temp_dir = tempdir().unwrap();
        let file_path = temp_dir.path().join("test_training.json");

        let mut engine1 = ChessVectorEngine::new(1024);
        engine1.add_position(&Board::default(), 0.0);
        engine1.add_position(
            &Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap(),
            0.2,
        );

        engine1.save_training_data(&file_path).unwrap();

        let mut engine2 = ChessVectorEngine::new(1024);
        engine2.add_position(
            &Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
        );
        assert_eq!(engine2.knowledge_base_size(), 1);

        engine2.load_training_data_incremental(&file_path).unwrap();

        // 1 original + 2 loaded positions.
        assert_eq!(engine2.knowledge_base_size(), 3);
    }

    // training_stats reflects positions, move data, and enabled features.
    #[test]
    fn test_training_stats() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);

        // Empty engine: everything zero / disabled.
        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 0);
        assert_eq!(stats.unique_positions, 0);
        assert!(!stats.has_move_data);
        assert!(!stats.lsh_enabled);
        assert!(!stats.manifold_enabled);
        assert!(!stats.opening_book_enabled);

        engine.add_position(&Board::default(), 0.0);
        engine.add_position_with_move(
            &Board::default(),
            0.1,
            Some(ChessMove::from_str("e2e4").unwrap()),
            Some(0.8),
        );

        engine.enable_opening_book();
        engine.enable_lsh(4, 8);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 2);
        assert!(stats.has_move_data);
        assert!(stats.move_data_entries > 0);
        assert!(stats.lsh_enabled);
        assert!(stats.opening_book_enabled);
    }

    // Tactical search toggles on and evaluation works both before and after
    // the knowledge base has data.
    #[test]
    fn test_tactical_search_integration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        assert!(!engine.is_tactical_search_enabled());

        engine.enable_tactical_search_default();
        assert!(engine.is_tactical_search_enabled());

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        engine.add_position(&board, 0.5);
        let hybrid_evaluation = engine.evaluate_position(&board);
        assert!(hybrid_evaluation.is_some());
    }

    // Hybrid evaluation accepts both a fully custom config and one with
    // tactical refinement disabled.
    #[test]
    fn test_hybrid_evaluation_configuration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.enable_tactical_search_default();

        let custom_config = HybridConfig {
            pattern_confidence_threshold: 0.9,
            enable_tactical_refinement: true,
            tactical_config: TacticalConfig::default(),
            pattern_weight: 0.8,
            min_similar_positions: 5,
        };

        engine.configure_hybrid_evaluation(custom_config);

        engine.add_position(&board, 0.3);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        let no_tactical_config = HybridConfig {
            enable_tactical_refinement: false,
            ..HybridConfig::default()
        };

        engine.configure_hybrid_evaluation(no_tactical_config);

        let pattern_only_evaluation = engine.evaluate_position(&board);
        assert!(pattern_only_evaluation.is_some());
    }
}