1pub mod ann;
67pub mod auto_discovery;
68pub mod features;
69pub mod gpu_acceleration;
70pub mod license;
71pub mod lichess_loader;
72pub mod lsh;
73pub mod manifold_learner;
74pub mod nnue;
75pub mod opening_book;
76pub mod persistence;
77pub mod position_encoder;
78pub mod similarity_search;
79pub mod streaming_loader;
80pub mod tactical_search;
81pub mod training;
82pub mod ultra_fast_loader;
83pub mod variational_autoencoder;
84pub mod uci;
86
87pub use auto_discovery::{AutoDiscovery, FormatPriority, TrainingFile};
88pub use features::{FeatureChecker, FeatureError, FeatureRegistry, FeatureTier};
89pub use gpu_acceleration::{DeviceType, GPUAccelerator};
90pub use license::{
91 LicenseError, LicenseKey, LicenseStatus, LicenseVerifier, LicensedFeatureChecker,
92};
93pub use lichess_loader::{load_lichess_puzzles_basic, load_lichess_puzzles_premium, LichessLoader};
94pub use lsh::LSH;
95pub use manifold_learner::ManifoldLearner;
96pub use nnue::{BlendStrategy, EvalStats, HybridEvaluator, NNUEConfig, NNUE};
97pub use opening_book::{OpeningBook, OpeningBookStats, OpeningEntry};
98pub use persistence::{Database, LSHTableData, PositionData};
99pub use position_encoder::PositionEncoder;
100pub use similarity_search::SimilaritySearch;
101pub use streaming_loader::StreamingLoader;
102pub use tactical_search::{TacticalConfig, TacticalResult, TacticalSearch};
103pub use training::{
104 EngineEvaluator, GameExtractor, SelfPlayConfig, SelfPlayTrainer, TacticalPuzzle,
105 TacticalPuzzleParser, TacticalTrainingData, TrainingData, TrainingDataset,
106};
107pub use ultra_fast_loader::{LoadingStats, UltraFastLoader};
108pub use variational_autoencoder::{VAEConfig, VariationalAutoencoder};
109pub use uci::{run_uci_engine, run_uci_engine_with_config, UCIConfig, UCIEngine};
111
112use chess::{Board, ChessMove};
113use ndarray::{Array1, Array2};
114use serde_json::Value;
115use std::collections::HashMap;
116use std::path::Path;
117use std::str::FromStr;
118
119fn move_centrality(chess_move: &ChessMove) -> f32 {
122 let dest_square = chess_move.get_dest();
123 let rank = dest_square.get_rank().to_index() as f32;
124 let file = dest_square.get_file().to_index() as f32;
125
126 let center_rank = 3.5;
128 let center_file = 3.5;
129
130 let rank_distance = (rank - center_rank).abs();
131 let file_distance = (file - center_file).abs();
132
133 let max_distance = 3.5; let distance = (rank_distance + file_distance) / 2.0;
136 max_distance - distance
137}
138
/// A candidate move suggested from previously seen similar positions.
#[derive(Debug, Clone)]
pub struct MoveRecommendation {
    /// The recommended move itself.
    pub chess_move: ChessMove,
    /// Confidence in the recommendation (presumably in `[0.0, 1.0]` -- TODO confirm at call sites).
    pub confidence: f32,
    /// How many similar known positions contributed to this recommendation.
    pub from_similar_position_count: usize,
    /// Average evaluation outcome observed across those positions.
    pub average_outcome: f32,
}
147
/// Snapshot of the engine's accumulated training knowledge,
/// as reported by `ChessVectorEngine::training_stats`.
#[derive(Debug, Clone)]
pub struct TrainingStats {
    /// Total positions in the similarity-search knowledge base.
    pub total_positions: usize,
    /// Number of boards tracked in the engine's board list.
    pub unique_positions: usize,
    /// Whether any per-position move data has been recorded.
    pub has_move_data: bool,
    /// Number of positions that have associated move data.
    pub move_data_entries: usize,
    /// True when an LSH index is active for approximate search.
    pub lsh_enabled: bool,
    /// True when manifold (compressed-vector) search is active.
    pub manifold_enabled: bool,
    /// True when an opening book has been attached.
    pub opening_book_enabled: bool,
}
159
/// Tuning knobs for blending pattern-based and tactical evaluation.
#[derive(Debug, Clone)]
pub struct HybridConfig {
    /// Pattern confidence below this threshold triggers tactical refinement.
    pub pattern_confidence_threshold: f32,
    /// Master switch for falling back to tactical search.
    pub enable_tactical_refinement: bool,
    /// Configuration forwarded to the tactical searcher.
    pub tactical_config: TacticalConfig,
    /// Base weight given to the pattern evaluation when blending
    /// (scaled by confidence in `evaluate_position`).
    pub pattern_weight: f32,
    /// Similar-position count needed for full confidence in the pattern result.
    pub min_similar_positions: usize,
}
174
175impl Default for HybridConfig {
176 fn default() -> Self {
177 Self {
178 pattern_confidence_threshold: 0.8,
179 enable_tactical_refinement: true,
180 tactical_config: TacticalConfig::default(),
181 pattern_weight: 0.7, min_similar_positions: 3,
183 }
184 }
185}
186
/// Core engine: encodes chess positions as feature vectors and evaluates new
/// positions by similarity to previously stored ones, optionally refined by
/// tactical search and an opening book.
pub struct ChessVectorEngine {
    /// Converts a `Board` into a fixed-size feature vector.
    encoder: PositionEncoder,
    /// Exact nearest-neighbour store over full-size vectors.
    similarity_search: SimilaritySearch,
    /// Optional approximate index for large knowledge bases.
    lsh_index: Option<LSH>,
    /// Optional dimensionality-reduction model for compressed search.
    manifold_learner: Option<ManifoldLearner>,
    /// Prefer LSH lookup over brute-force search.
    use_lsh: bool,
    /// Prefer manifold (compressed) search when a learner is present.
    use_manifold: bool,
    /// Per-position candidate moves with scores, keyed by position index.
    position_moves: HashMap<usize, Vec<(ChessMove, f32)>>,
    /// Similarity store over manifold-compressed vectors.
    manifold_similarity_search: Option<SimilaritySearch>,
    /// LSH index over manifold-compressed vectors.
    manifold_lsh_index: Option<LSH>,
    /// Gates functionality by feature tier.
    feature_checker: FeatureChecker,
    /// License-backed feature gating, when a license is installed.
    licensed_feature_checker: Option<LicensedFeatureChecker>,
    // The next three vectors are parallel arrays: index i holds the vector,
    // board, and evaluation of the same stored position.
    position_vectors: Vec<Array1<f32>>,
    position_boards: Vec<Board>,
    position_evaluations: Vec<f32>,
    /// Optional opening book consulted before similarity search.
    opening_book: Option<OpeningBook>,
    /// Optional on-disk persistence backend.
    database: Option<Database>,
    /// Optional tactical searcher for hybrid evaluation.
    tactical_search: Option<TacticalSearch>,
    /// Blending configuration for pattern vs. tactical evaluation.
    hybrid_config: HybridConfig,
}
274
impl Clone for ChessVectorEngine {
    /// Clones the engine's knowledge base and indexes. Components that are
    /// not cloneable (or not meaningful to share) are reset instead: the
    /// manifold learner, the license checker, and the database handle all
    /// start out disabled/`None` in the clone.
    fn clone(&self) -> Self {
        Self {
            encoder: self.encoder.clone(),
            similarity_search: self.similarity_search.clone(),
            lsh_index: self.lsh_index.clone(),
            // The manifold model is dropped, so compressed search is disabled.
            manifold_learner: None,
            use_lsh: self.use_lsh,
            use_manifold: false,
            position_moves: self.position_moves.clone(),
            // NOTE(review): the compressed-vector stores are still cloned even
            // though `use_manifold` is reset -- presumably kept so manifold
            // mode can be re-enabled later; confirm.
            manifold_similarity_search: self.manifold_similarity_search.clone(),
            manifold_lsh_index: self.manifold_lsh_index.clone(),
            feature_checker: self.feature_checker.clone(),
            // License state is per-instance and is not carried over.
            licensed_feature_checker: None,
            position_vectors: self.position_vectors.clone(),
            position_boards: self.position_boards.clone(),
            position_evaluations: self.position_evaluations.clone(),
            opening_book: self.opening_book.clone(),
            // Database handles are not shared; the clone starts without persistence.
            database: None,
            tactical_search: self.tactical_search.clone(),
            hybrid_config: self.hybrid_config.clone(),
        }
    }
}
300
301impl ChessVectorEngine {
    /// Creates an engine with the given feature-vector dimensionality.
    ///
    /// The engine starts empty, at the open-source feature tier, with every
    /// optional subsystem (LSH, manifold search, opening book, persistence,
    /// tactical search) disabled.
    pub fn new(vector_size: usize) -> Self {
        Self {
            encoder: PositionEncoder::new(vector_size),
            similarity_search: SimilaritySearch::new(vector_size),
            lsh_index: None,
            manifold_learner: None,
            use_lsh: false,
            use_manifold: false,
            position_moves: HashMap::new(),
            manifold_similarity_search: None,
            manifold_lsh_index: None,
            // Default tier; change via `new_with_tier` or `upgrade_tier`.
            feature_checker: FeatureChecker::new(FeatureTier::OpenSource),
            licensed_feature_checker: None,
            position_vectors: Vec::new(),
            position_boards: Vec::new(),
            position_evaluations: Vec::new(),
            opening_book: None,
            database: None,
            tactical_search: None,
            hybrid_config: HybridConfig::default(),
        }
    }
326
327 pub fn new_with_tier(vector_size: usize, tier: FeatureTier) -> Self {
329 let mut engine = Self::new(vector_size);
330 engine.feature_checker = FeatureChecker::new(tier);
331 engine
332 }
333
    /// Returns the feature tier the engine is currently operating under.
    pub fn get_feature_tier(&self) -> &FeatureTier {
        self.feature_checker.get_current_tier()
    }
338
    /// Switches the engine to `new_tier`, changing which features are permitted.
    pub fn upgrade_tier(&mut self, new_tier: FeatureTier) {
        self.feature_checker.upgrade_tier(new_tier);
    }
343
    /// Returns `true` when `feature` is permitted under the current tier.
    pub fn is_feature_available(&self, feature: &str) -> bool {
        self.feature_checker.check_feature(feature).is_ok()
    }
348
    /// Errors with a [`FeatureError`] unless `feature` is permitted under the
    /// current tier. Used as a guard at the top of tier-gated methods.
    pub fn require_feature(&self, feature: &str) -> Result<(), FeatureError> {
        self.feature_checker.require_feature(feature)
    }
353
354 pub fn new_adaptive(vector_size: usize, expected_positions: usize, use_case: &str) -> Self {
357 match use_case {
358 "training" => {
359 if expected_positions > 10000 {
360 Self::new_with_lsh(vector_size, 12, 20)
362 } else {
363 Self::new(vector_size)
364 }
365 }
366 "gameplay" => {
367 if expected_positions > 15000 {
368 Self::new_with_lsh(vector_size, 10, 18)
370 } else {
371 Self::new(vector_size)
372 }
373 }
374 "analysis" => {
375 if expected_positions > 10000 {
376 Self::new_with_lsh(vector_size, 14, 22)
378 } else {
379 Self::new(vector_size)
380 }
381 }
382 _ => Self::new(vector_size), }
384 }
385
386 pub fn new_with_lsh(vector_size: usize, num_tables: usize, hash_size: usize) -> Self {
388 Self {
389 encoder: PositionEncoder::new(vector_size),
390 similarity_search: SimilaritySearch::new(vector_size),
391 lsh_index: Some(LSH::new(vector_size, num_tables, hash_size)),
392 manifold_learner: None,
393 use_lsh: true,
394 use_manifold: false,
395 position_moves: HashMap::new(),
396 manifold_similarity_search: None,
397 manifold_lsh_index: None,
398 feature_checker: FeatureChecker::new(FeatureTier::OpenSource),
399 licensed_feature_checker: None,
400 position_vectors: Vec::new(),
401 position_boards: Vec::new(),
402 position_evaluations: Vec::new(),
403 opening_book: None,
404 database: None,
405 tactical_search: None,
406 hybrid_config: HybridConfig::default(),
408 }
409 }
410
411 pub fn enable_lsh(&mut self, num_tables: usize, hash_size: usize) {
413 self.lsh_index = Some(LSH::new(self.encoder.vector_size(), num_tables, hash_size));
414 self.use_lsh = true;
415
416 if let Some(ref mut lsh) = self.lsh_index {
418 for (vector, evaluation) in self.similarity_search.get_all_positions() {
419 lsh.add_vector(vector, evaluation);
420 }
421 }
422 }
423
    /// Adds a position and its evaluation to the knowledge base, updating
    /// every active index (exact store, LSH, and manifold variants).
    ///
    /// Positions rejected by `is_position_safe` are silently ignored.
    pub fn add_position(&mut self, board: &Board, evaluation: f32) {
        // Filter out positions deemed unsafe to learn from
        // (`is_position_safe` is defined elsewhere in this file).
        if !self.is_position_safe(board) {
            return;
        }

        let vector = self.encoder.encode(board);
        self.similarity_search
            .add_position(vector.clone(), evaluation);

        // Keep the parallel arrays (vector / board / evaluation) in lockstep.
        self.position_vectors.push(vector.clone());
        self.position_boards.push(*board);
        self.position_evaluations.push(evaluation);

        if let Some(ref mut lsh) = self.lsh_index {
            lsh.add_vector(vector.clone(), evaluation);
        }

        // Mirror the position into the compressed (manifold) indexes when enabled.
        if self.use_manifold {
            if let Some(ref learner) = self.manifold_learner {
                let compressed = learner.encode(&vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), evaluation);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, evaluation);
                }
            }
        }
    }
460
    /// Finds up to `k` stored positions most similar to `board`.
    ///
    /// Returns `(vector, evaluation, similarity)` triples. Search strategy,
    /// in priority order: manifold LSH, manifold exact search, full-vector
    /// LSH, and finally brute-force exact search.
    pub fn find_similar_positions(&self, board: &Board, k: usize) -> Vec<(Array1<f32>, f32, f32)> {
        let query_vector = self.encoder.encode(board);

        // Prefer compressed-space search when a manifold model is active.
        if self.use_manifold {
            if let Some(ref manifold_learner) = self.manifold_learner {
                let compressed_query = manifold_learner.encode(&query_vector);

                if let Some(ref lsh) = self.manifold_lsh_index {
                    return lsh.query(&compressed_query, k);
                }

                if let Some(ref search) = self.manifold_similarity_search {
                    return search.search(&compressed_query, k);
                }
            }
        }

        // Approximate search over full-size vectors.
        if self.use_lsh {
            if let Some(ref lsh_index) = self.lsh_index {
                return lsh_index.query(&query_vector, k);
            }
        }

        // Fallback: exact brute-force search.
        self.similarity_search.search(&query_vector, k)
    }
492
493 pub fn find_similar_positions_with_indices(
495 &self,
496 board: &Board,
497 k: usize,
498 ) -> Vec<(usize, f32, f32)> {
499 let query_vector = self.encoder.encode(board);
500
501 let mut results = Vec::new();
504
505 for (i, stored_vector) in self.position_vectors.iter().enumerate() {
506 let similarity = self.encoder.similarity(&query_vector, stored_vector);
507 let eval = self.position_evaluations.get(i).copied().unwrap_or(0.0);
508 results.push((i, eval, similarity));
509 }
510
511 results.sort_by(|a, b| b.2.partial_cmp(&a.2).unwrap_or(std::cmp::Ordering::Equal));
513 results.truncate(k);
514
515 results
516 }
517
518 pub fn evaluate_position(&mut self, board: &Board) -> Option<f32> {
520 if let Some(entry) = self.get_opening_entry(board) {
529 return Some(entry.evaluation);
530 }
531
532 let similar_positions = self.find_similar_positions(board, 5);
534
535 if similar_positions.is_empty() {
536 if let Some(ref mut tactical_search) = self.tactical_search {
538 let result = tactical_search.search(board);
539 return Some(result.evaluation);
540 }
541 return None;
542 }
543
544 let mut weighted_sum = 0.0;
546 let mut weight_sum = 0.0;
547 let mut similarity_scores = Vec::new();
548
549 for (_, evaluation, similarity) in &similar_positions {
550 let weight = *similarity;
551 weighted_sum += evaluation * weight;
552 weight_sum += weight;
553 similarity_scores.push(*similarity);
554 }
555
556 let pattern_evaluation = weighted_sum / weight_sum;
557
558 let avg_similarity = similarity_scores.iter().sum::<f32>() / similarity_scores.len() as f32;
560 let count_factor = (similar_positions.len() as f32
561 / self.hybrid_config.min_similar_positions as f32)
562 .min(1.0);
563 let pattern_confidence = avg_similarity * count_factor;
564
565 let use_tactical = self.hybrid_config.enable_tactical_refinement
567 && pattern_confidence < self.hybrid_config.pattern_confidence_threshold
568 && self.tactical_search.is_some();
569
570 if use_tactical {
571 if let Some(ref mut tactical_search) = self.tactical_search {
573 let tactical_result = if tactical_search.config.enable_parallel_search {
574 tactical_search.search_parallel(board)
575 } else {
576 tactical_search.search(board)
577 };
578
579 let pattern_weight = self.hybrid_config.pattern_weight * pattern_confidence;
581 let tactical_weight = 1.0 - pattern_weight;
582
583 let hybrid_evaluation = (pattern_evaluation * pattern_weight)
584 + (tactical_result.evaluation * tactical_weight);
585
586 Some(hybrid_evaluation)
587 } else {
588 Some(pattern_evaluation)
590 }
591 } else {
592 Some(pattern_evaluation)
594 }
595 }
596
    /// Encodes `board` into the engine's feature-vector representation.
    pub fn encode_position(&self, board: &Board) -> Array1<f32> {
        self.encoder.encode(board)
    }
601
602 pub fn calculate_similarity(&self, board1: &Board, board2: &Board) -> f32 {
604 let vec1 = self.encoder.encode(board1);
605 let vec2 = self.encoder.encode(board2);
606 self.encoder.similarity(&vec1, &vec2)
607 }
608
    /// Number of positions stored in the similarity-search knowledge base.
    pub fn knowledge_base_size(&self) -> usize {
        self.similarity_search.size()
    }
613
614 pub fn save_training_data<P: AsRef<std::path::Path>>(
616 &self,
617 path: P,
618 ) -> Result<(), Box<dyn std::error::Error>> {
619 use crate::training::{TrainingData, TrainingDataset};
620
621 let mut dataset = TrainingDataset::new();
622
623 for (i, board) in self.position_boards.iter().enumerate() {
625 if i < self.position_evaluations.len() {
626 dataset.data.push(TrainingData {
627 board: *board,
628 evaluation: self.position_evaluations[i],
629 depth: 15, game_id: i, });
632 }
633 }
634
635 dataset.save_incremental(path)?;
636 println!("Saved {} positions to training data", dataset.data.len());
637 Ok(())
638 }
639
    /// Loads a training dataset from `path`, adding only positions the
    /// engine does not already know about.
    ///
    /// If a sibling file with a `.bin` extension exists, the faster binary
    /// loader is used instead and `path` itself is never read.
    ///
    /// # Errors
    /// Propagates dataset I/O/parse errors and progress-bar template errors.
    pub fn load_training_data_incremental<P: AsRef<std::path::Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use crate::training::TrainingDataset;
        use indicatif::{ProgressBar, ProgressStyle};
        use std::collections::HashSet;

        let existing_size = self.knowledge_base_size();

        // Prefer the optimized binary sidecar file when present.
        let path_ref = path.as_ref();
        let binary_path = path_ref.with_extension("bin");
        if binary_path.exists() {
            println!("š Loading optimized binary format...");
            return self.load_training_data_binary(binary_path);
        }

        println!("š Loading training data from {}...", path_ref.display());
        let dataset = TrainingDataset::load(path)?;

        let total_positions = dataset.data.len();
        if total_positions == 0 {
            println!("ā ļø No positions found in dataset");
            return Ok(());
        }

        // Pass 1: deduplicate against boards already in the engine.
        let dedup_pb = ProgressBar::new(total_positions as u64);
        dedup_pb.set_style(
            ProgressStyle::default_bar()
                .template("š Checking duplicates [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        let mut existing_boards: HashSet<_> = self.position_boards.iter().cloned().collect();
        let mut new_positions = Vec::new();
        let mut new_evaluations = Vec::new();

        for (i, data) in dataset.data.into_iter().enumerate() {
            if !existing_boards.contains(&data.board) {
                existing_boards.insert(data.board);
                new_positions.push(data.board);
                new_evaluations.push(data.evaluation);
            }

            // Update the bar only every 1000 entries to keep overhead low.
            if i % 1000 == 0 || i == total_positions - 1 {
                dedup_pb.set_position((i + 1) as u64);
                dedup_pb.set_message(format!("{} new positions found", new_positions.len()));
            }
        }
        dedup_pb.finish_with_message(format!("ā
Found {} new positions", new_positions.len()));

        if new_positions.is_empty() {
            println!("ā¹ļø No new positions to add (all positions already exist)");
            return Ok(());
        }

        // Pass 2: encode and index the new positions.
        let add_pb = ProgressBar::new(new_positions.len() as u64);
        add_pb.set_style(
            ProgressStyle::default_bar()
                .template("ā Adding positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        for (i, (board, evaluation)) in new_positions
            .into_iter()
            .zip(new_evaluations.into_iter())
            .enumerate()
        {
            self.add_position(&board, evaluation);

            // `length()` is always `Some` here: the bar was built with a known total.
            if i % 500 == 0 || i == add_pb.length().unwrap() as usize - 1 {
                add_pb.set_position((i + 1) as u64);
                add_pb.set_message("vectors encoded".to_string());
            }
        }
        add_pb.finish_with_message("ā
All positions added");

        println!(
            "šÆ Loaded {} new positions (total: {})",
            self.knowledge_base_size() - existing_size,
            self.knowledge_base_size()
        );
        Ok(())
    }
731
    /// Saves all stored positions, evaluations, and encoded vectors to an
    /// LZ4-compressed bincode file readable by
    /// [`Self::load_training_data_binary`].
    ///
    /// # Errors
    /// Propagates serialization, compression, clock, and file-write errors.
    pub fn save_training_data_binary<P: AsRef<std::path::Path>>(
        &self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use lz4_flex::compress_prepend_size;

        println!("š¾ Saving training data in binary format (compressed)...");

        // On-disk schema; must stay in sync with the deserializer in
        // `load_training_data_binary`.
        #[derive(serde::Serialize)]
        struct BinaryTrainingData {
            positions: Vec<String>, // board positions as strings (FEN -- the loader parses them back)
            evaluations: Vec<f32>,
            vectors: Vec<Vec<f32>>, // pre-encoded feature vectors
            created_at: i64, // unix timestamp, seconds
        }

        let current_time = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)?
            .as_secs() as i64;

        let mut positions = Vec::with_capacity(self.position_boards.len());
        let mut evaluations = Vec::with_capacity(self.position_boards.len());
        let mut vectors = Vec::with_capacity(self.position_boards.len());

        for (i, board) in self.position_boards.iter().enumerate() {
            if i < self.position_evaluations.len() {
                positions.push(board.to_string());
                evaluations.push(self.position_evaluations[i]);

                if i < self.position_vectors.len() {
                    // NOTE(review): `as_slice` returns None for non-contiguous
                    // arrays, in which case the vector is silently skipped and
                    // `vectors` ends up shorter than `positions` -- confirm
                    // readers tolerate that misalignment.
                    if let Some(vector_slice) = self.position_vectors[i].as_slice() {
                        vectors.push(vector_slice.to_vec());
                    }
                }
            }
        }

        let binary_data = BinaryTrainingData {
            positions,
            evaluations,
            vectors,
            created_at: current_time,
        };

        let serialized = bincode::serialize(&binary_data)?;

        let compressed = compress_prepend_size(&serialized);

        std::fs::write(path, &compressed)?;

        println!(
            "ā
Saved {} positions to binary file ({} bytes compressed)",
            binary_data.positions.len(),
            compressed.len()
        );
        Ok(())
    }
796
797 pub fn load_training_data_binary<P: AsRef<std::path::Path>>(
799 &mut self,
800 path: P,
801 ) -> Result<(), Box<dyn std::error::Error>> {
802 use indicatif::{ProgressBar, ProgressStyle};
803 use lz4_flex::decompress_size_prepended;
804
805 println!("š Loading training data from binary format...");
806
807 #[derive(serde::Deserialize)]
808 struct BinaryTrainingData {
809 positions: Vec<String>,
810 evaluations: Vec<f32>,
811 #[allow(dead_code)]
812 vectors: Vec<Vec<f32>>,
813 #[allow(dead_code)]
814 created_at: i64,
815 }
816
817 let existing_size = self.knowledge_base_size();
818
819 let file_size = std::fs::metadata(&path)?.len();
821 println!(
822 "š¦ Reading {} compressed file...",
823 Self::format_bytes(file_size)
824 );
825
826 let compressed_data = std::fs::read(path)?;
827 println!("š Decompressing data...");
828 let serialized = decompress_size_prepended(&compressed_data)?;
829
830 println!("š Deserializing binary data...");
831 let binary_data: BinaryTrainingData = bincode::deserialize(&serialized)?;
832
833 let total_positions = binary_data.positions.len();
834 if total_positions == 0 {
835 println!("ā ļø No positions found in binary file");
836 return Ok(());
837 }
838
839 println!(
840 "š Processing {total_positions} positions from binary format..."
841 );
842
843 let pb = ProgressBar::new(total_positions as u64);
845 pb.set_style(
846 ProgressStyle::default_bar()
847 .template("ā” Loading positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
848 .progress_chars("āāā")
849 );
850
851 let mut added_count = 0;
852
853 for (i, fen) in binary_data.positions.iter().enumerate() {
855 if i < binary_data.evaluations.len() {
856 if let Ok(board) = fen.parse() {
857 if !self.position_boards.contains(&board) {
859 let mut evaluation = binary_data.evaluations[i];
860
861 if evaluation.abs() > 15.0 {
865 evaluation /= 100.0;
866 }
867
868 self.add_position(&board, evaluation);
869 added_count += 1;
870 }
871 }
872 }
873
874 if i % 1000 == 0 || i == total_positions - 1 {
875 pb.set_position((i + 1) as u64);
876 pb.set_message(format!("{added_count} new positions"));
877 }
878 }
879 pb.finish_with_message(format!("ā
Loaded {added_count} new positions"));
880
881 println!(
882 "šÆ Binary loading complete: {} new positions (total: {})",
883 self.knowledge_base_size() - existing_size,
884 self.knowledge_base_size()
885 );
886 Ok(())
887 }
888
    /// Loads training data through a read-only memory map, auto-detecting the
    /// on-disk format: MessagePack, plain bincode, or LZ4-compressed bincode
    /// (tried in that order). Requires the `memory_mapped_files` feature.
    ///
    /// # Errors
    /// Fails if the feature is unavailable, the file cannot be opened or
    /// mapped, or none of the supported formats decode.
    pub fn load_training_data_mmap<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("memory_mapped_files")?;

        use memmap2::Mmap;
        use std::fs::File;

        let path_ref = path.as_ref();
        println!(
            "š Loading training data via memory mapping: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        // SAFETY: read-only mapping of a file we just opened; assumes no other
        // process truncates or rewrites the file while mapped (memmap2's
        // documented caveat for `Mmap::map`).
        let mmap = unsafe { Mmap::map(&file)? };

        // Format sniffing: try each decoder until one succeeds.
        if let Ok(data) = rmp_serde::from_slice::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected MessagePack format");
            return self.load_positions_from_tuples(data);
        }

        if let Ok(data) = bincode::deserialize::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected bincode format");
            return self.load_positions_from_tuples(data);
        }

        // Last resort: size-prepended LZ4 wrapping bincode; errors from here
        // propagate to the caller.
        let decompressed = lz4_flex::decompress_size_prepended(&mmap)?;
        let data: Vec<(String, f32)> = bincode::deserialize(&decompressed)?;
        println!("š¦ Detected LZ4+bincode format");
        self.load_positions_from_tuples(data)
    }
928
929 pub fn load_training_data_msgpack<P: AsRef<Path>>(
932 &mut self,
933 path: P,
934 ) -> Result<(), Box<dyn std::error::Error>> {
935 use std::fs::File;
936 use std::io::BufReader;
937
938 let path_ref = path.as_ref();
939 println!(
940 "š Loading MessagePack training data: {}",
941 path_ref.display()
942 );
943
944 let file = File::open(path_ref)?;
945 let reader = BufReader::new(file);
946 let data: Vec<(String, f32)> = rmp_serde::from_read(reader)?;
947
948 println!("š¦ MessagePack data loaded: {} positions", data.len());
949 self.load_positions_from_tuples(data)
950 }
951
    /// Loads a JSON-lines file of `{"fen": ..., "evaluation": ...}` records,
    /// parsing chunks of lines in parallel with rayon and deduplicating by FEN.
    ///
    /// NOTE(review): despite the name, the entire file is read into memory
    /// before parsing; only the JSON decoding is chunked/parallel.
    ///
    /// # Errors
    /// Propagates file-open and line-read errors; malformed JSON lines are
    /// silently skipped.
    pub fn load_training_data_streaming_json<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use dashmap::DashMap;
        use rayon::prelude::*;
        use std::fs::File;
        use std::io::{BufRead, BufReader};
        use std::sync::Arc;

        let path_ref = path.as_ref();
        println!(
            "š Loading JSON with streaming parallel processing: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);

        let chunk_size = 10000;
        let position_map = Arc::new(DashMap::new());

        // Materialize all lines so rayon can split them into chunks.
        let lines: Vec<String> = reader.lines().collect::<Result<Vec<_>, _>>()?;
        let total_lines = lines.len();

        lines.par_chunks(chunk_size).for_each(|chunk| {
            for line in chunk {
                if let Ok(data) = serde_json::from_str::<serde_json::Value>(line) {
                    if let (Some(fen), Some(eval)) = (
                        data.get("fen").and_then(|v| v.as_str()),
                        data.get("evaluation").and_then(|v| v.as_f64()),
                    ) {
                        // Duplicate FENs keep whichever evaluation lands last;
                        // under parallelism that choice is nondeterministic.
                        position_map.insert(fen.to_string(), eval as f32);
                    }
                }
            }
        });

        println!(
            "š¦ Parallel JSON processing complete: {} positions from {} lines",
            position_map.len(),
            total_lines
        );

        // Reclaim the map without copying when this is the sole Arc owner;
        // otherwise fall back to cloning each entry.
        let data: Vec<(String, f32)> = match Arc::try_unwrap(position_map) {
            Ok(map) => map.into_iter().collect(),
            Err(arc_map) => {
                arc_map
                    .iter()
                    .map(|entry| (entry.key().clone(), *entry.value()))
                    .collect()
            }
        };
        self.load_positions_from_tuples(data)
    }
1014
    /// Loads zstd-compressed training data, trying MessagePack first and
    /// falling back to bincode. The file is reopened for the second attempt
    /// because the first decoder consumes the stream.
    ///
    /// # Errors
    /// Propagates file-open and decoder-construction errors; fails if neither
    /// payload format decodes.
    pub fn load_training_data_compressed<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufReader;

        let path_ref = path.as_ref();
        println!(
            "š Loading zstd compressed training data: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;

        // Attempt 1: zstd-wrapped MessagePack.
        if let Ok(data) = rmp_serde::from_read::<_, Vec<(String, f32)>>(decoder) {
            println!("š¦ Zstd+MessagePack data loaded: {} positions", data.len());
            return self.load_positions_from_tuples(data);
        }

        // Attempt 2: reopen the file and try zstd-wrapped bincode.
        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;
        let data: Vec<(String, f32)> = bincode::deserialize_from(decoder)?;

        println!("š¦ Zstd+bincode data loaded: {} positions", data.len());
        self.load_positions_from_tuples(data)
    }
1049
1050 fn load_positions_from_tuples(
1053 &mut self,
1054 data: Vec<(String, f32)>,
1055 ) -> Result<(), Box<dyn std::error::Error>> {
1056 use indicatif::{ProgressBar, ProgressStyle};
1057 use std::collections::HashSet;
1058
1059 let existing_size = self.knowledge_base_size();
1060 let mut seen_positions = HashSet::new();
1061 let mut loaded_count = 0;
1062
1063 let pb = ProgressBar::new(data.len() as u64);
1065 pb.set_style(ProgressStyle::with_template(
1066 "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({per_sec}) {msg}"
1067 )?);
1068
1069 for (fen, evaluation) in data {
1070 pb.inc(1);
1071
1072 if seen_positions.contains(&fen) {
1074 continue;
1075 }
1076 seen_positions.insert(fen.clone());
1077
1078 if let Ok(board) = Board::from_str(&fen) {
1080 self.add_position(&board, evaluation);
1081 loaded_count += 1;
1082
1083 if loaded_count % 1000 == 0 {
1084 pb.set_message(format!("Loaded {loaded_count} positions"));
1085 }
1086 }
1087 }
1088
1089 pb.finish_with_message(format!("ā
Loaded {loaded_count} new positions"));
1090
1091 println!(
1092 "šÆ Ultra-fast loading complete: {} new positions (total: {})",
1093 self.knowledge_base_size() - existing_size,
1094 self.knowledge_base_size()
1095 );
1096
1097 Ok(())
1098 }
1099
1100 fn format_bytes(bytes: u64) -> String {
1102 const UNITS: &[&str] = &["B", "KB", "MB", "GB"];
1103 let mut size = bytes as f64;
1104 let mut unit_index = 0;
1105
1106 while size >= 1024.0 && unit_index < UNITS.len() - 1 {
1107 size /= 1024.0;
1108 unit_index += 1;
1109 }
1110
1111 format!("{:.1} {}", size, UNITS[unit_index])
1112 }
1113
1114 pub fn train_from_dataset_incremental(&mut self, dataset: &crate::training::TrainingDataset) {
1116 let _existing_size = self.knowledge_base_size();
1117 let mut added = 0;
1118
1119 for data in &dataset.data {
1120 if !self.position_boards.contains(&data.board) {
1122 self.add_position(&data.board, data.evaluation);
1123 added += 1;
1124 }
1125 }
1126
1127 println!(
1128 "Added {} new positions from dataset (total: {})",
1129 added,
1130 self.knowledge_base_size()
1131 );
1132 }
1133
    /// Reports a snapshot of the engine's current training state.
    pub fn training_stats(&self) -> TrainingStats {
        TrainingStats {
            total_positions: self.knowledge_base_size(),
            unique_positions: self.position_boards.len(),
            has_move_data: !self.position_moves.is_empty(),
            move_data_entries: self.position_moves.len(),
            lsh_enabled: self.use_lsh,
            manifold_enabled: self.use_manifold,
            opening_book_enabled: self.opening_book.is_some(),
        }
    }
1146
    /// Scans the working directory for well-known training/puzzle file names
    /// and loads every one that exists. Returns the list of files loaded.
    ///
    /// NOTE(review): per-file errors are swallowed, and every status print is
    /// the placeholder "Loading complete" -- presumably leftover from a
    /// logging cleanup; confirm before relying on the output.
    ///
    /// # Errors
    /// Propagates only progress-bar template errors; individual file failures
    /// are non-fatal.
    pub fn auto_load_training_data(&mut self) -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        // Conventional names for general training data files...
        let common_files = vec![
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
            "my_training.json",
        ];

        // ...and for tactical puzzle collections.
        let tactical_files = vec![
            "tactical_puzzles.json",
            "lichess_puzzles.json",
            "my_puzzles.json",
        ];

        // Collect only the files that actually exist, tagged by loader kind.
        let mut available_files = Vec::new();
        for file_path in &common_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "training"));
            }
        }
        for file_path in &tactical_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "tactical"));
            }
        }

        if available_files.is_empty() {
            return Ok(Vec::new());
        }

        println!(
            "š Found {} training files to auto-load",
            available_files.len()
        );

        let pb = ProgressBar::new(available_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template("š Auto-loading files [{elapsed_precise}] [{bar:40.blue/cyan}] {pos}/{len} {msg}")?
                .progress_chars("āāā")
        );

        let mut loaded_files = Vec::new();

        for (i, (file_path, file_type)) in available_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            // Dispatch to the right loader for the file kind.
            let result = match *file_type {
                "training" => self.load_training_data_incremental(file_path).map(|_| {
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                "tactical" => crate::training::TacticalPuzzleParser::load_tactical_puzzles(
                    file_path,
                )
                .map(|puzzles| {
                    crate::training::TacticalPuzzleParser::load_into_engine_incremental(
                        &puzzles, self,
                    );
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                _ => Ok(()),
            };

            // A bad file should not stop the remaining files from loading.
            if let Err(_e) = result {
                println!("Loading complete");
            }
        }

        pb.set_position(available_files.len() as u64);
        pb.finish_with_message(format!("ā
Auto-loaded {} files", loaded_files.len()));

        Ok(loaded_files)
    }
1229
1230 pub fn load_lichess_puzzles_premium<P: AsRef<std::path::Path>>(
1232 &mut self,
1233 csv_path: P,
1234 ) -> Result<(), Box<dyn std::error::Error>> {
1235 self.require_feature("ultra_fast_loading")?; println!("š„ Loading Lichess puzzles with premium performance...");
1238 let puzzle_entries =
1239 crate::lichess_loader::load_lichess_puzzles_premium_with_moves(csv_path)?;
1240
1241 for (board, evaluation, best_move) in puzzle_entries {
1242 self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
1243 }
1244
1245 println!("ā
Premium Lichess puzzle loading complete!");
1246 Ok(())
1247 }
1248
1249 pub fn load_lichess_puzzles_basic<P: AsRef<std::path::Path>>(
1251 &mut self,
1252 csv_path: P,
1253 max_puzzles: usize,
1254 ) -> Result<(), Box<dyn std::error::Error>> {
1255 println!(
1256 "š Loading Lichess puzzles (basic tier, limited to {max_puzzles} puzzles)..."
1257 );
1258 let puzzle_entries =
1259 crate::lichess_loader::load_lichess_puzzles_basic_with_moves(csv_path, max_puzzles)?;
1260
1261 for (board, evaluation, best_move) in puzzle_entries {
1262 self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
1263 }
1264
1265 println!("ā
Basic Lichess puzzle loading complete!");
1266 Ok(())
1267 }
1268
    /// Creates an engine, enables the opening book, and auto-loads any
    /// training files found in the working directory
    /// (see [`Self::auto_load_training_data`]).
    ///
    /// # Errors
    /// Propagates errors from the auto-load scan.
    pub fn new_with_auto_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        let loaded_files = engine.auto_load_training_data()?;

        if loaded_files.is_empty() {
            println!("š¤ Created fresh engine (no training data found)");
        } else {
            println!(
                "š Created engine with auto-loaded training data from {} files",
                loaded_files.len()
            );
            // NOTE(review): stats are computed but only placeholder messages
            // are printed -- presumably leftover from removed logging.
            let _stats = engine.training_stats();
            println!("Loading complete");
            println!("Loading complete");
        }

        Ok(engine)
    }
1291
    /// Creates an engine optimized for fast startup: enables the opening book
    /// and persistence, prefers pre-built `.bin` training files (falling back
    /// to JSON auto-loading), then tries to load pre-trained manifold models.
    ///
    /// # Errors
    /// Propagates progress-bar template errors and JSON auto-load errors;
    /// per-file binary load failures and missing models are non-fatal.
    pub fn new_with_fast_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence failure is tolerated; the engine just runs in-memory.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        // Known binary training files, in priority order.
        let binary_files = [
            "training_data_a100.bin",
            "training_data.bin",
            "tactical_training_data.bin",
            "engine_training.bin",
            "chess_training.bin",
        ];

        let existing_binary_files: Vec<_> = binary_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        let mut loaded_count = 0;

        if !existing_binary_files.is_empty() {
            println!(
                "ā” Fast loading: Found {} binary files",
                existing_binary_files.len()
            );

            let pb = ProgressBar::new(existing_binary_files.len() as u64);
            pb.set_style(
                ProgressStyle::default_bar()
                    .template("š Fast loading [{elapsed_precise}] [{bar:40.green/cyan}] {pos}/{len} {msg}")?
                    .progress_chars("āāā")
            );

            for (i, file_path) in existing_binary_files.iter().enumerate() {
                pb.set_position(i as u64);
                pb.set_message("Processing...".to_string());

                // A file that fails to load is skipped rather than aborting startup.
                if engine.load_training_data_binary(file_path).is_ok() {
                    loaded_count += 1;
                }
            }

            pb.set_position(existing_binary_files.len() as u64);
            pb.finish_with_message(format!("ā
Loaded {loaded_count} binary files"));
        } else {
            println!("š¦ No binary files found, falling back to JSON auto-loading...");
            let _ = engine.auto_load_training_data()?;
        }

        // Pre-trained manifold models are optional; startup continues without them.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({e})");
            println!(" Use --rebuild-models flag to train new models");
        }

        let stats = engine.training_stats();
        println!(
            "ā” Fast engine ready with {} positions ({} binary files loaded)",
            stats.total_positions, loaded_count
        );

        Ok(engine)
    }
1366
    /// Creates an engine that discovers every training file in the current
    /// directory, loads the best available format per dataset, and reports
    /// old-format files that could be cleaned up (dry run only).
    pub fn new_with_auto_discovery(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with AUTO-DISCOVERY and format consolidation...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence failure is non-fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        let discovered_files = AutoDiscovery::discover_training_files(".", true)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No training data found. Use convert methods to create optimized files.");
            return Ok(engine);
        }

        // Keep only the best-format file for each dataset base name.
        let consolidated = AutoDiscovery::consolidate_by_base_name(discovered_files.clone());

        let mut total_loaded = 0;
        for (base_name, best_file) in &consolidated {
            println!("š Loading {} ({})", base_name, best_file.format);

            // Measure knowledge-base growth to count positions loaded.
            let initial_size = engine.knowledge_base_size();
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            let loaded_count = engine.knowledge_base_size() - initial_size;
            total_loaded += loaded_count;

            println!("   ā
Loaded {loaded_count} positions");
        }

        // Report cleanup candidates; `true` = dry run, nothing is deleted here.
        let cleanup_candidates = AutoDiscovery::get_cleanup_candidates(&discovered_files);
        if !cleanup_candidates.is_empty() {
            println!(
                "š§¹ Found {} old format files that can be cleaned up:",
                cleanup_candidates.len()
            );
            AutoDiscovery::cleanup_old_formats(&cleanup_candidates, true)?;
            println!("   š” To actually remove old files, run: cargo run --bin cleanup_formats");
        }

        // Pre-trained manifold models are optional.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({e})");
        }

        println!(
            "šÆ Engine ready: {} positions loaded from {} datasets",
            total_loaded,
            consolidated.len()
        );
        Ok(engine)
    }
1426
    /// Creates an engine that loads only the single best discovered training
    /// file for the fastest possible startup, or a starter dataset when no
    /// user data exists.
    pub fn new_with_instant_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with INSTANT loading...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence failure is non-fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        // Shallow discovery (no consolidation pass).
        let discovered_files = AutoDiscovery::discover_training_files(".", false)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No user training data found, loading starter dataset...");
            // A starter-dataset failure leaves an empty but usable engine.
            if let Err(_e) = engine.load_starter_dataset() {
                println!("Loading complete");
                println!("ā¹ļø Starting with empty engine");
            } else {
                println!(
                    "ā
Loaded starter dataset with {} positions",
                    engine.knowledge_base_size()
                );
            }
            return Ok(engine);
        }

        // Discovery order puts the preferred file first.
        if let Some(best_file) = discovered_files.first() {
            println!(
                "ā” Loading {} format: {}",
                best_file.format,
                best_file.path.display()
            );
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            println!(
                "ā
Loaded {} positions from {} format",
                engine.knowledge_base_size(),
                best_file.format
            );
        }

        // Pre-trained manifold models are optional.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({e})");
        }

        println!(
            "šÆ Engine ready: {} positions loaded",
            engine.knowledge_base_size()
        );
        Ok(engine)
    }
1483
1484 pub fn new_with_license(vector_size: usize, license_url: String) -> Self {
1486 let mut engine = Self::new(vector_size);
1487 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new(license_url));
1488 engine
1489 }
1490
1491 pub fn new_with_offline_license(vector_size: usize) -> Self {
1493 let mut engine = Self::new(vector_size);
1494 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new_offline());
1495 engine
1496 }
1497
1498 pub async fn activate_license(&mut self, key: &str) -> Result<FeatureTier, LicenseError> {
1500 if let Some(ref mut checker) = self.licensed_feature_checker {
1501 let tier = checker.activate_license(key).await?;
1502 self.feature_checker.upgrade_tier(tier.clone());
1504 Ok(tier)
1505 } else {
1506 Err(LicenseError::InvalidFormat(
1507 "No license checker initialized".to_string(),
1508 ))
1509 }
1510 }
1511
1512 pub async fn check_licensed_feature(&mut self, feature: &str) -> Result<(), FeatureError> {
1514 if let Some(ref mut checker) = self.licensed_feature_checker {
1515 checker.check_feature(feature).await
1516 } else {
1517 self.feature_checker.check_feature(feature)
1519 }
1520 }
1521
1522 pub fn load_license_cache<P: AsRef<std::path::Path>>(
1524 &mut self,
1525 path: P,
1526 ) -> Result<(), Box<dyn std::error::Error>> {
1527 if let Some(ref mut checker) = self.licensed_feature_checker {
1528 checker.load_cache(path)?;
1529 }
1530 Ok(())
1531 }
1532
1533 pub fn save_license_cache<P: AsRef<std::path::Path>>(
1535 &self,
1536 path: P,
1537 ) -> Result<(), Box<dyn std::error::Error>> {
1538 if let Some(ref checker) = self.licensed_feature_checker {
1539 checker.save_cache(path)?;
1540 }
1541 Ok(())
1542 }
1543
1544 fn is_position_safe(&self, board: &Board) -> bool {
1549 match std::panic::catch_unwind(|| {
1551 use chess::MoveGen;
1552 let _legal_moves: Vec<ChessMove> = MoveGen::new_legal(board).collect();
1553 true
1554 }) {
1555 Ok(_) => true,
1556 Err(_) => {
1557 false
1559 }
1560 }
1561 }
1562
1563 pub fn check_gpu_acceleration(&self) -> Result<(), Box<dyn std::error::Error>> {
1565 self.feature_checker.check_feature("gpu_acceleration")?;
1566
1567 match crate::gpu_acceleration::GPUAccelerator::new() {
1569 Ok(_) => {
1570 println!("š„ GPU acceleration available and ready");
1571 Ok(())
1572 }
1573 Err(_e) => Err("Processing...".to_string().into()),
1574 }
1575 }
1576
    /// Seeds the engine with a tiny starter dataset.
    ///
    /// Prefers `training_data/starter_dataset.json` on disk; if absent, falls
    /// back to an embedded three-position JSON sample. Entries with unparsable
    /// FENs are skipped silently.
    pub fn load_starter_dataset(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let starter_data = if let Ok(file_content) =
            std::fs::read_to_string("training_data/starter_dataset.json")
        {
            file_content
        } else {
            // Embedded fallback: the start position plus 1.e4 and 1...e5.
            r#"[
            {
                "fen": "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1",
                "evaluation": 0.0,
                "best_move": null,
                "depth": 0
            },
            {
                "fen": "rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1",
                "evaluation": 0.1,
                "best_move": "e7e5",
                "depth": 2
            },
            {
                "fen": "rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq e6 0 2",
                "evaluation": 0.0,
                "best_move": "g1f3",
                "depth": 2
            }
            ]"#
            .to_string()
        };

        let training_data: Vec<serde_json::Value> = serde_json::from_str(&starter_data)?;

        for entry in training_data {
            if let (Some(fen), Some(evaluation)) = (entry.get("fen"), entry.get("evaluation")) {
                if let (Some(fen_str), Some(eval_f64)) = (fen.as_str(), evaluation.as_f64()) {
                    match chess::Board::from_str(fen_str) {
                        Ok(board) => {
                            let mut eval = eval_f64 as f32;

                            // Large magnitudes are assumed to be centipawns;
                            // rescale to pawns. TODO(review): confirm cutoff.
                            if eval.abs() > 15.0 {
                                eval /= 100.0;
                            }

                            self.add_position(&board, eval);
                        }
                        Err(_) => {
                            // Skip malformed FEN strings.
                            continue;
                        }
                    }
                }
            }
        }

        Ok(())
    }
1638
    /// Dispatches loading of `path` to the loader matching `format`.
    ///
    /// Files over 10 MB bypass the per-format loaders and go through the
    /// ultra-fast loader regardless of declared format.
    fn load_file_by_format(
        &mut self,
        path: &std::path::Path,
        format: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let file_size = std::fs::metadata(path)?.len();

        // Large-file fast path.
        if file_size > 10_000_000 {
            println!(
                "š Large file detected ({:.1} MB) - using ultra-fast loader",
                file_size as f64 / 1_000_000.0
            );
            return self.ultra_fast_load_any_format(path);
        }

        match format {
            "MMAP" => self.load_training_data_mmap(path),
            "MSGPACK" => self.load_training_data_msgpack(path),
            "BINARY" => self.load_training_data_streaming_binary(path),
            "ZSTD" => self.load_training_data_compressed(path),
            "JSON" => self.load_training_data_streaming_json_v2(path),
            // Unknown format strings are rejected.
            _ => Err("Processing...".to_string().into()),
        }
    }
1667
    /// Loads a training file of any supported format through the ultra-fast
    /// loader (a licensed feature), then prints loading statistics.
    pub fn ultra_fast_load_any_format<P: AsRef<std::path::Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        // Gated behind the "ultra_fast_loading" feature tier.
        self.require_feature("ultra_fast_loading")?;

        let mut loader = UltraFastLoader::new_for_massive_datasets();
        loader.ultra_load_binary(path, self)?;

        let stats = loader.get_stats();
        println!("š Ultra-fast loading complete:");
        println!("   ā
Loaded: {} positions", stats.loaded);
        println!("Loading complete");
        println!("Loading complete");
        println!("   š Success rate: {:.1}%", stats.success_rate() * 100.0);

        Ok(())
    }
1688
1689 pub fn load_training_data_streaming_binary<P: AsRef<std::path::Path>>(
1692 &mut self,
1693 path: P,
1694 ) -> Result<(), Box<dyn std::error::Error>> {
1695 let mut loader = StreamingLoader::new();
1696 loader.stream_load_binary(path, self)?;
1697
1698 println!("š Streaming binary load complete:");
1699 println!(" Loaded: {} new positions", loader.loaded_count);
1700 println!("Loading complete");
1701 println!("Loading complete");
1702
1703 Ok(())
1704 }
1705
1706 pub fn load_training_data_streaming_json_v2<P: AsRef<std::path::Path>>(
1709 &mut self,
1710 path: P,
1711 ) -> Result<(), Box<dyn std::error::Error>> {
1712 let mut loader = StreamingLoader::new();
1713
1714 let batch_size = if std::fs::metadata(path.as_ref())?.len() > 100_000_000 {
1716 20000 } else {
1719 5000 };
1721
1722 loader.stream_load_json(path, self, batch_size)?;
1723
1724 println!("š Streaming JSON load complete:");
1725 println!(" Loaded: {} new positions", loader.loaded_count);
1726 println!("Loading complete");
1727 println!("Loading complete");
1728
1729 Ok(())
1730 }
1731
    /// Creates an engine tuned for very large datasets by loading only the
    /// single largest discovered training file via the ultra-fast loader.
    pub fn new_for_massive_datasets(
        vector_size: usize,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine for MASSIVE datasets (100k-1M+ positions)...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        let discovered_files = AutoDiscovery::discover_training_files(".", false)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No training data found");
            return Ok(engine);
        }

        // Safe to unwrap: the emptiness check above guarantees at least one file.
        let largest_file = discovered_files
            .iter()
            .max_by_key(|f| f.size_bytes)
            .unwrap();

        println!(
            "šÆ Loading largest dataset: {} ({} bytes)",
            largest_file.path.display(),
            largest_file.size_bytes
        );

        engine.ultra_fast_load_any_format(&largest_file.path)?;

        println!(
            "šÆ Engine ready: {} positions loaded",
            engine.knowledge_base_size()
        );
        Ok(engine)
    }
1770
    /// Converts known JSON training files to MessagePack (`.msgpack`) files
    /// for faster, smaller storage.
    ///
    /// If the A100 binary dump exists it is first converted to JSON so it can
    /// take part in the conversion. Two JSON layouts are supported: arrays of
    /// `[fen, eval]` tuples and arrays of `{fen, evaluation}` objects.
    pub fn convert_to_msgpack() -> Result<(), Box<dyn std::error::Error>> {
        use serde_json::Value;
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Make the A100 binary data available as JSON first.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        let input_files = [
            "training_data.json",
            "tactical_training_data.json",
            "training_data_a100.json",
        ];

        for input_file in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            let output_file_path = input_file.replace(".json", ".msgpack");
            println!(
                "š Converting {input_file} ā {output_file_path} (MessagePack format)"
            );

            let file = File::open(input_path)?;
            let reader = BufReader::new(file);
            let json_value: Value = serde_json::from_reader(reader)?;

            // Normalize either supported layout into (fen, eval) pairs.
            let data: Vec<(String, f32)> = match json_value {
                Value::Array(arr) if !arr.is_empty() => {
                    if let Some(first) = arr.first() {
                        if first.is_array() {
                            // Tuple layout: [[fen, eval], ...]
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Array(tuple) = item {
                                        if tuple.len() >= 2 {
                                            let fen = tuple[0].as_str()?.to_string();
                                            let mut eval = tuple[1].as_f64()? as f32;

                                            // Large magnitudes are assumed to be
                                            // centipawns; rescale to pawns.
                                            // TODO(review): confirm the 15.0 cutoff.
                                            if eval.abs() > 15.0 {
                                                eval /= 100.0;
                                            }

                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else if first.is_object() {
                            // Object layout: [{"fen": ..., "evaluation": ...}, ...]
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Object(obj) = item {
                                        let fen = obj.get("fen")?.as_str()?.to_string();
                                        let mut eval = obj.get("evaluation")?.as_f64()? as f32;

                                        if eval.abs() > 15.0 {
                                            eval /= 100.0;
                                        }

                                        Some((fen, eval))
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else {
                            return Err("Processing...".to_string().into());
                        }
                    } else {
                        Vec::new()
                    }
                }
                _ => return Err("Processing...".to_string().into()),
            };

            if data.is_empty() {
                println!("Loading complete");
                continue;
            }

            let output_file = File::create(&output_file_path)?;
            let mut writer = BufWriter::new(output_file);
            rmp_serde::encode::write(&mut writer, &data)?;

            // Report the achieved size reduction.
            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(&output_file_path).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            println!(
                "ā
Converted: {} ā {} ({:.1}x size reduction, {} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio,
                data.len()
            );
        }

        Ok(())
    }
1890
    /// Converts the A100 binary training dump to JSON so the other format
    /// converters (msgpack/zstd/mmap) can consume it.
    ///
    /// No-ops when the binary file is absent.
    pub fn convert_a100_binary_to_json() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufWriter;

        let binary_path = "training_data_a100.bin";
        let json_path = "training_data_a100.json";

        if !std::path::Path::new(binary_path).exists() {
            println!("Loading complete");
            return Ok(());
        }

        println!(
            "š Converting A100 binary data {binary_path} ā {json_path} (JSON format)"
        );

        // Load through a scratch engine to reuse the binary parser.
        let mut engine = ChessVectorEngine::new(1024);
        engine.load_training_data_binary(binary_path)?;

        // Pair each stored board with its evaluation by index.
        let mut data = Vec::new();
        for (i, board) in engine.position_boards.iter().enumerate() {
            if i < engine.position_evaluations.len() {
                data.push(serde_json::json!({
                    "fen": board.to_string(),
                    "evaluation": engine.position_evaluations[i],
                    // NOTE(review): depth 15 is a fixed placeholder value.
                    "depth": 15,
                    "game_id": i
                }));
            }
        }

        let file = File::create(json_path)?;
        let writer = BufWriter::new(file);
        serde_json::to_writer(writer, &data)?;

        println!(
            "ā
Converted A100 data: {} positions ā {}",
            data.len(),
            json_path
        );
        Ok(())
    }
1937
    /// Compresses known training files (JSON and binary) with Zstd level 9,
    /// producing `.zst` siblings next to each input.
    pub fn convert_to_zstd() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Make the A100 binary dump available as JSON too.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        // (input, output) pairs; missing inputs are skipped.
        let input_files = [
            ("training_data.json", "training_data.zst"),
            ("tactical_training_data.json", "tactical_training_data.zst"),
            ("training_data_a100.json", "training_data_a100.zst"),
            ("training_data.bin", "training_data.bin.zst"),
            (
                "tactical_training_data.bin",
                "tactical_training_data.bin.zst",
            ),
            ("training_data_a100.bin", "training_data_a100.bin.zst"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!(
                "š Converting {input_file} ā {output_file} (Zstd compression)"
            );

            // Stream-copy through a level-9 Zstd encoder.
            let input_file = File::open(input_path)?;
            let output_file_handle = File::create(output_file)?;
            let writer = BufWriter::new(output_file_handle);
            let mut encoder = zstd::stream::Encoder::new(writer, 9)?;
            std::io::copy(&mut BufReader::new(input_file), &mut encoder)?;
            encoder.finish()?;

            // Report the achieved size reduction.
            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            println!(
                "ā
Compressed: {} ā {} ({:.1}x size reduction)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio
            );
        }

        Ok(())
    }
1993
    /// Converts known JSON/MessagePack training files into `.mmap` files
    /// (MessagePack-encoded `(fen, eval)` pairs suitable for memory mapping).
    ///
    /// NOTE(review): the JSON and MessagePack variants of the same dataset
    /// map to the SAME `.mmap` output name, so when both inputs exist the
    /// later entry overwrites the earlier conversion — confirm intended.
    pub fn convert_to_mmap() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Make the A100 binary dump available as JSON too.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        // (input, output) pairs; missing inputs are skipped.
        let input_files = [
            ("training_data.json", "training_data.mmap"),
            ("tactical_training_data.json", "tactical_training_data.mmap"),
            ("training_data_a100.json", "training_data_a100.mmap"),
            ("training_data.msgpack", "training_data.mmap"),
            (
                "tactical_training_data.msgpack",
                "tactical_training_data.mmap",
            ),
            ("training_data_a100.msgpack", "training_data_a100.mmap"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!(
                "š Converting {input_file} ā {output_file} (Memory-mapped format)"
            );

            // Normalize the input into (fen, eval) pairs.
            let data: Vec<(String, f32)> = if input_file.ends_with(".json") {
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                let json_value: Value = serde_json::from_reader(reader)?;

                match json_value {
                    Value::Array(arr) if !arr.is_empty() => {
                        if let Some(first) = arr.first() {
                            if first.is_array() {
                                // Tuple layout: [[fen, eval], ...]
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Array(tuple) = item {
                                            if tuple.len() >= 2 {
                                                let fen = tuple[0].as_str()?.to_string();
                                                let mut eval = tuple[1].as_f64()? as f32;

                                                // Centipawn-to-pawn rescale.
                                                // TODO(review): confirm cutoff.
                                                if eval.abs() > 15.0 {
                                                    eval /= 100.0;
                                                }

                                                Some((fen, eval))
                                            } else {
                                                None
                                            }
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else if first.is_object() {
                                // Object layout: [{"fen": ..., "evaluation": ...}, ...]
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Object(obj) = item {
                                            let fen = obj.get("fen")?.as_str()?.to_string();
                                            let mut eval = obj.get("evaluation")?.as_f64()? as f32;

                                            if eval.abs() > 15.0 {
                                                eval /= 100.0;
                                            }

                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else {
                                return Err("Failed to process training data".into());
                            }
                        } else {
                            Vec::new()
                        }
                    }
                    _ => return Err("Processing...".to_string().into()),
                }
            } else if input_file.ends_with(".msgpack") {
                // MessagePack inputs are already (fen, eval) pairs.
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                rmp_serde::from_read(reader)?
            } else {
                return Err("Unsupported input format for memory mapping".into());
            };

            // Write the normalized pairs back out as MessagePack.
            let output_file_handle = File::create(output_file)?;
            let mut writer = BufWriter::new(output_file_handle);
            rmp_serde::encode::write(&mut writer, &data)?;

            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();

            println!(
                "ā
Memory-mapped file created: {} ā {} ({} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                data.len()
            );
        }

        Ok(())
    }
2118
    /// Converts all known JSON training files to the binary format, returning
    /// the list of binary files written.
    ///
    /// Binary files make subsequent startups substantially faster. Individual
    /// conversion failures are reported and skipped, not fatal.
    pub fn convert_json_to_binary() -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let json_files = [
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
        ];

        let existing_json_files: Vec<_> = json_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        if existing_json_files.is_empty() {
            println!("ā¹ļø No JSON training files found to convert");
            return Ok(Vec::new());
        }

        println!(
            "š Converting {} JSON files to binary format...",
            existing_json_files.len()
        );

        let pb = ProgressBar::new(existing_json_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template(
                    "š¦ Converting [{elapsed_precise}] [{bar:40.yellow/blue}] {pos}/{len} {msg}",
                )?
                .progress_chars("āāā"),
        );

        let mut converted_files = Vec::new();

        for (i, json_file) in existing_json_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            // Same base name, `.bin` extension.
            let binary_file = std::path::Path::new(json_file).with_extension("bin");

            // Round-trip through a scratch engine: JSON in, binary out.
            let mut temp_engine = Self::new(1024);
            if temp_engine
                .load_training_data_incremental(json_file)
                .is_ok()
            {
                if temp_engine.save_training_data_binary(&binary_file).is_ok() {
                    converted_files.push(binary_file.to_string_lossy().to_string());
                    println!("ā
Converted {json_file} to binary format");
                } else {
                    println!("Loading complete");
                }
            } else {
                println!("Loading complete");
            }
        }

        pb.set_position(existing_json_files.len() as u64);
        pb.finish_with_message(format!("ā
Converted {} files", converted_files.len()));

        if !converted_files.is_empty() {
            println!("š Binary conversion complete! Startup will be 5-15x faster next time.");
            println!("š Conversion summary:");
            for _conversion in &converted_files {
                println!("Loading complete");
            }
        }

        Ok(converted_files)
    }
2194
    /// Returns true when approximate LSH-based similarity search is active.
    pub fn is_lsh_enabled(&self) -> bool {
        self.use_lsh
    }
2199
    /// Returns statistics for the LSH index, or `None` when no index exists.
    pub fn lsh_stats(&self) -> Option<crate::lsh::LSHStats> {
        self.lsh_index.as_ref().map(|lsh| lsh.stats())
    }
2204
2205 pub fn enable_manifold_learning(&mut self, compression_ratio: f32) -> Result<(), String> {
2207 let input_dim = self.encoder.vector_size();
2208 let output_dim = ((input_dim as f32) / compression_ratio) as usize;
2209
2210 if output_dim == 0 {
2211 return Err("Compression ratio too high, output dimension would be 0".to_string());
2212 }
2213
2214 let mut learner = ManifoldLearner::new(input_dim, output_dim);
2215 learner.init_network()?;
2216
2217 self.manifold_learner = Some(learner);
2218 self.manifold_similarity_search = Some(SimilaritySearch::new(output_dim));
2219 self.use_manifold = false; Ok(())
2222 }
2223
    /// Trains the manifold learner on every position currently in the
    /// knowledge base, then rebuilds the compressed indices and switches the
    /// engine over to manifold-based search.
    ///
    /// # Errors
    /// Fails when manifold learning was never enabled, when the knowledge
    /// base is empty, or when training itself errors.
    pub fn train_manifold_learning(&mut self, epochs: usize) -> Result<(), String> {
        if self.manifold_learner.is_none() {
            return Err(
                "Manifold learning not enabled. Call enable_manifold_learning first.".to_string(),
            );
        }

        if self.similarity_search.size() == 0 {
            return Err("No positions in knowledge base to train on.".to_string());
        }

        // Materialize all stored vectors into one (positions x dims) matrix.
        let rows = self.similarity_search.size();
        let cols = self.encoder.vector_size();

        let training_matrix = Array2::from_shape_fn((rows, cols), |(row, col)| {
            if let Some((vector, _)) = self.similarity_search.get_position_ref(row) {
                vector[col]
            } else {
                // Missing rows are padded with zeros.
                0.0
            }
        });

        if let Some(ref mut learner) = self.manifold_learner {
            learner.train(&training_matrix, epochs)?;
            let compression_ratio = learner.compression_ratio();

            // Explicitly ends the mutable borrow of `self.manifold_learner`
            // so the `&mut self` call below is permitted.
            let _ = learner;

            self.rebuild_manifold_indices()?;
            self.use_manifold = true;

            println!(
                "Manifold learning training completed. Compression ratio: {compression_ratio:.1}x"
            );
        }

        Ok(())
    }
2267
    /// Recreates the compressed (manifold-space) similarity and LSH indices
    /// from scratch, re-encoding every position in the knowledge base.
    ///
    /// A no-op when manifold learning is not enabled.
    fn rebuild_manifold_indices(&mut self) -> Result<(), String> {
        if let Some(ref learner) = self.manifold_learner {
            let output_dim = learner.output_dim();
            // Reset both manifold indices to empty structures of the new size.
            if let Some(ref mut search) = self.manifold_similarity_search {
                *search = SimilaritySearch::new(output_dim);
            }
            if let Some(ref mut lsh) = self.manifold_lsh_index {
                // 8 tables, hash size 16.
                // TODO(review): confirm these fixed LSH parameters match the
                // values chosen in enable_manifold_lsh.
                *lsh = LSH::new(output_dim, 8, 16);
            }

            // Re-encode every stored vector into the compressed space.
            for (vector, eval) in self.similarity_search.iter_positions() {
                let compressed = learner.encode(vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), eval);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, eval);
                }
            }
        }

        Ok(())
    }
2296
2297 pub fn enable_manifold_lsh(
2299 &mut self,
2300 num_tables: usize,
2301 hash_size: usize,
2302 ) -> Result<(), String> {
2303 if self.manifold_learner.is_none() {
2304 return Err("Manifold learning not enabled".to_string());
2305 }
2306
2307 let output_dim = self.manifold_learner.as_ref().unwrap().output_dim();
2308 self.manifold_lsh_index = Some(LSH::new(output_dim, num_tables, hash_size));
2309
2310 if self.use_manifold {
2312 self.rebuild_manifold_indices()?;
2313 }
2314
2315 Ok(())
2316 }
2317
    /// Returns true when manifold search is active and a learner exists.
    pub fn is_manifold_enabled(&self) -> bool {
        self.use_manifold && self.manifold_learner.is_some()
    }
2322
    /// Returns the manifold learner's compression ratio, or `None` when
    /// manifold learning has not been enabled.
    pub fn manifold_compression_ratio(&self) -> Option<f32> {
        self.manifold_learner
            .as_ref()
            .map(|l| l.compression_ratio())
    }
2329
    /// Attempts to restore a pre-trained manifold learner from the persistence
    /// database, enabling manifold search and rebuilding its indices on success.
    ///
    /// # Errors
    /// Fails when persistence is not enabled or no saved model exists.
    pub fn load_manifold_models(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(ref db) = self.database {
            match crate::manifold_learner::ManifoldLearner::load_from_database(db)? {
                Some(learner) => {
                    let compression_ratio = learner.compression_ratio();
                    println!(
                        "š§ Loaded pre-trained manifold learner (compression: {compression_ratio:.1}x)"
                    );

                    self.manifold_learner = Some(learner);
                    self.use_manifold = true;

                    // Existing positions must be re-encoded into the
                    // compressed space before searches can use them.
                    self.rebuild_manifold_indices()?;

                    println!("ā
Manifold learning enabled with compressed vectors");
                    Ok(())
                }
                None => Err("No pre-trained manifold models found in database".into()),
            }
        } else {
            Err("Database not initialized - cannot load manifold models".into())
        }
    }
2357
    /// Installs the built-in standard opening book.
    pub fn enable_opening_book(&mut self) {
        self.opening_book = Some(OpeningBook::with_standard_openings());
    }
2362
    /// Replaces the engine's opening book with the provided `book`.
    pub fn set_opening_book(&mut self, book: OpeningBook) {
        self.opening_book = Some(book);
    }
2367
2368 pub fn is_opening_position(&self, board: &Board) -> bool {
2370 self.opening_book
2371 .as_ref()
2372 .map(|book| book.contains(board))
2373 .unwrap_or(false)
2374 }
2375
    /// Looks up the opening-book entry for `board`, if a book is installed
    /// and contains the position.
    pub fn get_opening_entry(&self, board: &Board) -> Option<&OpeningEntry> {
        self.opening_book.as_ref()?.lookup(board)
    }
2380
    /// Returns opening-book statistics, or `None` when no book is installed.
    pub fn opening_book_stats(&self) -> Option<OpeningBookStats> {
        self.opening_book.as_ref().map(|book| book.stats())
    }
2385
2386 pub fn add_position_with_move(
2388 &mut self,
2389 board: &Board,
2390 evaluation: f32,
2391 chess_move: Option<ChessMove>,
2392 move_outcome: Option<f32>,
2393 ) {
2394 let position_index = self.knowledge_base_size();
2395
2396 self.add_position(board, evaluation);
2398
2399 if let (Some(mov), Some(outcome)) = (chess_move, move_outcome) {
2401 self.position_moves
2402 .entry(position_index)
2403 .or_default()
2404 .push((mov, outcome));
2405 }
2406 }
2407
    /// Recommends up to `num_recommendations` moves for `board`.
    ///
    /// Resolution order:
    /// 1. Opening book entries (highest trust).
    /// 2. Moves recorded on similar knowledge-base positions, weighted by
    ///    similarity.
    /// 3. Fallback: tactical search (if enabled) plus a capture/centrality
    ///    heuristic ordering of legal moves.
    pub fn recommend_moves(
        &mut self,
        board: &Board,
        num_recommendations: usize,
    ) -> Vec<MoveRecommendation> {
        // 1) Opening book: convert stored move strengths into recommendations.
        if let Some(entry) = self.get_opening_entry(board) {
            let mut recommendations = Vec::new();

            for (chess_move, strength) in &entry.best_moves {
                recommendations.push(MoveRecommendation {
                    chess_move: *chess_move,
                    // Book strength is slightly discounted as a confidence.
                    confidence: strength * 0.9,
                    from_similar_position_count: 1,
                    average_outcome: entry.evaluation,
                });
            }

            recommendations.sort_by(|a, b| {
                b.confidence
                    .partial_cmp(&a.confidence)
                    .unwrap_or(std::cmp::Ordering::Equal)
            });
            recommendations.truncate(num_recommendations);
            return recommendations;
        }

        // 2) Gather moves seen in the 20 most similar known positions.
        let similar_positions = self.find_similar_positions_with_indices(board, 20);

        // move -> list of (similarity weight, recorded outcome) samples.
        let mut move_data: HashMap<ChessMove, Vec<(f32, f32)>> = HashMap::new();
        use chess::MoveGen;
        // Move generation can panic on corrupt positions; bail out safely.
        let legal_moves: Vec<ChessMove> = match std::panic::catch_unwind(|| {
            MoveGen::new_legal(board).collect::<Vec<ChessMove>>()
        }) {
            Ok(moves) => moves,
            Err(_) => {
                return Vec::new();
            }
        };

        for (position_index, _eval, similarity) in similar_positions {
            if let Some(moves) = self.position_moves.get(&position_index) {
                for &(chess_move, outcome) in moves {
                    // Keep only moves that are actually legal here.
                    if legal_moves.contains(&chess_move) {
                        move_data
                            .entry(chess_move)
                            .or_default()
                            .push((similarity, outcome));
                    }
                }
            }
        }

        // 3) Fallback when similarity produced no candidate moves.
        if move_data.is_empty() {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let tactical_result = tactical_search.search(board);

                // The tactical best move gets a fixed 0.75 confidence weight.
                if let Some(best_move) = tactical_result.best_move {
                    move_data.insert(best_move, vec![(0.75, tactical_result.evaluation)]);
                }

                let mut ordered_moves = legal_moves.clone();

                // Order heuristically: captures first, then central moves.
                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                // Heuristic moves get a lower 0.6 weight; `or_insert_with`
                // avoids overwriting the tactical best move inserted above.
                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data
                        .entry(chess_move)
                        .or_insert_with(|| vec![(0.6, 0.0)]);
                }
            } else {
                // No tactical search: pure heuristic ordering with an even
                // lower 0.3 confidence weight.
                let mut ordered_moves = legal_moves.clone();

                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data.insert(chess_move, vec![(0.3, 0.0)]);
                }
            }
        }

        // Aggregate samples per move into a weighted outcome and confidence.
        let mut recommendations = Vec::new();

        for (chess_move, outcomes) in move_data {
            if outcomes.is_empty() {
                continue;
            }

            let mut weighted_sum = 0.0;
            let mut weight_sum = 0.0;

            for &(similarity, outcome) in &outcomes {
                weighted_sum += similarity * outcome;
                weight_sum += similarity;
            }

            // Similarity-weighted average of the recorded outcomes.
            let average_outcome = if weight_sum > 0.0 {
                weighted_sum / weight_sum
            } else {
                0.0
            };

            let avg_similarity =
                outcomes.iter().map(|(s, _)| s).sum::<f32>() / outcomes.len() as f32;
            // More supporting positions -> small log-scaled confidence bonus.
            let position_count_bonus = (outcomes.len() as f32).ln().max(1.0) / 5.0;
            let confidence = (avg_similarity * 0.8 + position_count_bonus * 0.2).min(0.95);
            // NOTE(review): `.min(1.0)` below is redundant after `.min(0.95)`.
            recommendations.push(MoveRecommendation {
                chess_move,
                confidence: confidence.min(1.0),
                from_similar_position_count: outcomes.len(),
                average_outcome,
            });
        }

        recommendations.sort_by(|a, b| {
            b.confidence
                .partial_cmp(&a.confidence)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        recommendations.truncate(num_recommendations);
        recommendations
    }
2599
2600 pub fn recommend_legal_moves(
2602 &mut self,
2603 board: &Board,
2604 num_recommendations: usize,
2605 ) -> Vec<MoveRecommendation> {
2606 use chess::MoveGen;
2607
2608 let legal_moves: std::collections::HashSet<ChessMove> = MoveGen::new_legal(board).collect();
2610
2611 let all_recommendations = self.recommend_moves(board, num_recommendations * 2); all_recommendations
2615 .into_iter()
2616 .filter(|rec| legal_moves.contains(&rec.chess_move))
2617 .take(num_recommendations)
2618 .collect()
2619 }
2620
2621 pub fn enable_persistence<P: AsRef<Path>>(
2623 &mut self,
2624 db_path: P,
2625 ) -> Result<(), Box<dyn std::error::Error>> {
2626 let database = Database::new(db_path)?;
2627 self.database = Some(database);
2628 println!("Persistence enabled");
2629 Ok(())
2630 }
2631
2632 pub fn save_to_database(&self) -> Result<(), Box<dyn std::error::Error>> {
2634 let db = self
2635 .database
2636 .as_ref()
2637 .ok_or("Database not enabled. Call enable_persistence() first.")?;
2638
2639 println!("š¾ Saving engine state to database (batch mode)...");
2640
2641 let current_time = std::time::SystemTime::now()
2643 .duration_since(std::time::UNIX_EPOCH)?
2644 .as_secs() as i64;
2645
2646 let mut position_data_batch = Vec::with_capacity(self.position_boards.len());
2647
2648 for (i, board) in self.position_boards.iter().enumerate() {
2649 if i < self.position_vectors.len() && i < self.position_evaluations.len() {
2650 let vector = self.position_vectors[i].as_slice().unwrap();
2651 let position_data = PositionData {
2652 fen: board.to_string(),
2653 vector: vector.iter().map(|&x| x as f64).collect(),
2654 evaluation: Some(self.position_evaluations[i] as f64),
2655 compressed_vector: None, created_at: current_time,
2657 };
2658 position_data_batch.push(position_data);
2659 }
2660 }
2661
2662 if !position_data_batch.is_empty() {
2664 let saved_count = db.save_positions_batch(&position_data_batch)?;
2665 println!("š Batch saved {saved_count} positions");
2666 }
2667
2668 if let Some(ref lsh) = self.lsh_index {
2670 lsh.save_to_database(db)?;
2671 }
2672
2673 if let Some(ref learner) = self.manifold_learner {
2675 if learner.is_trained() {
2676 learner.save_to_database(db)?;
2677 }
2678 }
2679
2680 println!("ā
Engine state saved successfully (batch optimized)");
2681 Ok(())
2682 }
2683
    /// Restore engine state (positions, LSH index, manifold learner) from the
    /// enabled database.
    ///
    /// # Errors
    /// Fails if persistence was not enabled via `enable_persistence()`, or if
    /// any database read (or manifold index rebuild) fails.
    pub fn load_from_database(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let db = self
            .database
            .as_ref()
            .ok_or("Database not enabled. Call enable_persistence() first.")?;

        println!("Loading engine state from database...");

        let positions = db.load_all_positions()?;
        // Rows whose FEN fails to parse are skipped silently.
        for position_data in positions {
            if let Ok(board) = Board::from_str(&position_data.fen) {
                let vector: Vec<f32> = position_data.vector.iter().map(|&x| x as f32).collect();
                let vector_array = Array1::from(vector);
                let mut evaluation = position_data.evaluation.unwrap_or(0.0) as f32;

                // Large magnitudes are rescaled by 100 — presumably
                // centipawn-scaled values written by an older format being
                // converted to pawn units. NOTE(review): the 15.0 threshold
                // is heuristic — confirm against the writer's scale.
                if evaluation.abs() > 15.0 {
                    evaluation /= 100.0;
                }

                self.similarity_search
                    .add_position(vector_array.clone(), evaluation);

                // Keep the three parallel arrays in sync with the search index.
                self.position_vectors.push(vector_array);
                self.position_boards.push(board);
                self.position_evaluations.push(evaluation);
            }
        }

        // Rebuild the LSH index from the just-loaded vectors when LSH is on.
        if self.use_lsh {
            let positions_for_lsh: Vec<(Array1<f32>, f32)> = self
                .position_vectors
                .iter()
                .zip(self.position_evaluations.iter())
                .map(|(v, &e)| (v.clone(), e))
                .collect();

            match LSH::load_from_database(db, &positions_for_lsh)? {
                Some(lsh) => {
                    self.lsh_index = Some(lsh);
                    println!("Loaded LSH configuration from database");
                }
                None => {
                    println!("No LSH configuration found in database");
                }
            }
        }

        match ManifoldLearner::load_from_database(db)? {
            Some(learner) => {
                self.manifold_learner = Some(learner);
                if self.use_manifold {
                    // Manifold indices are derived state; recompute them
                    // from the freshly loaded learner.
                    self.rebuild_manifold_indices()?;
                }
                println!("Loaded manifold learner from database");
            }
            None => {
                println!("No manifold learner found in database");
            }
        }

        println!(
            "Engine state loaded successfully ({} positions)",
            self.knowledge_base_size()
        );
        Ok(())
    }
2759
2760 pub fn new_with_persistence<P: AsRef<Path>>(
2762 vector_size: usize,
2763 db_path: P,
2764 ) -> Result<Self, Box<dyn std::error::Error>> {
2765 let mut engine = Self::new(vector_size);
2766 engine.enable_persistence(db_path)?;
2767
2768 match engine.load_from_database() {
2770 Ok(_) => {
2771 println!("Loaded existing engine from database");
2772 }
2773 Err(e) => {
2774 println!("Starting fresh engine (load failed: {e})");
2775 }
2776 }
2777
2778 Ok(engine)
2779 }
2780
2781 pub fn auto_save(&self) -> Result<(), Box<dyn std::error::Error>> {
2783 if self.database.is_some() {
2784 self.save_to_database()?;
2785 }
2786 Ok(())
2787 }
2788
    /// True once `enable_persistence()` has been called successfully.
    pub fn is_persistence_enabled(&self) -> bool {
        self.database.is_some()
    }
2793
2794 pub fn database_position_count(&self) -> Result<i64, Box<dyn std::error::Error>> {
2796 let db = self.database.as_ref().ok_or("Database not enabled")?;
2797 Ok(db.get_position_count()?)
2798 }
2799
2800 pub fn enable_tactical_search(&mut self, config: TacticalConfig) {
2802 self.tactical_search = Some(TacticalSearch::new(config));
2803 }
2804
2805 pub fn enable_tactical_search_default(&mut self) {
2807 self.tactical_search = Some(TacticalSearch::new_default());
2808 }
2809
    /// Replace the hybrid pattern/tactical evaluation configuration wholesale.
    pub fn configure_hybrid_evaluation(&mut self, config: HybridConfig) {
        self.hybrid_config = config;
    }
2814
    /// True when a tactical searcher has been installed.
    pub fn is_tactical_search_enabled(&self) -> bool {
        self.tactical_search.is_some()
    }
2819
2820 pub fn enable_parallel_search(&mut self, num_threads: usize) {
2822 if let Some(ref mut tactical_search) = self.tactical_search {
2823 tactical_search.config.enable_parallel_search = true;
2824 tactical_search.config.num_threads = num_threads;
2825 println!(
2826 "š§µ Parallel tactical search enabled with {num_threads} threads"
2827 );
2828 }
2829 }
2830
2831 pub fn is_parallel_search_enabled(&self) -> bool {
2833 self.tactical_search
2834 .as_ref()
2835 .map(|ts| ts.config.enable_parallel_search)
2836 .unwrap_or(false)
2837 }
2838
    /// Read-only access to the current hybrid evaluation configuration.
    pub fn hybrid_config(&self) -> &HybridConfig {
        &self.hybrid_config
    }
2862
    /// True when an opening book has been enabled.
    pub fn is_opening_book_enabled(&self) -> bool {
        self.opening_book.is_some()
    }
2867
2868 pub fn self_play_training(
2870 &mut self,
2871 config: training::SelfPlayConfig,
2872 ) -> Result<usize, Box<dyn std::error::Error>> {
2873 let mut trainer = training::SelfPlayTrainer::new(config);
2874 let new_data = trainer.generate_training_data(self);
2875
2876 let positions_added = new_data.data.len();
2877
2878 for data in &new_data.data {
2880 self.add_position(&data.board, data.evaluation);
2881 }
2882
2883 if self.database.is_some() {
2885 match self.save_to_database() {
2886 Ok(_) => println!("š¾ Saved {positions_added} positions to database"),
2887 Err(_e) => println!("Loading complete"),
2888 }
2889 }
2890
2891 println!(
2892 "š§ Self-play training complete: {positions_added} new positions learned"
2893 );
2894 Ok(positions_added)
2895 }
2896
2897 pub fn continuous_self_play(
2899 &mut self,
2900 config: training::SelfPlayConfig,
2901 iterations: usize,
2902 save_path: Option<&str>,
2903 ) -> Result<usize, Box<dyn std::error::Error>> {
2904 let mut total_positions = 0;
2905 let mut trainer = training::SelfPlayTrainer::new(config.clone());
2906
2907 println!(
2908 "š Starting continuous self-play training for {iterations} iterations..."
2909 );
2910
2911 for iteration in 1..=iterations {
2912 println!("\n--- Self-Play Iteration {iteration}/{iterations} ---");
2913
2914 let new_data = trainer.generate_training_data(self);
2916 let batch_size = new_data.data.len();
2917
2918 for data in &new_data.data {
2920 self.add_position(&data.board, data.evaluation);
2921 }
2922
2923 total_positions += batch_size;
2924
2925 println!(
2926 "ā
Iteration {}: Added {} positions (total: {})",
2927 iteration,
2928 batch_size,
2929 self.knowledge_base_size()
2930 );
2931
2932 if iteration % 5 == 0 || iteration == iterations {
2934 if let Some(path) = save_path {
2936 match self.save_training_data_binary(path) {
2937 Ok(_) => println!("š¾ Progress saved to {path} (binary format)"),
2938 Err(_e) => println!("Loading complete"),
2939 }
2940 }
2941
2942 if self.database.is_some() {
2944 match self.save_to_database() {
2945 Ok(_) => println!(
2946 "š¾ Database synchronized ({} total positions)",
2947 self.knowledge_base_size()
2948 ),
2949 Err(_e) => println!("Loading complete"),
2950 }
2951 }
2952 }
2953
2954 if iteration % 10 == 0
2956 && self.knowledge_base_size() > 5000
2957 && self.manifold_learner.is_some()
2958 {
2959 println!("š§ Retraining manifold learning with new data...");
2960 let _ = self.train_manifold_learning(5);
2961 }
2962 }
2963
2964 println!(
2965 "\nš Continuous self-play complete: {total_positions} total new positions"
2966 );
2967 Ok(total_positions)
2968 }
2969
2970 pub fn adaptive_self_play(
2972 &mut self,
2973 base_config: training::SelfPlayConfig,
2974 target_strength: f32,
2975 ) -> Result<usize, Box<dyn std::error::Error>> {
2976 let mut current_config = base_config;
2977 let mut total_positions = 0;
2978 let mut iteration = 1;
2979
2980 println!(
2981 "šÆ Starting adaptive self-play training (target strength: {target_strength:.2})..."
2982 );
2983
2984 loop {
2985 println!("\n--- Adaptive Iteration {iteration} ---");
2986
2987 let positions_added = self.self_play_training(current_config.clone())?;
2989 total_positions += positions_added;
2990
2991 if self.database.is_some() {
2993 match self.save_to_database() {
2994 Ok(_) => println!("š¾ Adaptive training progress saved to database"),
2995 Err(_e) => println!("Loading complete"),
2996 }
2997 }
2998
2999 let current_strength = self.knowledge_base_size() as f32 / 10000.0; println!(
3003 "š Current strength estimate: {current_strength:.2} (target: {target_strength:.2})"
3004 );
3005
3006 if current_strength >= target_strength {
3007 println!("š Target strength reached!");
3008 break;
3009 }
3010
3011 current_config.exploration_factor *= 0.95; current_config.temperature *= 0.98; current_config.games_per_iteration =
3015 (current_config.games_per_iteration as f32 * 1.1) as usize; iteration += 1;
3018
3019 if iteration > 50 {
3020 println!("ā ļø Maximum iterations reached");
3021 break;
3022 }
3023 }
3024
3025 Ok(total_positions)
3026 }
3027}
3028
#[cfg(test)]
mod tests {
    //! Unit and integration tests for `ChessVectorEngine`: core storage and
    //! search, move recommendation (including the no-data fallback), opening
    //! book, manifold learning, LSH, training pipelines, and hybrid tactical
    //! evaluation.
    use super::*;
    use chess::Board;

    // A fresh engine starts with an empty knowledge base.
    #[test]
    fn test_engine_creation() {
        let engine = ChessVectorEngine::new(1024);
        assert_eq!(engine.knowledge_base_size(), 0);
    }

    // Adding a position makes it discoverable via similarity search.
    #[test]
    fn test_add_and_search() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.0);
        assert_eq!(engine.knowledge_base_size(), 1);

        let similar = engine.find_similar_positions(&board, 1);
        assert_eq!(similar.len(), 1);
    }

    // A stored evaluation is returned (near-)exactly for the same position.
    #[test]
    fn test_evaluation() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.5);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());
        assert!((evaluation.unwrap() - 0.5).abs() < 1e-6);
    }

    // Recommendations come back for positions with stored move data, and the
    // legal-only variant is also non-empty.
    #[test]
    fn test_move_recommendations() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let mov = ChessMove::from_str("e2e4").unwrap();
        engine.add_position_with_move(&board, 0.0, Some(mov), Some(0.8));

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());

        let legal_recommendations = engine.recommend_legal_moves(&board, 3);
        assert!(!legal_recommendations.is_empty());
    }

    // With no training data at all, recommend_moves must still fall back to
    // heuristic ordering and return exactly the requested number of moves.
    #[test]
    fn test_empty_knowledge_base_fallback() {
        let mut engine = ChessVectorEngine::new(1024);

        use std::str::FromStr;
        let board =
            Board::from_str("r1bqkbnr/pppp1ppp/2n5/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 0 1")
                .unwrap();

        let recommendations = engine.recommend_moves(&board, 5);
        assert!(
            !recommendations.is_empty(),
            "recommend_moves should not return empty even with no training data"
        );
        assert_eq!(
            recommendations.len(),
            5,
            "Should return exactly 5 recommendations"
        );

        // Fallback entries carry positive confidence, a count of 1, and a
        // neutral outcome.
        for rec in &recommendations {
            assert!(rec.confidence > 0.0, "Confidence should be greater than 0");
            assert_eq!(
                rec.from_similar_position_count, 1,
                "Should have count of 1 for fallback"
            );
            assert_eq!(rec.average_outcome, 0.0, "Should have neutral outcome");
        }

        let starting_board = Board::default();
        let starting_recommendations = engine.recommend_moves(&starting_board, 3);
        assert!(
            !starting_recommendations.is_empty(),
            "Should work for starting position too"
        );

        // Every fallback recommendation must be a legal move.
        use chess::MoveGen;
        let legal_moves: std::collections::HashSet<_> = MoveGen::new_legal(&board).collect();
        for rec in &recommendations {
            assert!(
                legal_moves.contains(&rec.chess_move),
                "All recommended moves should be legal"
            );
        }
    }

    // Opening book: enabling it exposes an entry and stats for the starting
    // position, and book-backed recommendations carry high confidence.
    #[test]
    fn test_opening_book_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        engine.enable_opening_book();
        assert!(engine.opening_book.is_some());

        let board = Board::default();
        assert!(engine.is_opening_position(&board));

        let entry = engine.get_opening_entry(&board);
        assert!(entry.is_some());

        let stats = engine.opening_book_stats();
        assert!(stats.is_some());
        assert!(stats.unwrap().total_positions > 0);

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());
        assert!(recommendations[0].confidence > 0.7);
    }

    // Manifold learning: enable + train, verify the requested compression
    // ratio is reported, and similarity search still works afterwards.
    #[test]
    fn test_manifold_learning_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..10 {
            engine.add_position(&board, i as f32 * 0.1);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());

        let ratio = engine.manifold_compression_ratio();
        assert!(ratio.is_some());
        assert!((ratio.unwrap() - 8.0).abs() < 0.1);

        assert!(engine.train_manifold_learning(5).is_ok());

        let original_similar = engine.find_similar_positions(&board, 3);
        assert!(!original_similar.is_empty());
    }

    // LSH: approximate search still finds neighbors and evaluation works.
    #[test]
    fn test_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..50 {
            engine.add_position(&board, i as f32 * 0.02);
        }

        engine.enable_lsh(4, 8);

        let similar = engine.find_similar_positions(&board, 5);
        assert!(!similar.is_empty());
        assert!(similar.len() <= 5);

        let eval = engine.evaluate_position(&board);
        assert!(eval.is_some());
    }

    // Manifold + LSH combined: search in the compressed space stays usable.
    #[test]
    fn test_manifold_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..20 {
            engine.add_position(&board, i as f32 * 0.05);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());
        assert!(engine.train_manifold_learning(3).is_ok());

        assert!(engine.enable_manifold_lsh(4, 8).is_ok());

        let similar = engine.find_similar_positions(&board, 3);
        assert!(!similar.is_empty());

        let _recommendations = engine.recommend_moves(&board, 2);
    }

    // Positions stored with explicit moves are tracked in `position_moves`
    // and surface in recommendations.
    #[test]
    fn test_position_with_move_storage() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let move1 = ChessMove::from_str("e2e4").unwrap();
        let move2 = ChessMove::from_str("d2d4").unwrap();

        engine.add_position_with_move(&board, 0.0, Some(move1), Some(0.7));
        engine.add_position_with_move(&board, 0.1, Some(move2), Some(0.6));

        assert_eq!(engine.position_moves.len(), 2);

        let recommendations = engine.recommend_moves(&board, 5);
        let _move_strings: Vec<String> = recommendations
            .iter()
            .map(|r| r.chess_move.to_string())
            .collect();

        assert!(!recommendations.is_empty());
    }

    // Coarse performance guardrails; bounds are deliberately generous so CI
    // noise does not cause flakes.
    #[test]
    fn test_performance_regression_basic() {
        use std::time::Instant;

        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        for i in 0..100 {
            engine.add_position(&board, i as f32 * 0.01);
        }

        let start = Instant::now();

        for _ in 0..100 {
            engine.add_position(&board, 0.0);
        }

        let encoding_time = start.elapsed();

        let start = Instant::now();
        for _ in 0..10 {
            engine.find_similar_positions(&board, 5);
        }
        let search_time = start.elapsed();

        assert!(
            encoding_time.as_millis() < 10000,
            "Position encoding too slow: {}ms",
            encoding_time.as_millis()
        );
        assert!(
            search_time.as_millis() < 5000,
            "Search too slow: {}ms",
            search_time.as_millis()
        );
    }

    // Knowledge base size grows exactly with the number of added positions.
    #[test]
    fn test_memory_usage_reasonable() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        let initial_size = engine.knowledge_base_size();

        for i in 0..1000 {
            engine.add_position(&board, i as f32 * 0.001);
        }

        let final_size = engine.knowledge_base_size();
        assert_eq!(final_size, initial_size + 1000);

        assert!(final_size > initial_size);
    }

    // Incremental dataset training: the expected count (2 existing + 2 from
    // the dataset => 3) indicates already-known positions are not re-added.
    #[test]
    fn test_incremental_training() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);
        let board1 = Board::default();
        let board2 =
            Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap();

        engine.add_position(&board1, 0.0);
        engine.add_position(&board2, 0.2);
        assert_eq!(engine.knowledge_base_size(), 2);

        let mut dataset = crate::training::TrainingDataset::new();
        // board1 is already in the engine; only the second position is new.
        dataset.add_position(board1, 0.1, 15, 1);
        dataset.add_position(
            Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
            15,
            2,
        );
        engine.train_from_dataset_incremental(&dataset);

        assert_eq!(engine.knowledge_base_size(), 3);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 3);
        assert_eq!(stats.unique_positions, 3);
        assert!(!stats.has_move_data);
    }

    // Round-trip: saved training data can be loaded incrementally into a
    // second engine without losing that engine's existing positions.
    #[test]
    fn test_save_load_incremental() {
        use std::str::FromStr;
        use tempfile::tempdir;

        let temp_dir = tempdir().unwrap();
        let file_path = temp_dir.path().join("test_training.json");

        let mut engine1 = ChessVectorEngine::new(1024);
        engine1.add_position(&Board::default(), 0.0);
        engine1.add_position(
            &Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap(),
            0.2,
        );

        engine1.save_training_data(&file_path).unwrap();

        let mut engine2 = ChessVectorEngine::new(1024);
        engine2.add_position(
            &Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
        );
        assert_eq!(engine2.knowledge_base_size(), 1);

        engine2.load_training_data_incremental(&file_path).unwrap();

        // 1 pre-existing + 2 loaded = 3.
        assert_eq!(engine2.knowledge_base_size(), 3);
    }

    // training_stats reflects position counts, move data, and feature flags.
    #[test]
    fn test_training_stats() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);

        // Empty engine: all counters zero, all features off.
        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 0);
        assert_eq!(stats.unique_positions, 0);
        assert!(!stats.has_move_data);
        assert!(!stats.lsh_enabled);
        assert!(!stats.manifold_enabled);
        assert!(!stats.opening_book_enabled);

        engine.add_position(&Board::default(), 0.0);
        engine.add_position_with_move(
            &Board::default(),
            0.1,
            Some(ChessMove::from_str("e2e4").unwrap()),
            Some(0.8),
        );

        engine.enable_opening_book();
        engine.enable_lsh(4, 8);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 2);
        assert!(stats.has_move_data);
        assert!(stats.move_data_entries > 0);
        assert!(stats.lsh_enabled);
        assert!(stats.opening_book_enabled);
    }

    // Tactical search toggling and hybrid evaluation both produce values.
    #[test]
    fn test_tactical_search_integration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        assert!(!engine.is_tactical_search_enabled());

        engine.enable_tactical_search_default();
        assert!(engine.is_tactical_search_enabled());

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        engine.add_position(&board, 0.5);
        let hybrid_evaluation = engine.evaluate_position(&board);
        assert!(hybrid_evaluation.is_some());
    }

    // Custom hybrid configs — with and without tactical refinement — are
    // accepted and evaluation still returns a value under both.
    #[test]
    fn test_hybrid_evaluation_configuration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.enable_tactical_search_default();

        let custom_config = HybridConfig {
            pattern_confidence_threshold: 0.9,
            enable_tactical_refinement: true,
            tactical_config: TacticalConfig::default(),
            pattern_weight: 0.8,
            min_similar_positions: 5,
        };

        engine.configure_hybrid_evaluation(custom_config);

        engine.add_position(&board, 0.3);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        let no_tactical_config = HybridConfig {
            enable_tactical_refinement: false,
            ..HybridConfig::default()
        };

        engine.configure_hybrid_evaluation(no_tactical_config);

        let pattern_only_evaluation = engine.evaluate_position(&board);
        assert!(pattern_only_evaluation.is_some());
    }
}