1pub mod ann;
67pub mod auto_discovery;
68pub mod features;
69pub mod gpu_acceleration;
70pub mod license;
71pub mod lichess_loader;
72pub mod lsh;
73pub mod manifold_learner;
74pub mod nnue;
75pub mod opening_book;
76pub mod persistence;
77pub mod position_encoder;
78pub mod similarity_search;
79pub mod streaming_loader;
80pub mod tactical_search;
81pub mod training;
82pub mod ultra_fast_loader;
83pub mod variational_autoencoder;
84pub mod uci;
86
87pub use auto_discovery::{AutoDiscovery, FormatPriority, TrainingFile};
88pub use features::{FeatureChecker, FeatureError, FeatureRegistry, FeatureTier};
89pub use gpu_acceleration::{DeviceType, GPUAccelerator};
90pub use license::{
91 LicenseError, LicenseKey, LicenseStatus, LicenseVerifier, LicensedFeatureChecker,
92};
93pub use lichess_loader::{load_lichess_puzzles_basic, load_lichess_puzzles_premium, LichessLoader};
94pub use lsh::LSH;
95pub use manifold_learner::ManifoldLearner;
96pub use nnue::{BlendStrategy, EvalStats, HybridEvaluator, NNUEConfig, NNUE};
97pub use opening_book::{OpeningBook, OpeningBookStats, OpeningEntry};
98pub use persistence::{Database, LSHTableData, PositionData};
99pub use position_encoder::PositionEncoder;
100pub use similarity_search::SimilaritySearch;
101pub use streaming_loader::StreamingLoader;
102pub use tactical_search::{TacticalConfig, TacticalResult, TacticalSearch};
103pub use training::{
104 EngineEvaluator, GameExtractor, SelfPlayConfig, SelfPlayTrainer, TacticalPuzzle,
105 TacticalPuzzleParser, TacticalTrainingData, TrainingData, TrainingDataset,
106};
107pub use ultra_fast_loader::{LoadingStats, UltraFastLoader};
108pub use variational_autoencoder::{VAEConfig, VariationalAutoencoder};
109pub use uci::{run_uci_engine, run_uci_engine_with_config, UCIConfig, UCIEngine};
111
112use chess::{Board, ChessMove};
113use ndarray::{Array1, Array2};
114use serde_json::Value;
115use std::collections::HashMap;
116use std::path::Path;
117use std::str::FromStr;
118
119fn move_centrality(chess_move: &ChessMove) -> f32 {
122 let dest_square = chess_move.get_dest();
123 let rank = dest_square.get_rank().to_index() as f32;
124 let file = dest_square.get_file().to_index() as f32;
125
126 let center_rank = 3.5;
128 let center_file = 3.5;
129
130 let rank_distance = (rank - center_rank).abs();
131 let file_distance = (file - center_file).abs();
132
133 let max_distance = 3.5; let distance = (rank_distance + file_distance) / 2.0;
136 max_distance - distance
137}
138
/// A move suggestion derived from similar positions in the knowledge base.
#[derive(Debug, Clone)]
pub struct MoveRecommendation {
    /// The recommended move.
    pub chess_move: ChessMove,
    /// Confidence in the recommendation (higher is stronger).
    pub confidence: f32,
    /// Number of similar stored positions this recommendation was derived from.
    pub from_similar_position_count: usize,
    /// Average outcome/evaluation observed across those similar positions.
    pub average_outcome: f32,
}
147
/// Snapshot of the engine's training/knowledge-base state,
/// as reported by [`ChessVectorEngine::training_stats`].
#[derive(Debug, Clone)]
pub struct TrainingStats {
    /// Positions held by the similarity search index.
    pub total_positions: usize,
    /// Boards tracked in `position_boards` (deduplicated storage).
    pub unique_positions: usize,
    /// Whether any per-position move data has been recorded.
    pub has_move_data: bool,
    /// Number of positions that have associated move data.
    pub move_data_entries: usize,
    /// Whether approximate (LSH) search is active.
    pub lsh_enabled: bool,
    /// Whether manifold-compressed search is active.
    pub manifold_enabled: bool,
    /// Whether an opening book is loaded.
    pub opening_book_enabled: bool,
}
159
/// Tuning knobs for blending pattern-based (similarity) evaluation with
/// tactical search in [`ChessVectorEngine::evaluate_position`].
#[derive(Debug, Clone)]
pub struct HybridConfig {
    /// Below this pattern confidence, tactical refinement kicks in.
    pub pattern_confidence_threshold: f32,
    /// If false, evaluation uses pattern matching only.
    pub enable_tactical_refinement: bool,
    /// Configuration forwarded to the tactical searcher.
    pub tactical_config: TacticalConfig,
    /// Base weight given to the pattern evaluation when blending
    /// (scaled by confidence at evaluation time).
    pub pattern_weight: f32,
    /// Number of similar positions considered "enough" for full confidence.
    pub min_similar_positions: usize,
}
174
175impl Default for HybridConfig {
176 fn default() -> Self {
177 Self {
178 pattern_confidence_threshold: 0.8,
179 enable_tactical_refinement: true,
180 tactical_config: TacticalConfig::default(),
181 pattern_weight: 0.7, min_similar_positions: 3,
183 }
184 }
185}
186
/// Core engine: encodes chess positions as vectors and evaluates them via
/// similarity search, optionally accelerated by LSH, manifold compression,
/// an opening book, tactical search, and on-disk persistence.
pub struct ChessVectorEngine {
    // Converts a `Board` into a fixed-size feature vector.
    encoder: PositionEncoder,
    // Exact (linear) similarity search over all stored vectors.
    similarity_search: SimilaritySearch,
    // Optional approximate nearest-neighbour index over the full vectors.
    lsh_index: Option<LSH>,
    // Optional learner that compresses vectors into a lower-dimensional space.
    manifold_learner: Option<ManifoldLearner>,
    // Route queries through the LSH index when true.
    use_lsh: bool,
    // Route queries through the manifold-compressed indexes when true.
    use_manifold: bool,
    // Per-position move data, keyed by position index.
    position_moves: HashMap<usize, Vec<(ChessMove, f32)>>,
    // Exact search over manifold-compressed vectors.
    manifold_similarity_search: Option<SimilaritySearch>,
    // Approximate search over manifold-compressed vectors.
    manifold_lsh_index: Option<LSH>,
    // Gates functionality by feature tier.
    feature_checker: FeatureChecker,
    // Optional license-backed feature gating.
    licensed_feature_checker: Option<LicensedFeatureChecker>,
    // Parallel arrays: vector, board, and evaluation for each stored position.
    position_vectors: Vec<Array1<f32>>,
    position_boards: Vec<Board>,
    position_evaluations: Vec<f32>,
    // Optional opening-book lookup consulted before similarity search.
    opening_book: Option<OpeningBook>,
    // Optional persistence backend.
    database: Option<Database>,
    // Optional tactical searcher for hybrid evaluation.
    tactical_search: Option<TacticalSearch>,
    // Blending configuration for pattern vs tactical evaluation.
    hybrid_config: HybridConfig,
}
274
// Manual Clone: most state is cloned, but a few members are deliberately
// reset instead of cloned (presumably because they hold non-cloneable
// resources such as model state, license handles, or DB connections —
// TODO confirm against those types).
impl Clone for ChessVectorEngine {
    fn clone(&self) -> Self {
        Self {
            encoder: self.encoder.clone(),
            similarity_search: self.similarity_search.clone(),
            lsh_index: self.lsh_index.clone(),
            // Reset: manifold learner is not carried over, and use_manifold is
            // forced off accordingly.
            // NOTE(review): the manifold search/LSH caches below ARE cloned even
            // though use_manifold is forced false — confirm this asymmetry is
            // intentional.
            manifold_learner: None,
            use_lsh: self.use_lsh,
            use_manifold: false,
            position_moves: self.position_moves.clone(),
            manifold_similarity_search: self.manifold_similarity_search.clone(),
            manifold_lsh_index: self.manifold_lsh_index.clone(),
            feature_checker: self.feature_checker.clone(),
            // Reset: licensed checker is not cloned.
            licensed_feature_checker: None,
            position_vectors: self.position_vectors.clone(),
            position_boards: self.position_boards.clone(),
            position_evaluations: self.position_evaluations.clone(),
            opening_book: self.opening_book.clone(),
            // Reset: the database connection is not cloned.
            database: None,
            tactical_search: self.tactical_search.clone(),
            hybrid_config: self.hybrid_config.clone(),
        }
    }
}
300
301impl ChessVectorEngine {
    /// Create a bare engine with exact similarity search only.
    ///
    /// All optional subsystems (LSH, manifold compression, opening book,
    /// persistence, tactical search) start disabled; the feature tier
    /// defaults to open source.
    pub fn new(vector_size: usize) -> Self {
        Self {
            encoder: PositionEncoder::new(vector_size),
            similarity_search: SimilaritySearch::new(vector_size),
            lsh_index: None,
            manifold_learner: None,
            use_lsh: false,
            use_manifold: false,
            position_moves: HashMap::new(),
            manifold_similarity_search: None,
            manifold_lsh_index: None,
            feature_checker: FeatureChecker::new(FeatureTier::OpenSource),
            licensed_feature_checker: None,
            position_vectors: Vec::new(),
            position_boards: Vec::new(),
            position_evaluations: Vec::new(),
            opening_book: None,
            database: None,
            tactical_search: None,
            hybrid_config: HybridConfig::default(),
        }
    }
326
327 pub fn new_with_tier(vector_size: usize, tier: FeatureTier) -> Self {
329 let mut engine = Self::new(vector_size);
330 engine.feature_checker = FeatureChecker::new(tier);
331 engine
332 }
333
    /// Return the feature tier the engine is currently operating under.
    pub fn get_feature_tier(&self) -> &FeatureTier {
        self.feature_checker.get_current_tier()
    }
338
    /// Switch the engine to a different feature tier (delegates to the checker).
    pub fn upgrade_tier(&mut self, new_tier: FeatureTier) {
        self.feature_checker.upgrade_tier(new_tier);
    }
343
344 pub fn is_feature_available(&self, feature: &str) -> bool {
346 self.feature_checker.check_feature(feature).is_ok()
347 }
348
    /// Like [`Self::is_feature_available`], but returns the checker's error so
    /// callers can propagate it with `?`.
    ///
    /// # Errors
    /// Returns a [`FeatureError`] when `feature` is not allowed by the tier.
    pub fn require_feature(&self, feature: &str) -> Result<(), FeatureError> {
        self.feature_checker.require_feature(feature)
    }
353
354 pub fn new_adaptive(vector_size: usize, expected_positions: usize, use_case: &str) -> Self {
357 match use_case {
358 "training" => {
359 if expected_positions > 10000 {
360 Self::new_with_lsh(vector_size, 12, 20)
362 } else {
363 Self::new(vector_size)
364 }
365 }
366 "gameplay" => {
367 if expected_positions > 15000 {
368 Self::new_with_lsh(vector_size, 10, 18)
370 } else {
371 Self::new(vector_size)
372 }
373 }
374 "analysis" => {
375 if expected_positions > 10000 {
376 Self::new_with_lsh(vector_size, 14, 22)
378 } else {
379 Self::new(vector_size)
380 }
381 }
382 _ => Self::new(vector_size), }
384 }
385
386 pub fn new_with_lsh(vector_size: usize, num_tables: usize, hash_size: usize) -> Self {
388 Self {
389 encoder: PositionEncoder::new(vector_size),
390 similarity_search: SimilaritySearch::new(vector_size),
391 lsh_index: Some(LSH::new(vector_size, num_tables, hash_size)),
392 manifold_learner: None,
393 use_lsh: true,
394 use_manifold: false,
395 position_moves: HashMap::new(),
396 manifold_similarity_search: None,
397 manifold_lsh_index: None,
398 feature_checker: FeatureChecker::new(FeatureTier::OpenSource),
399 licensed_feature_checker: None,
400 position_vectors: Vec::new(),
401 position_boards: Vec::new(),
402 position_evaluations: Vec::new(),
403 opening_book: None,
404 database: None,
405 tactical_search: None,
406 hybrid_config: HybridConfig::default(),
408 }
409 }
410
411 pub fn enable_lsh(&mut self, num_tables: usize, hash_size: usize) {
413 self.lsh_index = Some(LSH::new(self.encoder.vector_size(), num_tables, hash_size));
414 self.use_lsh = true;
415
416 if let Some(ref mut lsh) = self.lsh_index {
418 for (vector, evaluation) in self.similarity_search.get_all_positions() {
419 lsh.add_vector(vector, evaluation);
420 }
421 }
422 }
423
424 pub fn add_position(&mut self, board: &Board, evaluation: f32) {
426 if !self.is_position_safe(board) {
428 return; }
430
431 let vector = self.encoder.encode(board);
432 self.similarity_search
433 .add_position(vector.clone(), evaluation);
434
435 self.position_vectors.push(vector.clone());
437 self.position_boards.push(*board);
438 self.position_evaluations.push(evaluation);
439
440 if let Some(ref mut lsh) = self.lsh_index {
442 lsh.add_vector(vector.clone(), evaluation);
443 }
444
445 if self.use_manifold {
447 if let Some(ref learner) = self.manifold_learner {
448 let compressed = learner.encode(&vector);
449
450 if let Some(ref mut search) = self.manifold_similarity_search {
451 search.add_position(compressed.clone(), evaluation);
452 }
453
454 if let Some(ref mut lsh) = self.manifold_lsh_index {
455 lsh.add_vector(compressed, evaluation);
456 }
457 }
458 }
459 }
460
    /// Find the `k` most similar stored positions to `board`.
    ///
    /// Each result is `(vector, evaluation, similarity)`. Search backends are
    /// tried in priority order: manifold LSH, manifold exact, full-vector LSH,
    /// and finally the exact linear search.
    pub fn find_similar_positions(&self, board: &Board, k: usize) -> Vec<(Array1<f32>, f32, f32)> {
        let query_vector = self.encoder.encode(board);

        // Manifold path: compress the query and prefer the approximate index.
        if self.use_manifold {
            if let Some(ref manifold_learner) = self.manifold_learner {
                let compressed_query = manifold_learner.encode(&query_vector);

                if let Some(ref lsh) = self.manifold_lsh_index {
                    return lsh.query(&compressed_query, k);
                }

                if let Some(ref search) = self.manifold_similarity_search {
                    return search.search(&compressed_query, k);
                }
                // No manifold structures available: fall through to the
                // full-vector backends below.
            }
        }

        // Full-vector approximate search, when enabled.
        if self.use_lsh {
            if let Some(ref lsh_index) = self.lsh_index {
                return lsh_index.query(&query_vector, k);
            }
        }

        // Exact linear search fallback.
        self.similarity_search.search(&query_vector, k)
    }
492
493 pub fn find_similar_positions_with_indices(
495 &self,
496 board: &Board,
497 k: usize,
498 ) -> Vec<(usize, f32, f32)> {
499 let query_vector = self.encoder.encode(board);
500
501 let mut results = Vec::new();
504
505 for (i, stored_vector) in self.position_vectors.iter().enumerate() {
506 let similarity = self.encoder.similarity(&query_vector, stored_vector);
507 let eval = self.position_evaluations.get(i).copied().unwrap_or(0.0);
508 results.push((i, eval, similarity));
509 }
510
511 results.sort_by(|a, b| b.2.partial_cmp(&a.2).unwrap_or(std::cmp::Ordering::Equal));
513 results.truncate(k);
514
515 results
516 }
517
    /// Evaluate a position using the hybrid pipeline.
    ///
    /// Order of precedence:
    /// 1. Opening book hit — returned immediately.
    /// 2. No similar positions — pure tactical search if available, else `None`.
    /// 3. Otherwise a similarity-weighted average of the 5 nearest positions,
    ///    optionally blended with a tactical search result when the pattern
    ///    confidence falls below the configured threshold.
    pub fn evaluate_position(&mut self, board: &Board) -> Option<f32> {
        if let Some(entry) = self.get_opening_entry(board) {
            return Some(entry.evaluation);
        }

        let similar_positions = self.find_similar_positions(board, 5);

        // No pattern data at all: fall back to tactical search if we have one.
        if similar_positions.is_empty() {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let result = tactical_search.search(board);
                return Some(result.evaluation);
            }
            return None;
        }

        // Similarity-weighted average of neighbour evaluations.
        let mut weighted_sum = 0.0;
        let mut weight_sum = 0.0;
        let mut similarity_scores = Vec::new();

        for (_, evaluation, similarity) in &similar_positions {
            let weight = *similarity;
            weighted_sum += evaluation * weight;
            weight_sum += weight;
            similarity_scores.push(*similarity);
        }

        // NOTE(review): if every similarity is 0.0 this divides by zero and
        // yields NaN — confirm similarity scores are always positive here.
        let pattern_evaluation = weighted_sum / weight_sum;

        // Confidence = average similarity scaled by how close we are to having
        // `min_similar_positions` neighbours (capped at 1.0).
        let avg_similarity = similarity_scores.iter().sum::<f32>() / similarity_scores.len() as f32;
        let count_factor = (similar_positions.len() as f32
            / self.hybrid_config.min_similar_positions as f32)
            .min(1.0);
        let pattern_confidence = avg_similarity * count_factor;

        // Refine tactically only when enabled, confidence is low, and a
        // tactical searcher exists.
        let use_tactical = self.hybrid_config.enable_tactical_refinement
            && pattern_confidence < self.hybrid_config.pattern_confidence_threshold
            && self.tactical_search.is_some();

        if use_tactical {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let tactical_result = if tactical_search.config.enable_parallel_search {
                    tactical_search.search_parallel(board)
                } else {
                    tactical_search.search(board)
                };

                // The pattern weight shrinks with confidence; the remainder
                // goes to the tactical result.
                let pattern_weight = self.hybrid_config.pattern_weight * pattern_confidence;
                let tactical_weight = 1.0 - pattern_weight;

                let hybrid_evaluation = (pattern_evaluation * pattern_weight)
                    + (tactical_result.evaluation * tactical_weight);

                Some(hybrid_evaluation)
            } else {
                // Unreachable in practice: use_tactical implies is_some().
                Some(pattern_evaluation)
            }
        } else {
            Some(pattern_evaluation)
        }
    }
596
    /// Encode a board into the engine's feature-vector representation.
    pub fn encode_position(&self, board: &Board) -> Array1<f32> {
        self.encoder.encode(board)
    }
601
602 pub fn calculate_similarity(&self, board1: &Board, board2: &Board) -> f32 {
604 let vec1 = self.encoder.encode(board1);
605 let vec2 = self.encoder.encode(board2);
606 self.encoder.similarity(&vec1, &vec2)
607 }
608
    /// Number of positions currently held by the similarity search index.
    pub fn knowledge_base_size(&self) -> usize {
        self.similarity_search.size()
    }
613
614 pub fn save_training_data<P: AsRef<std::path::Path>>(
616 &self,
617 path: P,
618 ) -> Result<(), Box<dyn std::error::Error>> {
619 use crate::training::{TrainingData, TrainingDataset};
620
621 let mut dataset = TrainingDataset::new();
622
623 for (i, board) in self.position_boards.iter().enumerate() {
625 if i < self.position_evaluations.len() {
626 dataset.data.push(TrainingData {
627 board: *board,
628 evaluation: self.position_evaluations[i],
629 depth: 15, game_id: i, });
632 }
633 }
634
635 dataset.save_incremental(path)?;
636 println!("Saved {} positions to training data", dataset.data.len());
637 Ok(())
638 }
639
    /// Load training data from `path`, adding only positions not already in
    /// the knowledge base.
    ///
    /// If a sibling file with a `.bin` extension exists, the optimized binary
    /// loader is used instead. Progress bars report deduplication and
    /// insertion phases.
    ///
    /// # Errors
    /// Propagates dataset-load and progress-bar template errors.
    pub fn load_training_data_incremental<P: AsRef<std::path::Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use crate::training::TrainingDataset;
        use indicatif::{ProgressBar, ProgressStyle};
        use std::collections::HashSet;

        let existing_size = self.knowledge_base_size();

        // Prefer the compressed binary sibling (e.g. data.json -> data.bin).
        let path_ref = path.as_ref();
        let binary_path = path_ref.with_extension("bin");
        if binary_path.exists() {
            println!("š Loading optimized binary format...");
            return self.load_training_data_binary(binary_path);
        }

        println!("š Loading training data from {}...", path_ref.display());
        let dataset = TrainingDataset::load(path)?;

        let total_positions = dataset.data.len();
        if total_positions == 0 {
            println!("ā ļø No positions found in dataset");
            return Ok(());
        }

        let dedup_pb = ProgressBar::new(total_positions as u64);
        dedup_pb.set_style(
            ProgressStyle::default_bar()
                .template("š Checking duplicates [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        // Phase 1: O(1) dedup against both existing and incoming boards.
        let mut existing_boards: HashSet<_> = self.position_boards.iter().cloned().collect();
        let mut new_positions = Vec::new();
        let mut new_evaluations = Vec::new();

        for (i, data) in dataset.data.into_iter().enumerate() {
            if !existing_boards.contains(&data.board) {
                existing_boards.insert(data.board);
                new_positions.push(data.board);
                new_evaluations.push(data.evaluation);
            }

            // Throttle progress updates to every 1000 entries.
            if i % 1000 == 0 || i == total_positions - 1 {
                dedup_pb.set_position((i + 1) as u64);
                dedup_pb.set_message(format!("{} new positions found", new_positions.len()));
            }
        }
        dedup_pb.finish_with_message(format!("ā Found {} new positions", new_positions.len()));

        if new_positions.is_empty() {
            println!("ā¹ļø No new positions to add (all positions already exist)");
            return Ok(());
        }

        let add_pb = ProgressBar::new(new_positions.len() as u64);
        add_pb.set_style(
            ProgressStyle::default_bar()
                .template("ā Adding positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        // Phase 2: encode and insert the surviving positions.
        for (i, (board, evaluation)) in new_positions
            .into_iter()
            .zip(new_evaluations.into_iter())
            .enumerate()
        {
            self.add_position(&board, evaluation);

            // length() is Some here because the bar was built with a known length.
            if i % 500 == 0 || i == add_pb.length().unwrap() as usize - 1 {
                add_pb.set_position((i + 1) as u64);
                add_pb.set_message("vectors encoded".to_string());
            }
        }
        add_pb.finish_with_message("ā All positions added");

        println!(
            "šÆ Loaded {} new positions (total: {})",
            self.knowledge_base_size() - existing_size,
            self.knowledge_base_size()
        );
        Ok(())
    }
731
    /// Save the knowledge base in a compact binary format:
    /// bincode-serialized, LZ4-compressed, with the uncompressed size
    /// prepended (see `lz4_flex::compress_prepend_size`).
    ///
    /// # Errors
    /// Propagates system-time, serialization, and file-write errors.
    pub fn save_training_data_binary<P: AsRef<std::path::Path>>(
        &self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use lz4_flex::compress_prepend_size;

        println!("š¾ Saving training data in binary format (compressed)...");

        // On-disk schema; must stay in sync with the deserializer in
        // `load_training_data_binary`.
        #[derive(serde::Serialize)]
        struct BinaryTrainingData {
            positions: Vec<String>,
            evaluations: Vec<f32>,
            vectors: Vec<Vec<f32>>,
            created_at: i64,
        }

        let current_time = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)?
            .as_secs() as i64;

        let mut positions = Vec::with_capacity(self.position_boards.len());
        let mut evaluations = Vec::with_capacity(self.position_boards.len());
        let mut vectors = Vec::with_capacity(self.position_boards.len());

        for (i, board) in self.position_boards.iter().enumerate() {
            if i < self.position_evaluations.len() {
                positions.push(board.to_string());
                evaluations.push(self.position_evaluations[i]);

                if i < self.position_vectors.len() {
                    // NOTE(review): if as_slice() returns None (non-contiguous
                    // vector), `vectors` silently falls out of alignment with
                    // `positions` — currently harmless because the loader marks
                    // `vectors` as dead_code, but confirm before relying on it.
                    if let Some(vector_slice) = self.position_vectors[i].as_slice() {
                        vectors.push(vector_slice.to_vec());
                    }
                }
            }
        }

        let binary_data = BinaryTrainingData {
            positions,
            evaluations,
            vectors,
            created_at: current_time,
        };

        let serialized = bincode::serialize(&binary_data)?;

        let compressed = compress_prepend_size(&serialized);

        std::fs::write(path, &compressed)?;

        println!(
            "ā Saved {} positions to binary file ({} bytes compressed)",
            binary_data.positions.len(),
            compressed.len()
        );
        Ok(())
    }
796
797 pub fn load_training_data_binary<P: AsRef<std::path::Path>>(
799 &mut self,
800 path: P,
801 ) -> Result<(), Box<dyn std::error::Error>> {
802 use indicatif::{ProgressBar, ProgressStyle};
803 use lz4_flex::decompress_size_prepended;
804
805 println!("š Loading training data from binary format...");
806
807 #[derive(serde::Deserialize)]
808 struct BinaryTrainingData {
809 positions: Vec<String>,
810 evaluations: Vec<f32>,
811 #[allow(dead_code)]
812 vectors: Vec<Vec<f32>>,
813 #[allow(dead_code)]
814 created_at: i64,
815 }
816
817 let existing_size = self.knowledge_base_size();
818
819 let file_size = std::fs::metadata(&path)?.len();
821 println!(
822 "š¦ Reading {} compressed file...",
823 Self::format_bytes(file_size)
824 );
825
826 let compressed_data = std::fs::read(path)?;
827 println!("š Decompressing data...");
828 let serialized = decompress_size_prepended(&compressed_data)?;
829
830 println!("š Deserializing binary data...");
831 let binary_data: BinaryTrainingData = bincode::deserialize(&serialized)?;
832
833 let total_positions = binary_data.positions.len();
834 if total_positions == 0 {
835 println!("ā ļø No positions found in binary file");
836 return Ok(());
837 }
838
839 println!(
840 "š Processing {} positions from binary format...",
841 total_positions
842 );
843
844 let pb = ProgressBar::new(total_positions as u64);
846 pb.set_style(
847 ProgressStyle::default_bar()
848 .template("ā” Loading positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
849 .progress_chars("āāā")
850 );
851
852 let mut added_count = 0;
853
854 for (i, fen) in binary_data.positions.iter().enumerate() {
856 if i < binary_data.evaluations.len() {
857 if let Ok(board) = fen.parse() {
858 if !self.position_boards.contains(&board) {
860 let mut evaluation = binary_data.evaluations[i];
861
862 if evaluation.abs() > 15.0 {
866 evaluation /= 100.0;
867 }
868
869 self.add_position(&board, evaluation);
870 added_count += 1;
871 }
872 }
873 }
874
875 if i % 1000 == 0 || i == total_positions - 1 {
876 pb.set_position((i + 1) as u64);
877 pb.set_message(format!("{} new positions", added_count));
878 }
879 }
880 pb.finish_with_message(format!("ā
Loaded {} new positions", added_count));
881
882 println!(
883 "šÆ Binary loading complete: {} new positions (total: {})",
884 self.knowledge_base_size() - existing_size,
885 self.knowledge_base_size()
886 );
887 Ok(())
888 }
889
    /// Load training data via a memory-mapped file, sniffing the format:
    /// MessagePack, then raw bincode, then LZ4-compressed bincode.
    ///
    /// Gated behind the `memory_mapped_files` feature.
    ///
    /// # Errors
    /// Returns a feature error if the tier disallows mmap loading, or any
    /// I/O/decompression/deserialization error from the last-resort format.
    pub fn load_training_data_mmap<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("memory_mapped_files")?;

        use memmap2::Mmap;
        use std::fs::File;

        let path_ref = path.as_ref();
        println!(
            "š Loading training data via memory mapping: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        // SAFETY: mapping a file is sound only while no other process
        // truncates or mutates it concurrently; this loader assumes the
        // training file is not modified while being read — TODO confirm.
        let mmap = unsafe { Mmap::map(&file)? };

        // Format sniffing: try MessagePack first.
        if let Ok(data) = rmp_serde::from_slice::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected MessagePack format");
            return self.load_positions_from_tuples(data);
        }

        // Then plain bincode.
        if let Ok(data) = bincode::deserialize::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected bincode format");
            return self.load_positions_from_tuples(data);
        }

        // Finally LZ4-compressed bincode; errors here propagate to the caller.
        let decompressed = lz4_flex::decompress_size_prepended(&mmap)?;
        let data: Vec<(String, f32)> = bincode::deserialize(&decompressed)?;
        println!("š¦ Detected LZ4+bincode format");
        self.load_positions_from_tuples(data)
    }
929
    /// Load training data stored as a MessagePack array of `(fen, eval)`
    /// tuples, streaming through a buffered reader.
    ///
    /// # Errors
    /// Propagates file-open and MessagePack decoding errors.
    pub fn load_training_data_msgpack<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufReader;

        let path_ref = path.as_ref();
        println!(
            "š Loading MessagePack training data: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let data: Vec<(String, f32)> = rmp_serde::from_read(reader)?;

        println!("š¦ MessagePack data loaded: {} positions", data.len());
        self.load_positions_from_tuples(data)
    }
952
    /// Load newline-delimited JSON training data (`{"fen": ..., "evaluation": ...}`
    /// per line), parsing lines in parallel chunks via rayon into a
    /// concurrent map, then inserting the collected positions.
    ///
    /// Malformed lines and lines missing either field are skipped silently.
    ///
    /// # Errors
    /// Propagates file-open and line-read errors.
    pub fn load_training_data_streaming_json<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use dashmap::DashMap;
        use rayon::prelude::*;
        use std::fs::File;
        use std::io::{BufRead, BufReader};
        use std::sync::Arc;

        let path_ref = path.as_ref();
        println!(
            "š Loading JSON with streaming parallel processing: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);

        // Lines are processed in chunks of this size across the rayon pool.
        let chunk_size = 10000;
        let position_map = Arc::new(DashMap::new());

        // NOTE(review): all lines are materialized in memory before the
        // parallel pass, so this is "streaming" only in the parsing sense.
        let lines: Vec<String> = reader.lines().collect::<Result<Vec<_>, _>>()?;
        let total_lines = lines.len();

        lines.par_chunks(chunk_size).for_each(|chunk| {
            for line in chunk {
                if let Ok(data) = serde_json::from_str::<serde_json::Value>(line) {
                    if let (Some(fen), Some(eval)) = (
                        data.get("fen").and_then(|v| v.as_str()),
                        data.get("evaluation").and_then(|v| v.as_f64()),
                    ) {
                        // DashMap dedups by FEN; last write wins.
                        position_map.insert(fen.to_string(), eval as f32);
                    }
                }
            }
        });

        println!(
            "š¦ Parallel JSON processing complete: {} positions from {} lines",
            position_map.len(),
            total_lines
        );

        // Unwrap the Arc if we hold the only reference (avoids cloning every
        // entry); otherwise fall back to copying the map out.
        let data: Vec<(String, f32)> = match Arc::try_unwrap(position_map) {
            Ok(map) => map.into_iter().collect(),
            Err(arc_map) => {
                arc_map
                    .iter()
                    .map(|entry| (entry.key().clone(), *entry.value()))
                    .collect()
            }
        };
        self.load_positions_from_tuples(data)
    }
1015
    /// Load zstd-compressed training data, trying MessagePack first and
    /// falling back to bincode (the file is reopened for the second attempt
    /// because the first decoder consumes the stream).
    ///
    /// # Errors
    /// Propagates file-open, zstd, and (for the bincode fallback)
    /// deserialization errors.
    pub fn load_training_data_compressed<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufReader;

        let path_ref = path.as_ref();
        println!(
            "š Loading zstd compressed training data: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;

        // First attempt: zstd-wrapped MessagePack.
        if let Ok(data) = rmp_serde::from_read::<_, Vec<(String, f32)>>(decoder) {
            println!("š¦ Zstd+MessagePack data loaded: {} positions", data.len());
            return self.load_positions_from_tuples(data);
        }

        // Second attempt: zstd-wrapped bincode, from a fresh decoder.
        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;
        let data: Vec<(String, f32)> = bincode::deserialize_from(decoder)?;

        println!("š¦ Zstd+bincode data loaded: {} positions", data.len());
        self.load_positions_from_tuples(data)
    }
1050
1051 fn load_positions_from_tuples(
1054 &mut self,
1055 data: Vec<(String, f32)>,
1056 ) -> Result<(), Box<dyn std::error::Error>> {
1057 use indicatif::{ProgressBar, ProgressStyle};
1058 use std::collections::HashSet;
1059
1060 let existing_size = self.knowledge_base_size();
1061 let mut seen_positions = HashSet::new();
1062 let mut loaded_count = 0;
1063
1064 let pb = ProgressBar::new(data.len() as u64);
1066 pb.set_style(ProgressStyle::with_template(
1067 "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({per_sec}) {msg}"
1068 )?);
1069
1070 for (fen, evaluation) in data {
1071 pb.inc(1);
1072
1073 if seen_positions.contains(&fen) {
1075 continue;
1076 }
1077 seen_positions.insert(fen.clone());
1078
1079 if let Ok(board) = Board::from_str(&fen) {
1081 self.add_position(&board, evaluation);
1082 loaded_count += 1;
1083
1084 if loaded_count % 1000 == 0 {
1085 pb.set_message(format!("Loaded {} positions", loaded_count));
1086 }
1087 }
1088 }
1089
1090 pb.finish_with_message(format!("ā
Loaded {} new positions", loaded_count));
1091
1092 println!(
1093 "šÆ Ultra-fast loading complete: {} new positions (total: {})",
1094 self.knowledge_base_size() - existing_size,
1095 self.knowledge_base_size()
1096 );
1097
1098 Ok(())
1099 }
1100
1101 fn format_bytes(bytes: u64) -> String {
1103 const UNITS: &[&str] = &["B", "KB", "MB", "GB"];
1104 let mut size = bytes as f64;
1105 let mut unit_index = 0;
1106
1107 while size >= 1024.0 && unit_index < UNITS.len() - 1 {
1108 size /= 1024.0;
1109 unit_index += 1;
1110 }
1111
1112 format!("{:.1} {}", size, UNITS[unit_index])
1113 }
1114
1115 pub fn train_from_dataset_incremental(&mut self, dataset: &crate::training::TrainingDataset) {
1117 let _existing_size = self.knowledge_base_size();
1118 let mut added = 0;
1119
1120 for data in &dataset.data {
1121 if !self.position_boards.contains(&data.board) {
1123 self.add_position(&data.board, data.evaluation);
1124 added += 1;
1125 }
1126 }
1127
1128 println!(
1129 "Added {} new positions from dataset (total: {})",
1130 added,
1131 self.knowledge_base_size()
1132 );
1133 }
1134
    /// Summarize the engine's current knowledge base and enabled subsystems.
    pub fn training_stats(&self) -> TrainingStats {
        TrainingStats {
            // Size of the similarity index vs. the deduplicated board store;
            // the two can differ if either is populated independently.
            total_positions: self.knowledge_base_size(),
            unique_positions: self.position_boards.len(),
            has_move_data: !self.position_moves.is_empty(),
            move_data_entries: self.position_moves.len(),
            lsh_enabled: self.use_lsh,
            manifold_enabled: self.use_manifold,
            opening_book_enabled: self.opening_book.is_some(),
        }
    }
1147
    /// Scan the working directory for well-known training/tactical data file
    /// names and load any that exist, returning the paths actually loaded.
    ///
    /// Load failures are swallowed (only a message is printed) so one bad
    /// file does not abort the rest.
    ///
    /// # Errors
    /// Propagates only progress-bar template errors; per-file load errors
    /// are ignored.
    pub fn auto_load_training_data(&mut self) -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        // Conventional file names for general training data...
        let common_files = vec![
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
            "my_training.json",
        ];

        // ...and for tactical puzzle collections.
        let tactical_files = vec![
            "tactical_puzzles.json",
            "lichess_puzzles.json",
            "my_puzzles.json",
        ];

        let mut available_files = Vec::new();
        for file_path in &common_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "training"));
            }
        }
        for file_path in &tactical_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "tactical"));
            }
        }

        if available_files.is_empty() {
            return Ok(Vec::new());
        }

        println!(
            "š Found {} training files to auto-load",
            available_files.len()
        );

        let pb = ProgressBar::new(available_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template("š Auto-loading files [{elapsed_precise}] [{bar:40.blue/cyan}] {pos}/{len} {msg}")?
                .progress_chars("āāā")
        );

        let mut loaded_files = Vec::new();

        for (i, (file_path, file_type)) in available_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            // Dispatch on file kind; each success records the path.
            let result = match *file_type {
                "training" => self.load_training_data_incremental(file_path).map(|_| {
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                "tactical" => crate::training::TacticalPuzzleParser::load_tactical_puzzles(
                    file_path,
                )
                .map(|puzzles| {
                    crate::training::TacticalPuzzleParser::load_into_engine_incremental(
                        &puzzles, self,
                    );
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                _ => Ok(()),
            };

            // NOTE(review): the error path prints the same "Loading complete"
            // message as success, which hides failures — looks like placeholder
            // text; confirm intended wording.
            if let Err(_e) = result {
                println!("Loading complete");
            }
        }

        pb.set_position(available_files.len() as u64);
        pb.finish_with_message(format!("ā Auto-loaded {} files", loaded_files.len()));

        Ok(loaded_files)
    }
1230
1231 pub fn load_lichess_puzzles_premium<P: AsRef<std::path::Path>>(
1233 &mut self,
1234 csv_path: P,
1235 ) -> Result<(), Box<dyn std::error::Error>> {
1236 self.require_feature("ultra_fast_loading")?; println!("š„ Loading Lichess puzzles with premium performance...");
1239 let puzzle_entries =
1240 crate::lichess_loader::load_lichess_puzzles_premium_with_moves(csv_path)?;
1241
1242 for (board, evaluation, best_move) in puzzle_entries {
1243 self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
1244 }
1245
1246 println!("ā
Premium Lichess puzzle loading complete!");
1247 Ok(())
1248 }
1249
1250 pub fn load_lichess_puzzles_basic<P: AsRef<std::path::Path>>(
1252 &mut self,
1253 csv_path: P,
1254 max_puzzles: usize,
1255 ) -> Result<(), Box<dyn std::error::Error>> {
1256 println!(
1257 "š Loading Lichess puzzles (basic tier, limited to {} puzzles)...",
1258 max_puzzles
1259 );
1260 let puzzle_entries =
1261 crate::lichess_loader::load_lichess_puzzles_basic_with_moves(csv_path, max_puzzles)?;
1262
1263 for (board, evaluation, best_move) in puzzle_entries {
1264 self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
1265 }
1266
1267 println!("ā
Basic Lichess puzzle loading complete!");
1268 Ok(())
1269 }
1270
    /// Create an engine with the opening book enabled and any training files
    /// found in the working directory auto-loaded.
    ///
    /// # Errors
    /// Propagates errors from [`Self::auto_load_training_data`].
    pub fn new_with_auto_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        let loaded_files = engine.auto_load_training_data()?;

        if loaded_files.is_empty() {
            println!("š¤ Created fresh engine (no training data found)");
        } else {
            println!(
                "š Created engine with auto-loaded training data from {} files",
                loaded_files.len()
            );
            // NOTE(review): stats are computed but never reported; the two
            // duplicated "Loading complete" prints below look like placeholder
            // text that replaced a stats summary — confirm intended output.
            let _stats = engine.training_stats();
            println!("Loading complete");
            println!("Loading complete");
        }

        Ok(engine)
    }
1293
    /// Create an engine optimized for startup time: enables the opening book
    /// and persistence, prefers known binary training files, falls back to
    /// JSON auto-loading, and attempts to load pre-trained manifold models.
    ///
    /// # Errors
    /// Propagates progress-template and JSON auto-load errors; individual
    /// binary-file and persistence failures are swallowed with a message.
    pub fn new_with_fast_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence failure is non-fatal.
        // NOTE(review): the message printed here ("Loading complete") does not
        // describe a persistence failure — looks like placeholder text.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        // Well-known binary training files, in preference order.
        let binary_files = [
            "training_data_a100.bin",
            "training_data.bin",
            "tactical_training_data.bin",
            "engine_training.bin",
            "chess_training.bin",
        ];

        let existing_binary_files: Vec<_> = binary_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        let mut loaded_count = 0;

        if !existing_binary_files.is_empty() {
            println!(
                "ā” Fast loading: Found {} binary files",
                existing_binary_files.len()
            );

            let pb = ProgressBar::new(existing_binary_files.len() as u64);
            pb.set_style(
                ProgressStyle::default_bar()
                    .template("š Fast loading [{elapsed_precise}] [{bar:40.green/cyan}] {pos}/{len} {msg}")?
                    .progress_chars("āāā")
            );

            // Load each binary file; failures are skipped silently.
            for (i, file_path) in existing_binary_files.iter().enumerate() {
                pb.set_position(i as u64);
                pb.set_message("Processing...".to_string());

                if engine.load_training_data_binary(file_path).is_ok() {
                    loaded_count += 1;
                }
            }

            pb.set_position(existing_binary_files.len() as u64);
            pb.finish_with_message(format!("ā Loaded {} binary files", loaded_count));
        } else {
            println!("š¦ No binary files found, falling back to JSON auto-loading...");
            let _ = engine.auto_load_training_data()?;
        }

        // Missing manifold models are expected on first run; just advise.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({})", e);
            println!("   Use --rebuild-models flag to train new models");
        }

        let stats = engine.training_stats();
        println!(
            "ā” Fast engine ready with {} positions ({} binary files loaded)",
            stats.total_positions, loaded_count
        );

        Ok(engine)
    }
1368
    /// Creates a new engine that auto-discovers training files, consolidates
    /// duplicates by base name (keeping the best format of each), and loads
    /// every consolidated dataset.
    pub fn new_with_auto_discovery(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with AUTO-DISCOVERY and format consolidation...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence is best-effort; failure is reported but not fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        // NOTE(review): the bool flag here is `true` while
        // `new_with_instant_load` passes `false`; its exact meaning is defined
        // in AutoDiscovery — confirm before relying on it.
        let discovered_files = AutoDiscovery::discover_training_files(".", true)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No training data found. Use convert methods to create optimized files.");
            return Ok(engine);
        }

        // Group files sharing a base name; keep the best format per group.
        let consolidated = AutoDiscovery::consolidate_by_base_name(discovered_files.clone());

        let mut total_loaded = 0;
        for (base_name, best_file) in &consolidated {
            println!("š Loading {} ({})", base_name, best_file.format);

            // Count this file's contribution by diffing the knowledge-base
            // size before and after loading.
            let initial_size = engine.knowledge_base_size();
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            let loaded_count = engine.knowledge_base_size() - initial_size;
            total_loaded += loaded_count;

            println!(" ā
Loaded {} positions", loaded_count);
        }

        // Report old-format files eligible for cleanup (dry-run: `true`).
        let cleanup_candidates = AutoDiscovery::get_cleanup_candidates(&discovered_files);
        if !cleanup_candidates.is_empty() {
            println!(
                "š§¹ Found {} old format files that can be cleaned up:",
                cleanup_candidates.len()
            );
            AutoDiscovery::cleanup_old_formats(&cleanup_candidates, true)?;
            println!(" š” To actually remove old files, run: cargo run --bin cleanup_formats");
        }

        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({})", e);
        }

        println!(
            "šÆ Engine ready: {} positions loaded from {} datasets",
            total_loaded,
            consolidated.len()
        );
        Ok(engine)
    }
1428
    /// Creates a new engine that loads only the single best-ranked training
    /// file for the fastest possible startup, falling back to the bundled
    /// starter dataset when nothing is found.
    pub fn new_with_instant_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with INSTANT loading...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence is best-effort; failure is reported but not fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        let discovered_files = AutoDiscovery::discover_training_files(".", false)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No user training data found, loading starter dataset...");
            // The starter dataset is optional too; an empty engine is fine.
            if let Err(_e) = engine.load_starter_dataset() {
                println!("Loading complete");
                println!("ā¹ļø Starting with empty engine");
            } else {
                println!(
                    "ā
Loaded starter dataset with {} positions",
                    engine.knowledge_base_size()
                );
            }
            return Ok(engine);
        }

        // Load only the first discovered file; the rest are ignored here.
        if let Some(best_file) = discovered_files.first() {
            println!(
                "ā” Loading {} format: {}",
                best_file.format,
                best_file.path.display()
            );
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            println!(
                "ā
Loaded {} positions from {} format",
                engine.knowledge_base_size(),
                best_file.format
            );
        }

        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({})", e);
        }

        println!(
            "šÆ Engine ready: {} positions loaded",
            engine.knowledge_base_size()
        );
        Ok(engine)
    }
1485
1486 pub fn new_with_license(vector_size: usize, license_url: String) -> Self {
1488 let mut engine = Self::new(vector_size);
1489 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new(license_url));
1490 engine
1491 }
1492
1493 pub fn new_with_offline_license(vector_size: usize) -> Self {
1495 let mut engine = Self::new(vector_size);
1496 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new_offline());
1497 engine
1498 }
1499
1500 pub async fn activate_license(&mut self, key: &str) -> Result<FeatureTier, LicenseError> {
1502 if let Some(ref mut checker) = self.licensed_feature_checker {
1503 let tier = checker.activate_license(key).await?;
1504 self.feature_checker.upgrade_tier(tier.clone());
1506 Ok(tier)
1507 } else {
1508 Err(LicenseError::InvalidFormat(
1509 "No license checker initialized".to_string(),
1510 ))
1511 }
1512 }
1513
1514 pub async fn check_licensed_feature(&mut self, feature: &str) -> Result<(), FeatureError> {
1516 if let Some(ref mut checker) = self.licensed_feature_checker {
1517 checker.check_feature(feature).await
1518 } else {
1519 self.feature_checker.check_feature(feature)
1521 }
1522 }
1523
1524 pub fn load_license_cache<P: AsRef<std::path::Path>>(
1526 &mut self,
1527 path: P,
1528 ) -> Result<(), Box<dyn std::error::Error>> {
1529 if let Some(ref mut checker) = self.licensed_feature_checker {
1530 checker.load_cache(path)?;
1531 }
1532 Ok(())
1533 }
1534
1535 pub fn save_license_cache<P: AsRef<std::path::Path>>(
1537 &self,
1538 path: P,
1539 ) -> Result<(), Box<dyn std::error::Error>> {
1540 if let Some(ref checker) = self.licensed_feature_checker {
1541 checker.save_cache(path)?;
1542 }
1543 Ok(())
1544 }
1545
1546 fn is_position_safe(&self, board: &Board) -> bool {
1551 match std::panic::catch_unwind(|| {
1553 use chess::MoveGen;
1554 let _legal_moves: Vec<ChessMove> = MoveGen::new_legal(board).collect();
1555 true
1556 }) {
1557 Ok(_) => true,
1558 Err(_) => {
1559 false
1561 }
1562 }
1563 }
1564
1565 pub fn check_gpu_acceleration(&self) -> Result<(), Box<dyn std::error::Error>> {
1567 self.feature_checker.check_feature("gpu_acceleration")?;
1568
1569 match crate::gpu_acceleration::GPUAccelerator::new() {
1571 Ok(_) => {
1572 println!("š„ GPU acceleration available and ready");
1573 Ok(())
1574 }
1575 Err(_e) => Err("Processing...".to_string().into()),
1576 }
1577 }
1578
    /// Seeds the engine with a small starter dataset.
    ///
    /// Prefers `training_data/starter_dataset.json` when present on disk;
    /// otherwise falls back to an embedded three-position JSON sample.
    /// Evaluations with magnitude above 15 are treated as centipawns and
    /// scaled down to pawns. Entries with invalid FENs are skipped silently.
    pub fn load_starter_dataset(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let starter_data = if let Ok(file_content) =
            std::fs::read_to_string("training_data/starter_dataset.json")
        {
            file_content
        } else {
            // Embedded fallback: start position plus 1.e4 and 1...e5.
            r#"[
            {
                "fen": "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1",
                "evaluation": 0.0,
                "best_move": null,
                "depth": 0
            },
            {
                "fen": "rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1",
                "evaluation": 0.1,
                "best_move": "e7e5",
                "depth": 2
            },
            {
                "fen": "rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq e6 0 2",
                "evaluation": 0.0,
                "best_move": "g1f3",
                "depth": 2
            }
        ]"#
            .to_string()
        };

        let training_data: Vec<serde_json::Value> = serde_json::from_str(&starter_data)?;

        for entry in training_data {
            // Only entries carrying both "fen" and "evaluation" are usable.
            if let (Some(fen), Some(evaluation)) = (entry.get("fen"), entry.get("evaluation")) {
                if let (Some(fen_str), Some(eval_f64)) = (fen.as_str(), evaluation.as_f64()) {
                    match chess::Board::from_str(fen_str) {
                        Ok(board) => {
                            let mut eval = eval_f64 as f32;

                            // Centipawn-scaled values are normalized to pawns.
                            if eval.abs() > 15.0 {
                                eval /= 100.0;
                            }

                            self.add_position(&board, eval);
                        }
                        Err(_) => {
                            // Unparseable FEN: skip this entry.
                            continue;
                        }
                    }
                }
            }
        }

        Ok(())
    }
1640
    /// Dispatches loading of `path` to the loader matching the `format` tag.
    ///
    /// Files larger than 10 MB skip format dispatch and go through the
    /// ultra-fast loader instead.
    fn load_file_by_format(
        &mut self,
        path: &std::path::Path,
        format: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let file_size = std::fs::metadata(path)?.len();

        // 10 MB threshold for switching to the ultra-fast loading path.
        if file_size > 10_000_000 {
            println!(
                "š Large file detected ({:.1} MB) - using ultra-fast loader",
                file_size as f64 / 1_000_000.0
            );
            return self.ultra_fast_load_any_format(path);
        }

        match format {
            "MMAP" => self.load_training_data_mmap(path),
            "MSGPACK" => self.load_training_data_msgpack(path),
            "BINARY" => self.load_training_data_streaming_binary(path),
            "ZSTD" => self.load_training_data_compressed(path),
            "JSON" => self.load_training_data_streaming_json_v2(path),
            // Unknown format tag.
            _ => Err("Processing...".to_string().into()),
        }
    }
1669
1670 pub fn ultra_fast_load_any_format<P: AsRef<std::path::Path>>(
1672 &mut self,
1673 path: P,
1674 ) -> Result<(), Box<dyn std::error::Error>> {
1675 self.require_feature("ultra_fast_loading")?;
1677
1678 let mut loader = UltraFastLoader::new_for_massive_datasets();
1679 loader.ultra_load_binary(path, self)?;
1680
1681 let stats = loader.get_stats();
1682 println!("š Ultra-fast loading complete:");
1683 println!(" ā
Loaded: {} positions", stats.loaded);
1684 println!("Loading complete");
1685 println!("Loading complete");
1686 println!(" š Success rate: {:.1}%", stats.success_rate() * 100.0);
1687
1688 Ok(())
1689 }
1690
1691 pub fn load_training_data_streaming_binary<P: AsRef<std::path::Path>>(
1694 &mut self,
1695 path: P,
1696 ) -> Result<(), Box<dyn std::error::Error>> {
1697 let mut loader = StreamingLoader::new();
1698 loader.stream_load_binary(path, self)?;
1699
1700 println!("š Streaming binary load complete:");
1701 println!(" Loaded: {} new positions", loader.loaded_count);
1702 println!("Loading complete");
1703 println!("Loading complete");
1704
1705 Ok(())
1706 }
1707
1708 pub fn load_training_data_streaming_json_v2<P: AsRef<std::path::Path>>(
1711 &mut self,
1712 path: P,
1713 ) -> Result<(), Box<dyn std::error::Error>> {
1714 let mut loader = StreamingLoader::new();
1715
1716 let batch_size = if std::fs::metadata(path.as_ref())?.len() > 100_000_000 {
1718 20000 } else {
1721 5000 };
1723
1724 loader.stream_load_json(path, self, batch_size)?;
1725
1726 println!("š Streaming JSON load complete:");
1727 println!(" Loaded: {} new positions", loader.loaded_count);
1728 println!("Loading complete");
1729 println!("Loading complete");
1730
1731 Ok(())
1732 }
1733
1734 pub fn new_for_massive_datasets(
1737 vector_size: usize,
1738 ) -> Result<Self, Box<dyn std::error::Error>> {
1739 println!("š Initializing engine for MASSIVE datasets (100k-1M+ positions)...");
1740 let mut engine = Self::new(vector_size);
1741 engine.enable_opening_book();
1742
1743 let discovered_files = AutoDiscovery::discover_training_files(".", false)?;
1745
1746 if discovered_files.is_empty() {
1747 println!("ā¹ļø No training data found");
1748 return Ok(engine);
1749 }
1750
1751 let largest_file = discovered_files
1753 .iter()
1754 .max_by_key(|f| f.size_bytes)
1755 .unwrap();
1756
1757 println!(
1758 "šÆ Loading largest dataset: {} ({} bytes)",
1759 largest_file.path.display(),
1760 largest_file.size_bytes
1761 );
1762
1763 engine.ultra_fast_load_any_format(&largest_file.path)?;
1765
1766 println!(
1767 "šÆ Engine ready: {} positions loaded",
1768 engine.knowledge_base_size()
1769 );
1770 Ok(engine)
1771 }
1772
    /// Converts known JSON training files to MessagePack (`.msgpack`) for
    /// smaller files and faster loading.
    ///
    /// Legacy A100 binary data is first converted to JSON so it can take part
    /// in this pass. Two JSON layouts are handled: an array of `[fen, eval]`
    /// tuples and an array of `{"fen": ..., "evaluation": ...}` objects.
    pub fn convert_to_msgpack() -> Result<(), Box<dyn std::error::Error>> {
        use serde_json::Value;
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Make the legacy A100 binary data available as JSON first.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        let input_files = [
            "training_data.json",
            "tactical_training_data.json",
            "training_data_a100.json",
        ];

        for input_file in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            let output_file_path = input_file.replace(".json", ".msgpack");
            println!(
                "š Converting {} ā {} (MessagePack format)",
                input_file, output_file_path
            );

            let file = File::open(input_path)?;
            let reader = BufReader::new(file);
            let json_value: Value = serde_json::from_reader(reader)?;

            // Normalize both supported JSON layouts into (fen, eval) pairs.
            let data: Vec<(String, f32)> = match json_value {
                Value::Array(arr) if !arr.is_empty() => {
                    if let Some(first) = arr.first() {
                        if first.is_array() {
                            // Layout A: [[fen, eval], ...] tuples.
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Array(tuple) = item {
                                        if tuple.len() >= 2 {
                                            let fen = tuple[0].as_str()?.to_string();
                                            let mut eval = tuple[1].as_f64()? as f32;

                                            // Magnitudes above 15 are assumed to
                                            // be centipawns; scale to pawns.
                                            if eval.abs() > 15.0 {
                                                eval /= 100.0;
                                            }

                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else if first.is_object() {
                            // Layout B: [{"fen": ..., "evaluation": ...}, ...].
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Object(obj) = item {
                                        let fen = obj.get("fen")?.as_str()?.to_string();
                                        let mut eval = obj.get("evaluation")?.as_f64()? as f32;

                                        // Same centipawn-to-pawn normalization.
                                        if eval.abs() > 15.0 {
                                            eval /= 100.0;
                                        }

                                        Some((fen, eval))
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else {
                            return Err("Processing...".to_string().into());
                        }
                    } else {
                        Vec::new()
                    }
                }
                _ => return Err("Processing...".to_string().into()),
            };

            if data.is_empty() {
                println!("Loading complete");
                continue;
            }

            let output_file = File::create(&output_file_path)?;
            let mut writer = BufWriter::new(output_file);
            rmp_serde::encode::write(&mut writer, &data)?;

            // Report the size reduction achieved by this conversion.
            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(&output_file_path).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            println!(
                "ā
Converted: {} ā {} ({:.1}x size reduction, {} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio,
                data.len()
            );
        }

        Ok(())
    }
1893
1894 pub fn convert_a100_binary_to_json() -> Result<(), Box<dyn std::error::Error>> {
1896 use std::fs::File;
1897 use std::io::BufWriter;
1898
1899 let binary_path = "training_data_a100.bin";
1900 let json_path = "training_data_a100.json";
1901
1902 if !std::path::Path::new(binary_path).exists() {
1903 println!("Loading complete");
1904 return Ok(());
1905 }
1906
1907 println!(
1908 "š Converting A100 binary data {} ā {} (JSON format)",
1909 binary_path, json_path
1910 );
1911
1912 let mut engine = ChessVectorEngine::new(1024);
1914 engine.load_training_data_binary(binary_path)?;
1915
1916 let mut data = Vec::new();
1918 for (i, board) in engine.position_boards.iter().enumerate() {
1919 if i < engine.position_evaluations.len() {
1920 data.push(serde_json::json!({
1921 "fen": board.to_string(),
1922 "evaluation": engine.position_evaluations[i],
1923 "depth": 15,
1924 "game_id": i
1925 }));
1926 }
1927 }
1928
1929 let file = File::create(json_path)?;
1931 let writer = BufWriter::new(file);
1932 serde_json::to_writer(writer, &data)?;
1933
1934 println!(
1935 "ā
Converted A100 data: {} positions ā {}",
1936 data.len(),
1937 json_path
1938 );
1939 Ok(())
1940 }
1941
    /// Compresses known JSON and binary training files with Zstd (level 9),
    /// writing `.zst` siblings next to the originals.
    pub fn convert_to_zstd() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Make the legacy A100 binary data available as JSON first.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        // (input, output) pairs; missing inputs are skipped.
        let input_files = [
            ("training_data.json", "training_data.zst"),
            ("tactical_training_data.json", "tactical_training_data.zst"),
            ("training_data_a100.json", "training_data_a100.zst"),
            ("training_data.bin", "training_data.bin.zst"),
            (
                "tactical_training_data.bin",
                "tactical_training_data.bin.zst",
            ),
            ("training_data_a100.bin", "training_data_a100.bin.zst"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!(
                "š Converting {} ā {} (Zstd compression)",
                input_file, output_file
            );

            // Stream-copy the input through a level-9 Zstd encoder.
            let input_file = File::open(input_path)?;
            let output_file_handle = File::create(output_file)?;
            let writer = BufWriter::new(output_file_handle);
            let mut encoder = zstd::stream::Encoder::new(writer, 9)?;
            std::io::copy(&mut BufReader::new(input_file), &mut encoder)?;
            // finish() completes the Zstd frame; the output is unusable
            // without it.
            encoder.finish()?;

            // Report the compression ratio achieved.
            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            println!(
                "ā
Compressed: {} ā {} ({:.1}x size reduction)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio
            );
        }

        Ok(())
    }
1998
    /// Converts JSON and MessagePack training files into `.mmap` files
    /// (MessagePack-encoded) intended for memory-mapped loading.
    ///
    /// NOTE(review): the `.json` and `.msgpack` inputs for a dataset map to
    /// the SAME `.mmap` output name, so a later entry in the list overwrites
    /// the earlier conversion — confirm this precedence is intended.
    pub fn convert_to_mmap() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Make the legacy A100 binary data available as JSON first.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        // (input, output) pairs; missing inputs are skipped.
        let input_files = [
            ("training_data.json", "training_data.mmap"),
            ("tactical_training_data.json", "tactical_training_data.mmap"),
            ("training_data_a100.json", "training_data_a100.mmap"),
            ("training_data.msgpack", "training_data.mmap"),
            (
                "tactical_training_data.msgpack",
                "tactical_training_data.mmap",
            ),
            ("training_data_a100.msgpack", "training_data_a100.mmap"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!(
                "š Converting {} ā {} (Memory-mapped format)",
                input_file, output_file
            );

            // Normalize the input into (fen, eval) pairs.
            let data: Vec<(String, f32)> = if input_file.ends_with(".json") {
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                let json_value: Value = serde_json::from_reader(reader)?;

                match json_value {
                    Value::Array(arr) if !arr.is_empty() => {
                        if let Some(first) = arr.first() {
                            if first.is_array() {
                                // Layout A: [[fen, eval], ...] tuples.
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Array(tuple) = item {
                                            if tuple.len() >= 2 {
                                                let fen = tuple[0].as_str()?.to_string();
                                                let mut eval = tuple[1].as_f64()? as f32;

                                                // Centipawns -> pawns normalization.
                                                if eval.abs() > 15.0 {
                                                    eval /= 100.0;
                                                }

                                                Some((fen, eval))
                                            } else {
                                                None
                                            }
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else if first.is_object() {
                                // Layout B: [{"fen": ..., "evaluation": ...}, ...].
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Object(obj) = item {
                                            let fen = obj.get("fen")?.as_str()?.to_string();
                                            let mut eval = obj.get("evaluation")?.as_f64()? as f32;

                                            // Same centipawn-to-pawn normalization.
                                            if eval.abs() > 15.0 {
                                                eval /= 100.0;
                                            }

                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else {
                                return Err("Failed to process training data".into());
                            }
                        } else {
                            Vec::new()
                        }
                    }
                    _ => return Err("Processing...".to_string().into()),
                }
            } else if input_file.ends_with(".msgpack") {
                // MessagePack inputs already decode to (fen, eval) pairs.
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                rmp_serde::from_read(reader)?
            } else {
                return Err("Unsupported input format for memory mapping".into());
            };

            // The .mmap output is written as MessagePack.
            let output_file_handle = File::create(output_file)?;
            let mut writer = BufWriter::new(output_file_handle);
            rmp_serde::encode::write(&mut writer, &data)?;

            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();

            println!(
                "ā
Memory-mapped file created: {} ā {} ({} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                data.len()
            );
        }

        Ok(())
    }
2124
    /// Converts known JSON training files to the binary format for faster
    /// startup, returning the paths of the binary files produced.
    pub fn convert_json_to_binary() -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let json_files = [
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
        ];

        // Keep only the candidates actually present on disk.
        let existing_json_files: Vec<_> = json_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        if existing_json_files.is_empty() {
            println!("ā¹ļø No JSON training files found to convert");
            return Ok(Vec::new());
        }

        println!(
            "š Converting {} JSON files to binary format...",
            existing_json_files.len()
        );

        let pb = ProgressBar::new(existing_json_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template(
                    "š¦ Converting [{elapsed_precise}] [{bar:40.yellow/blue}] {pos}/{len} {msg}",
                )?
                .progress_chars("āāā"),
        );

        let mut converted_files = Vec::new();

        for (i, json_file) in existing_json_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            let binary_file = std::path::Path::new(json_file).with_extension("bin");

            // Round-trip through a scratch engine: JSON in, binary out.
            // Failures on either side are reported but do not abort the batch.
            let mut temp_engine = Self::new(1024);
            if temp_engine
                .load_training_data_incremental(json_file)
                .is_ok()
            {
                if temp_engine.save_training_data_binary(&binary_file).is_ok() {
                    converted_files.push(binary_file.to_string_lossy().to_string());
                    println!("ā
Converted {} to binary format", json_file);
                } else {
                    println!("Loading complete");
                }
            } else {
                println!("Loading complete");
            }
        }

        pb.set_position(existing_json_files.len() as u64);
        pb.finish_with_message(format!("ā
Converted {} files", converted_files.len()));

        if !converted_files.is_empty() {
            println!("š Binary conversion complete! Startup will be 5-15x faster next time.");
            println!("š Conversion summary:");
            for _conversion in &converted_files {
                println!("Loading complete");
            }
        }

        Ok(converted_files)
    }
2200
    /// Returns whether LSH-based approximate similarity search is enabled.
    pub fn is_lsh_enabled(&self) -> bool {
        self.use_lsh
    }
2205
    /// Returns LSH index statistics, or `None` when no LSH index exists.
    pub fn lsh_stats(&self) -> Option<crate::lsh::LSHStats> {
        self.lsh_index.as_ref().map(|lsh| lsh.stats())
    }
2210
    /// Prepares manifold learning: creates a learner that compresses encoder
    /// vectors by `compression_ratio` plus an empty compressed search index.
    ///
    /// Manifold search stays disabled until `train_manifold_learning` runs.
    ///
    /// # Errors
    /// Fails when the ratio is so high the output dimension rounds to zero,
    /// or when network initialization fails.
    pub fn enable_manifold_learning(&mut self, compression_ratio: f32) -> Result<(), String> {
        let input_dim = self.encoder.vector_size();
        let output_dim = ((input_dim as f32) / compression_ratio) as usize;

        if output_dim == 0 {
            return Err("Compression ratio too high, output dimension would be 0".to_string());
        }

        let mut learner = ManifoldLearner::new(input_dim, output_dim);
        learner.init_network()?;

        self.manifold_learner = Some(learner);
        self.manifold_similarity_search = Some(SimilaritySearch::new(output_dim));
        // Deliberately off until training populates the compressed indices.
        self.use_manifold = false;
        Ok(())
    }
2229
    /// Trains the manifold learner on every vector in the similarity-search
    /// index, rebuilds the compressed indices, and enables manifold search.
    ///
    /// # Errors
    /// Fails when manifold learning is not enabled, when the knowledge base
    /// is empty, or when training/rebuilding fails.
    pub fn train_manifold_learning(&mut self, epochs: usize) -> Result<(), String> {
        if self.manifold_learner.is_none() {
            return Err(
                "Manifold learning not enabled. Call enable_manifold_learning first.".to_string(),
            );
        }

        if self.similarity_search.size() == 0 {
            return Err("No positions in knowledge base to train on.".to_string());
        }

        // Build the training matrix row-by-row from stored position vectors;
        // rows that cannot be fetched are zero-filled.
        let rows = self.similarity_search.size();
        let cols = self.encoder.vector_size();

        let training_matrix = Array2::from_shape_fn((rows, cols), |(row, col)| {
            if let Some((vector, _)) = self.similarity_search.get_position_ref(row) {
                vector[col]
            } else {
                0.0
            }
        });

        if let Some(ref mut learner) = self.manifold_learner {
            learner.train(&training_matrix, epochs)?;
            let compression_ratio = learner.compression_ratio();

            // Explicitly end the mutable borrow of the learner so the calls
            // below can re-borrow `self`.
            let _ = learner;

            self.rebuild_manifold_indices()?;
            self.use_manifold = true;

            println!(
                "Manifold learning training completed. Compression ratio: {:.1}x",
                compression_ratio
            );
        }

        Ok(())
    }
2274
    /// Rebuilds the compressed similarity-search and LSH indices by encoding
    /// every stored position vector through the manifold learner.
    ///
    /// No-op when manifold learning is not enabled.
    fn rebuild_manifold_indices(&mut self) -> Result<(), String> {
        if let Some(ref learner) = self.manifold_learner {
            let output_dim = learner.output_dim();
            // Reset the compressed indices to fresh, empty instances.
            if let Some(ref mut search) = self.manifold_similarity_search {
                *search = SimilaritySearch::new(output_dim);
            }
            if let Some(ref mut lsh) = self.manifold_lsh_index {
                // NOTE(review): table/hash sizes (8, 16) are hard-coded here
                // but configurable via `enable_manifold_lsh` — a rebuild
                // discards any custom sizes; confirm intended.
                *lsh = LSH::new(output_dim, 8, 16);
            }

            // Re-encode every stored vector into the compressed space.
            for (vector, eval) in self.similarity_search.iter_positions() {
                let compressed = learner.encode(vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), eval);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, eval);
                }
            }
        }

        Ok(())
    }
2303
2304 pub fn enable_manifold_lsh(
2306 &mut self,
2307 num_tables: usize,
2308 hash_size: usize,
2309 ) -> Result<(), String> {
2310 if self.manifold_learner.is_none() {
2311 return Err("Manifold learning not enabled".to_string());
2312 }
2313
2314 let output_dim = self.manifold_learner.as_ref().unwrap().output_dim();
2315 self.manifold_lsh_index = Some(LSH::new(output_dim, num_tables, hash_size));
2316
2317 if self.use_manifold {
2319 self.rebuild_manifold_indices()?;
2320 }
2321
2322 Ok(())
2323 }
2324
    /// Returns whether manifold-based search is active (flag set AND a
    /// learner actually present).
    pub fn is_manifold_enabled(&self) -> bool {
        self.use_manifold && self.manifold_learner.is_some()
    }
2329
    /// Returns the manifold learner's compression ratio, or `None` when
    /// manifold learning is not enabled.
    pub fn manifold_compression_ratio(&self) -> Option<f32> {
        self.manifold_learner
            .as_ref()
            .map(|l| l.compression_ratio())
    }
2336
    /// Loads a pre-trained manifold learner from the persistence database,
    /// rebuilds the compressed indices, and enables manifold search.
    ///
    /// # Errors
    /// Fails when persistence is not enabled or no pre-trained model exists.
    pub fn load_manifold_models(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(ref db) = self.database {
            match crate::manifold_learner::ManifoldLearner::load_from_database(db)? {
                Some(learner) => {
                    let compression_ratio = learner.compression_ratio();
                    println!(
                        "š§ Loaded pre-trained manifold learner (compression: {:.1}x)",
                        compression_ratio
                    );

                    self.manifold_learner = Some(learner);
                    self.use_manifold = true;

                    // Populate the compressed indices from the loaded model.
                    self.rebuild_manifold_indices()?;

                    println!("ā
Manifold learning enabled with compressed vectors");
                    Ok(())
                }
                None => Err("No pre-trained manifold models found in database".into()),
            }
        } else {
            Err("Database not initialized - cannot load manifold models".into())
        }
    }
2365
    /// Installs the built-in opening book with standard openings.
    pub fn enable_opening_book(&mut self) {
        self.opening_book = Some(OpeningBook::with_standard_openings());
    }
2370
    /// Replaces the engine's opening book with a caller-supplied one.
    pub fn set_opening_book(&mut self, book: OpeningBook) {
        self.opening_book = Some(book);
    }
2375
2376 pub fn is_opening_position(&self, board: &Board) -> bool {
2378 self.opening_book
2379 .as_ref()
2380 .map(|book| book.contains(board))
2381 .unwrap_or(false)
2382 }
2383
    /// Looks up the opening-book entry for `board`; `None` when the book is
    /// absent or the position is not in it.
    pub fn get_opening_entry(&self, board: &Board) -> Option<&OpeningEntry> {
        self.opening_book.as_ref()?.lookup(board)
    }
2388
    /// Returns opening-book statistics, or `None` when no book is configured.
    pub fn opening_book_stats(&self) -> Option<OpeningBookStats> {
        self.opening_book.as_ref().map(|book| book.stats())
    }
2393
2394 pub fn add_position_with_move(
2396 &mut self,
2397 board: &Board,
2398 evaluation: f32,
2399 chess_move: Option<ChessMove>,
2400 move_outcome: Option<f32>,
2401 ) {
2402 let position_index = self.knowledge_base_size();
2403
2404 self.add_position(board, evaluation);
2406
2407 if let (Some(mov), Some(outcome)) = (chess_move, move_outcome) {
2409 self.position_moves
2410 .entry(position_index)
2411 .or_default()
2412 .push((mov, outcome));
2413 }
2414 }
2415
    /// Recommends up to `num_recommendations` moves for `board`.
    ///
    /// Resolution order:
    /// 1. Opening book entry, when one exists for this position.
    /// 2. Moves observed in similar knowledge-base positions, weighted by
    ///    similarity.
    /// 3. Tactical search (when enabled) plus heuristically ordered legal
    ///    moves as a fallback.
    pub fn recommend_moves(
        &mut self,
        board: &Board,
        num_recommendations: usize,
    ) -> Vec<MoveRecommendation> {
        // 1) Opening book: use book moves with slightly damped confidence.
        if let Some(entry) = self.get_opening_entry(board) {
            let mut recommendations = Vec::new();

            for (chess_move, strength) in &entry.best_moves {
                recommendations.push(MoveRecommendation {
                    chess_move: *chess_move,
                    // Damped by 0.9 so book confidence stays below certainty.
                    confidence: strength * 0.9,
                    from_similar_position_count: 1,
                    average_outcome: entry.evaluation,
                });
            }

            recommendations.sort_by(|a, b| {
                b.confidence
                    .partial_cmp(&a.confidence)
                    .unwrap_or(std::cmp::Ordering::Equal)
            });
            recommendations.truncate(num_recommendations);
            return recommendations;
        }

        // 2) Gather move evidence from the 20 most similar known positions.
        let similar_positions = self.find_similar_positions_with_indices(board, 20);

        // move -> list of (similarity of source position, observed outcome)
        let mut move_data: HashMap<ChessMove, Vec<(f32, f32)>> = HashMap::new();
        use chess::MoveGen;
        // Move generation can panic on malformed positions; bail out safely.
        let legal_moves: Vec<ChessMove> = match std::panic::catch_unwind(|| {
            MoveGen::new_legal(board).collect::<Vec<ChessMove>>()
        }) {
            Ok(moves) => moves,
            Err(_) => {
                return Vec::new();
            }
        };

        for (position_index, _eval, similarity) in similar_positions {
            if let Some(moves) = self.position_moves.get(&position_index) {
                for &(chess_move, outcome) in moves {
                    // Only keep moves that are actually legal here.
                    if legal_moves.contains(&chess_move) {
                        move_data
                            .entry(chess_move)
                            .or_default()
                            .push((similarity, outcome));
                    }
                }
            }
        }

        // 3) No evidence from similar positions: fall back to search and/or
        //    heuristic ordering.
        if move_data.is_empty() {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let tactical_result = tactical_search.search(board);

                if let Some(best_move) = tactical_result.best_move {
                    // Tactical best move gets a fixed 0.75 similarity weight.
                    move_data.insert(best_move, vec![(0.75, tactical_result.evaluation)]);
                }

                let mut ordered_moves = legal_moves.clone();

                // Order: captures first, then by destination centrality.
                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                // Pad with heuristic moves at a moderate 0.6 weight, without
                // overwriting the tactical best move.
                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data
                        .entry(chess_move)
                        .or_insert_with(|| vec![(0.6, 0.0)]);
                }
            } else {
                // No tactical search: purely heuristic ordering at a low 0.3
                // weight.
                let mut ordered_moves = legal_moves.clone();

                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data.insert(chess_move, vec![(0.3, 0.0)]);
                }
            }
        }

        // Aggregate the collected evidence into one recommendation per move.
        let mut recommendations = Vec::new();

        for (chess_move, outcomes) in move_data {
            if outcomes.is_empty() {
                continue;
            }

            // Similarity-weighted average of the observed outcomes.
            let mut weighted_sum = 0.0;
            let mut weight_sum = 0.0;

            for &(similarity, outcome) in &outcomes {
                weighted_sum += similarity * outcome;
                weight_sum += similarity;
            }

            let average_outcome = if weight_sum > 0.0 {
                weighted_sum / weight_sum
            } else {
                0.0
            };

            // Confidence: mostly mean similarity, plus a small log bonus for
            // being seen in more positions; capped at 0.95.
            let avg_similarity =
                outcomes.iter().map(|(s, _)| s).sum::<f32>() / outcomes.len() as f32;
            let position_count_bonus = (outcomes.len() as f32).ln().max(1.0) / 5.0;
            let confidence = (avg_similarity * 0.8 + position_count_bonus * 0.2).min(0.95);
            recommendations.push(MoveRecommendation {
                chess_move,
                // `.min(1.0)` is redundant after the 0.95 cap above; kept as a
                // defensive clamp.
                confidence: confidence.min(1.0),
                from_similar_position_count: outcomes.len(),
                average_outcome,
            });
        }

        recommendations.sort_by(|a, b| {
            b.confidence
                .partial_cmp(&a.confidence)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        recommendations.truncate(num_recommendations);
        recommendations
    }
2607
2608 pub fn recommend_legal_moves(
2610 &mut self,
2611 board: &Board,
2612 num_recommendations: usize,
2613 ) -> Vec<MoveRecommendation> {
2614 use chess::MoveGen;
2615
2616 let legal_moves: std::collections::HashSet<ChessMove> = MoveGen::new_legal(board).collect();
2618
2619 let all_recommendations = self.recommend_moves(board, num_recommendations * 2); all_recommendations
2623 .into_iter()
2624 .filter(|rec| legal_moves.contains(&rec.chess_move))
2625 .take(num_recommendations)
2626 .collect()
2627 }
2628
2629 pub fn enable_persistence<P: AsRef<Path>>(
2631 &mut self,
2632 db_path: P,
2633 ) -> Result<(), Box<dyn std::error::Error>> {
2634 let database = Database::new(db_path)?;
2635 self.database = Some(database);
2636 println!("Persistence enabled");
2637 Ok(())
2638 }
2639
2640 pub fn save_to_database(&self) -> Result<(), Box<dyn std::error::Error>> {
2642 let db = self
2643 .database
2644 .as_ref()
2645 .ok_or("Database not enabled. Call enable_persistence() first.")?;
2646
2647 println!("š¾ Saving engine state to database (batch mode)...");
2648
2649 let current_time = std::time::SystemTime::now()
2651 .duration_since(std::time::UNIX_EPOCH)?
2652 .as_secs() as i64;
2653
2654 let mut position_data_batch = Vec::with_capacity(self.position_boards.len());
2655
2656 for (i, board) in self.position_boards.iter().enumerate() {
2657 if i < self.position_vectors.len() && i < self.position_evaluations.len() {
2658 let vector = self.position_vectors[i].as_slice().unwrap();
2659 let position_data = PositionData {
2660 fen: board.to_string(),
2661 vector: vector.iter().map(|&x| x as f64).collect(),
2662 evaluation: Some(self.position_evaluations[i] as f64),
2663 compressed_vector: None, created_at: current_time,
2665 };
2666 position_data_batch.push(position_data);
2667 }
2668 }
2669
2670 if !position_data_batch.is_empty() {
2672 let saved_count = db.save_positions_batch(&position_data_batch)?;
2673 println!("š Batch saved {} positions", saved_count);
2674 }
2675
2676 if let Some(ref lsh) = self.lsh_index {
2678 lsh.save_to_database(db)?;
2679 }
2680
2681 if let Some(ref learner) = self.manifold_learner {
2683 if learner.is_trained() {
2684 learner.save_to_database(db)?;
2685 }
2686 }
2687
2688 println!("ā
Engine state saved successfully (batch optimized)");
2689 Ok(())
2690 }
2691
    /// Restore engine state (positions, LSH configuration, manifold learner)
    /// from the attached database, appending to whatever is already in memory.
    ///
    /// # Errors
    /// Fails if persistence was never enabled, or if a database read /
    /// index-rebuild step fails. Positions whose FEN no longer parses are
    /// silently skipped.
    pub fn load_from_database(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let db = self
            .database
            .as_ref()
            .ok_or("Database not enabled. Call enable_persistence() first.")?;

        println!("Loading engine state from database...");

        let positions = db.load_all_positions()?;
        for position_data in positions {
            // Skip rows whose FEN fails to parse rather than aborting the load.
            if let Ok(board) = Board::from_str(&position_data.fen) {
                let vector: Vec<f32> = position_data.vector.iter().map(|&x| x as f32).collect();
                let vector_array = Array1::from(vector);
                let mut evaluation = position_data.evaluation.unwrap_or(0.0) as f32;

                // Heuristic rescale: values beyond +/-15 are presumably stored
                // in centipawns — convert to pawns. TODO(review): confirm the
                // units written by older save paths.
                if evaluation.abs() > 15.0 {
                    evaluation /= 100.0;
                }

                self.similarity_search
                    .add_position(vector_array.clone(), evaluation);

                // Keep the parallel position_* vectors in lockstep.
                self.position_vectors.push(vector_array);
                self.position_boards.push(board);
                self.position_evaluations.push(evaluation);
            }
        }

        // Rebuild the LSH index from the just-loaded positions, if enabled.
        if self.use_lsh {
            let positions_for_lsh: Vec<(Array1<f32>, f32)> = self
                .position_vectors
                .iter()
                .zip(self.position_evaluations.iter())
                .map(|(v, &e)| (v.clone(), e))
                .collect();

            match LSH::load_from_database(db, &positions_for_lsh)? {
                Some(lsh) => {
                    self.lsh_index = Some(lsh);
                    println!("Loaded LSH configuration from database");
                }
                None => {
                    println!("No LSH configuration found in database");
                }
            }
        }

        // Restore the manifold learner and, if manifold search is active,
        // rebuild its derived indices.
        match ManifoldLearner::load_from_database(db)? {
            Some(learner) => {
                self.manifold_learner = Some(learner);
                if self.use_manifold {
                    self.rebuild_manifold_indices()?;
                }
                println!("Loaded manifold learner from database");
            }
            None => {
                println!("No manifold learner found in database");
            }
        }

        println!(
            "Engine state loaded successfully ({} positions)",
            self.knowledge_base_size()
        );
        Ok(())
    }
2767
2768 pub fn new_with_persistence<P: AsRef<Path>>(
2770 vector_size: usize,
2771 db_path: P,
2772 ) -> Result<Self, Box<dyn std::error::Error>> {
2773 let mut engine = Self::new(vector_size);
2774 engine.enable_persistence(db_path)?;
2775
2776 match engine.load_from_database() {
2778 Ok(_) => {
2779 println!("Loaded existing engine from database");
2780 }
2781 Err(e) => {
2782 println!("Starting fresh engine (load failed: {})", e);
2783 }
2784 }
2785
2786 Ok(engine)
2787 }
2788
2789 pub fn auto_save(&self) -> Result<(), Box<dyn std::error::Error>> {
2791 if self.database.is_some() {
2792 self.save_to_database()?;
2793 }
2794 Ok(())
2795 }
2796
2797 pub fn is_persistence_enabled(&self) -> bool {
2799 self.database.is_some()
2800 }
2801
2802 pub fn database_position_count(&self) -> Result<i64, Box<dyn std::error::Error>> {
2804 let db = self.database.as_ref().ok_or("Database not enabled")?;
2805 Ok(db.get_position_count()?)
2806 }
2807
2808 pub fn enable_tactical_search(&mut self, config: TacticalConfig) {
2810 self.tactical_search = Some(TacticalSearch::new(config));
2811 }
2812
2813 pub fn enable_tactical_search_default(&mut self) {
2815 self.tactical_search = Some(TacticalSearch::new_default());
2816 }
2817
    /// Replace the hybrid (pattern + tactical) evaluation configuration wholesale.
    pub fn configure_hybrid_evaluation(&mut self, config: HybridConfig) {
        self.hybrid_config = config;
    }
2822
2823 pub fn is_tactical_search_enabled(&self) -> bool {
2825 self.tactical_search.is_some()
2826 }
2827
2828 pub fn enable_parallel_search(&mut self, num_threads: usize) {
2830 if let Some(ref mut tactical_search) = self.tactical_search {
2831 tactical_search.config.enable_parallel_search = true;
2832 tactical_search.config.num_threads = num_threads;
2833 println!(
2834 "š§µ Parallel tactical search enabled with {} threads",
2835 num_threads
2836 );
2837 }
2838 }
2839
2840 pub fn is_parallel_search_enabled(&self) -> bool {
2842 self.tactical_search
2843 .as_ref()
2844 .map(|ts| ts.config.enable_parallel_search)
2845 .unwrap_or(false)
2846 }
2847
    /// Borrow the current hybrid evaluation configuration.
    pub fn hybrid_config(&self) -> &HybridConfig {
        &self.hybrid_config
    }
2871
2872 pub fn is_opening_book_enabled(&self) -> bool {
2874 self.opening_book.is_some()
2875 }
2876
2877 pub fn self_play_training(
2879 &mut self,
2880 config: training::SelfPlayConfig,
2881 ) -> Result<usize, Box<dyn std::error::Error>> {
2882 let mut trainer = training::SelfPlayTrainer::new(config);
2883 let new_data = trainer.generate_training_data(self);
2884
2885 let positions_added = new_data.data.len();
2886
2887 for data in &new_data.data {
2889 self.add_position(&data.board, data.evaluation);
2890 }
2891
2892 if self.database.is_some() {
2894 match self.save_to_database() {
2895 Ok(_) => println!("š¾ Saved {} positions to database", positions_added),
2896 Err(_e) => println!("Loading complete"),
2897 }
2898 }
2899
2900 println!(
2901 "š§ Self-play training complete: {} new positions learned",
2902 positions_added
2903 );
2904 Ok(positions_added)
2905 }
2906
2907 pub fn continuous_self_play(
2909 &mut self,
2910 config: training::SelfPlayConfig,
2911 iterations: usize,
2912 save_path: Option<&str>,
2913 ) -> Result<usize, Box<dyn std::error::Error>> {
2914 let mut total_positions = 0;
2915 let mut trainer = training::SelfPlayTrainer::new(config.clone());
2916
2917 println!(
2918 "š Starting continuous self-play training for {} iterations...",
2919 iterations
2920 );
2921
2922 for iteration in 1..=iterations {
2923 println!("\n--- Self-Play Iteration {}/{} ---", iteration, iterations);
2924
2925 let new_data = trainer.generate_training_data(self);
2927 let batch_size = new_data.data.len();
2928
2929 for data in &new_data.data {
2931 self.add_position(&data.board, data.evaluation);
2932 }
2933
2934 total_positions += batch_size;
2935
2936 println!(
2937 "ā
Iteration {}: Added {} positions (total: {})",
2938 iteration,
2939 batch_size,
2940 self.knowledge_base_size()
2941 );
2942
2943 if iteration % 5 == 0 || iteration == iterations {
2945 if let Some(path) = save_path {
2947 match self.save_training_data_binary(path) {
2948 Ok(_) => println!("š¾ Progress saved to {} (binary format)", path),
2949 Err(_e) => println!("Loading complete"),
2950 }
2951 }
2952
2953 if self.database.is_some() {
2955 match self.save_to_database() {
2956 Ok(_) => println!(
2957 "š¾ Database synchronized ({} total positions)",
2958 self.knowledge_base_size()
2959 ),
2960 Err(_e) => println!("Loading complete"),
2961 }
2962 }
2963 }
2964
2965 if iteration % 10 == 0
2967 && self.knowledge_base_size() > 5000
2968 && self.manifold_learner.is_some()
2969 {
2970 println!("š§ Retraining manifold learning with new data...");
2971 let _ = self.train_manifold_learning(5);
2972 }
2973 }
2974
2975 println!(
2976 "\nš Continuous self-play complete: {} total new positions",
2977 total_positions
2978 );
2979 Ok(total_positions)
2980 }
2981
2982 pub fn adaptive_self_play(
2984 &mut self,
2985 base_config: training::SelfPlayConfig,
2986 target_strength: f32,
2987 ) -> Result<usize, Box<dyn std::error::Error>> {
2988 let mut current_config = base_config;
2989 let mut total_positions = 0;
2990 let mut iteration = 1;
2991
2992 println!(
2993 "šÆ Starting adaptive self-play training (target strength: {:.2})...",
2994 target_strength
2995 );
2996
2997 loop {
2998 println!("\n--- Adaptive Iteration {} ---", iteration);
2999
3000 let positions_added = self.self_play_training(current_config.clone())?;
3002 total_positions += positions_added;
3003
3004 if self.database.is_some() {
3006 match self.save_to_database() {
3007 Ok(_) => println!("š¾ Adaptive training progress saved to database"),
3008 Err(_e) => println!("Loading complete"),
3009 }
3010 }
3011
3012 let current_strength = self.knowledge_base_size() as f32 / 10000.0; println!(
3016 "š Current strength estimate: {:.2} (target: {:.2})",
3017 current_strength, target_strength
3018 );
3019
3020 if current_strength >= target_strength {
3021 println!("š Target strength reached!");
3022 break;
3023 }
3024
3025 current_config.exploration_factor *= 0.95; current_config.temperature *= 0.98; current_config.games_per_iteration =
3029 (current_config.games_per_iteration as f32 * 1.1) as usize; iteration += 1;
3032
3033 if iteration > 50 {
3034 println!("ā ļø Maximum iterations reached");
3035 break;
3036 }
3037 }
3038
3039 Ok(total_positions)
3040 }
3041}
3042
// Unit tests exercising the engine end-to-end: creation, position storage,
// similarity search, move recommendations, persistence round-trips, LSH and
// manifold indexing, and tactical/hybrid evaluation.
#[cfg(test)]
mod tests {
    use super::*;
    use chess::Board;

    // A freshly constructed engine starts with an empty knowledge base.
    #[test]
    fn test_engine_creation() {
        let engine = ChessVectorEngine::new(1024);
        assert_eq!(engine.knowledge_base_size(), 0);
    }

    // Adding a position grows the knowledge base and makes it retrievable
    // via similarity search.
    #[test]
    fn test_add_and_search() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.0);
        assert_eq!(engine.knowledge_base_size(), 1);

        let similar = engine.find_similar_positions(&board, 1);
        assert_eq!(similar.len(), 1);
    }

    // Evaluating a stored position returns (approximately) its stored score.
    #[test]
    fn test_evaluation() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.5);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());
        assert!((evaluation.unwrap() - 0.5).abs() < 1e-6);
    }

    // Both the raw and legality-filtered recommendation APIs return results
    // once a position with move data has been stored.
    #[test]
    fn test_move_recommendations() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let mov = ChessMove::from_str("e2e4").unwrap();
        engine.add_position_with_move(&board, 0.0, Some(mov), Some(0.8));

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());

        let legal_recommendations = engine.recommend_legal_moves(&board, 3);
        assert!(!legal_recommendations.is_empty());
    }

    // With no training data the engine must still fall back to recommending
    // legal moves with sane confidence/outcome defaults.
    #[test]
    fn test_empty_knowledge_base_fallback() {
        let mut engine = ChessVectorEngine::new(1024);

        use std::str::FromStr;
        let board =
            Board::from_str("r1bqkbnr/pppp1ppp/2n5/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 0 1")
                .unwrap();

        let recommendations = engine.recommend_moves(&board, 5);
        assert!(
            !recommendations.is_empty(),
            "recommend_moves should not return empty even with no training data"
        );
        assert_eq!(
            recommendations.len(),
            5,
            "Should return exactly 5 recommendations"
        );

        // Fallback recommendations carry a neutral outcome and a count of 1.
        for rec in &recommendations {
            assert!(rec.confidence > 0.0, "Confidence should be greater than 0");
            assert_eq!(
                rec.from_similar_position_count, 1,
                "Should have count of 1 for fallback"
            );
            assert_eq!(rec.average_outcome, 0.0, "Should have neutral outcome");
        }

        let starting_board = Board::default();
        let starting_recommendations = engine.recommend_moves(&starting_board, 3);
        assert!(
            !starting_recommendations.is_empty(),
            "Should work for starting position too"
        );

        // Every fallback recommendation must be legal in the position.
        use chess::MoveGen;
        let legal_moves: std::collections::HashSet<_> = MoveGen::new_legal(&board).collect();
        for rec in &recommendations {
            assert!(
                legal_moves.contains(&rec.chess_move),
                "All recommended moves should be legal"
            );
        }
    }

    // Opening book: enabling it exposes entries/stats for the start position
    // and boosts recommendation confidence there.
    #[test]
    fn test_opening_book_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        engine.enable_opening_book();
        assert!(engine.opening_book.is_some());

        let board = Board::default();
        assert!(engine.is_opening_position(&board));

        let entry = engine.get_opening_entry(&board);
        assert!(entry.is_some());

        let stats = engine.opening_book_stats();
        assert!(stats.is_some());
        assert!(stats.unwrap().total_positions > 0);

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());
        assert!(recommendations[0].confidence > 0.7); }

    // Manifold learning: enable/train succeeds, the requested compression
    // ratio is preserved, and similarity search keeps working afterwards.
    #[test]
    fn test_manifold_learning_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..10 {
            engine.add_position(&board, i as f32 * 0.1);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());

        let ratio = engine.manifold_compression_ratio();
        assert!(ratio.is_some());
        assert!((ratio.unwrap() - 8.0).abs() < 0.1);

        assert!(engine.train_manifold_learning(5).is_ok());

        let original_similar = engine.find_similar_positions(&board, 3);
        assert!(!original_similar.is_empty());
    }

    // LSH: approximate search and evaluation still work after enabling LSH.
    #[test]
    fn test_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..50 {
            engine.add_position(&board, i as f32 * 0.02);
        }

        engine.enable_lsh(4, 8);

        let similar = engine.find_similar_positions(&board, 5);
        assert!(!similar.is_empty());
        assert!(similar.len() <= 5);

        let eval = engine.evaluate_position(&board);
        assert!(eval.is_some());
    }

    // Manifold + LSH stacked: the combined index still answers similarity
    // and recommendation queries.
    #[test]
    fn test_manifold_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..20 {
            engine.add_position(&board, i as f32 * 0.05);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());
        assert!(engine.train_manifold_learning(3).is_ok());

        assert!(engine.enable_manifold_lsh(4, 8).is_ok());

        let similar = engine.find_similar_positions(&board, 3);
        assert!(!similar.is_empty());

        let _recommendations = engine.recommend_moves(&board, 2);
    }

    // Per-position move metadata is stored and surfaces in recommendations.
    #[test]
    fn test_position_with_move_storage() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let move1 = ChessMove::from_str("e2e4").unwrap();
        let move2 = ChessMove::from_str("d2d4").unwrap();

        engine.add_position_with_move(&board, 0.0, Some(move1), Some(0.7));
        engine.add_position_with_move(&board, 0.1, Some(move2), Some(0.6));

        assert_eq!(engine.position_moves.len(), 2);

        let recommendations = engine.recommend_moves(&board, 5);
        let _move_strings: Vec<String> = recommendations
            .iter()
            .map(|r| r.chess_move.to_string())
            .collect();

        assert!(!recommendations.is_empty());
    }

    // Coarse performance guardrails: encoding and search stay within very
    // generous wall-clock bounds (a regression canary, not a benchmark).
    #[test]
    fn test_performance_regression_basic() {
        use std::time::Instant;

        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        for i in 0..100 {
            engine.add_position(&board, i as f32 * 0.01);
        }

        let start = Instant::now();

        for _ in 0..100 {
            engine.add_position(&board, 0.0);
        }

        let encoding_time = start.elapsed();

        let start = Instant::now();
        for _ in 0..10 {
            engine.find_similar_positions(&board, 5);
        }
        let search_time = start.elapsed();

        assert!(
            encoding_time.as_millis() < 10000,
            "Position encoding too slow: {}ms",
            encoding_time.as_millis()
        );
        assert!(
            search_time.as_millis() < 5000,
            "Search too slow: {}ms",
            search_time.as_millis()
        );
    }

    // Knowledge base size grows exactly with the number of added positions.
    #[test]
    fn test_memory_usage_reasonable() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        let initial_size = engine.knowledge_base_size();

        for i in 0..1000 {
            engine.add_position(&board, i as f32 * 0.001);
        }

        let final_size = engine.knowledge_base_size();
        assert_eq!(final_size, initial_size + 1000);

        assert!(final_size > initial_size);
    }

    // Incremental dataset training merges new positions without duplicating
    // ones the engine already knows.
    #[test]
    fn test_incremental_training() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);
        let board1 = Board::default();
        let board2 =
            Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap();

        engine.add_position(&board1, 0.0);
        engine.add_position(&board2, 0.2);
        assert_eq!(engine.knowledge_base_size(), 2);

        // Dataset contains one duplicate (board1) and one brand-new position.
        let mut dataset = crate::training::TrainingDataset::new();
        dataset.add_position(board1, 0.1, 15, 1); dataset.add_position(
            Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
            15,
            2,
        ); engine.train_from_dataset_incremental(&dataset);

        // 2 existing + 1 new unique position.
        assert_eq!(engine.knowledge_base_size(), 3);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 3);
        assert_eq!(stats.unique_positions, 3);
        assert!(!stats.has_move_data); }

    // Saving one engine's training data and incrementally loading it into
    // another merges the two knowledge bases.
    #[test]
    fn test_save_load_incremental() {
        use std::str::FromStr;
        use tempfile::tempdir;

        let temp_dir = tempdir().unwrap();
        let file_path = temp_dir.path().join("test_training.json");

        let mut engine1 = ChessVectorEngine::new(1024);
        engine1.add_position(&Board::default(), 0.0);
        engine1.add_position(
            &Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap(),
            0.2,
        );

        engine1.save_training_data(&file_path).unwrap();

        let mut engine2 = ChessVectorEngine::new(1024);
        engine2.add_position(
            &Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
        );
        assert_eq!(engine2.knowledge_base_size(), 1);

        engine2.load_training_data_incremental(&file_path).unwrap();

        // 1 original + 2 loaded.
        assert_eq!(engine2.knowledge_base_size(), 3);
    }

    // training_stats reflects position counts, move data, and which optional
    // subsystems (LSH, manifold, opening book) are enabled.
    #[test]
    fn test_training_stats() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 0);
        assert_eq!(stats.unique_positions, 0);
        assert!(!stats.has_move_data);
        assert!(!stats.lsh_enabled);
        assert!(!stats.manifold_enabled);
        assert!(!stats.opening_book_enabled);

        engine.add_position(&Board::default(), 0.0);
        engine.add_position_with_move(
            &Board::default(),
            0.1,
            Some(ChessMove::from_str("e2e4").unwrap()),
            Some(0.8),
        );

        engine.enable_opening_book();
        engine.enable_lsh(4, 8);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 2);
        assert!(stats.has_move_data);
        assert!(stats.move_data_entries > 0);
        assert!(stats.lsh_enabled);
        assert!(stats.opening_book_enabled);
    }

    // Tactical search toggles on correctly and evaluation works both with
    // and without pattern data present.
    #[test]
    fn test_tactical_search_integration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        assert!(!engine.is_tactical_search_enabled());

        engine.enable_tactical_search_default();
        assert!(engine.is_tactical_search_enabled());

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        engine.add_position(&board, 0.5);
        let hybrid_evaluation = engine.evaluate_position(&board);
        assert!(hybrid_evaluation.is_some());
    }

    // Hybrid evaluation honours custom configs, including disabling the
    // tactical refinement pass entirely.
    #[test]
    fn test_hybrid_evaluation_configuration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.enable_tactical_search_default();

        let custom_config = HybridConfig {
            pattern_confidence_threshold: 0.9, enable_tactical_refinement: true,
            tactical_config: TacticalConfig::default(),
            pattern_weight: 0.8,
            min_similar_positions: 5,
        };

        engine.configure_hybrid_evaluation(custom_config);

        engine.add_position(&board, 0.3);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        // Pattern-only path: tactical refinement switched off.
        let no_tactical_config = HybridConfig {
            enable_tactical_refinement: false,
            ..HybridConfig::default()
        };

        engine.configure_hybrid_evaluation(no_tactical_config);

        let pattern_only_evaluation = engine.evaluate_position(&board);
        assert!(pattern_only_evaluation.is_some());
    }
}