1pub mod ann;
67pub mod auto_discovery;
68pub mod features;
69pub mod gpu_acceleration;
70pub mod license;
71pub mod lichess_loader;
72pub mod lsh;
73pub mod manifold_learner;
74pub mod nnue;
75pub mod opening_book;
76pub mod persistence;
77pub mod position_encoder;
78pub mod similarity_search;
79pub mod streaming_loader;
80pub mod tactical_search;
81pub mod training;
82pub mod ultra_fast_loader;
83pub mod variational_autoencoder;
84pub mod uci;
86
87pub use auto_discovery::{AutoDiscovery, FormatPriority, TrainingFile};
88pub use features::{FeatureChecker, FeatureError, FeatureRegistry, FeatureTier};
89pub use gpu_acceleration::{DeviceType, GPUAccelerator};
90pub use license::{
91 LicenseError, LicenseKey, LicenseStatus, LicenseVerifier, LicensedFeatureChecker,
92};
93pub use lichess_loader::{load_lichess_puzzles_basic, load_lichess_puzzles_premium, LichessLoader};
94pub use lsh::LSH;
95pub use manifold_learner::ManifoldLearner;
96pub use nnue::{BlendStrategy, EvalStats, HybridEvaluator, NNUEConfig, NNUE};
97pub use opening_book::{OpeningBook, OpeningBookStats, OpeningEntry};
98pub use persistence::{Database, LSHTableData, PositionData};
99pub use position_encoder::PositionEncoder;
100pub use similarity_search::SimilaritySearch;
101pub use streaming_loader::StreamingLoader;
102pub use tactical_search::{TacticalConfig, TacticalResult, TacticalSearch};
103pub use training::{
104 EngineEvaluator, GameExtractor, SelfPlayConfig, SelfPlayTrainer, TacticalPuzzle,
105 TacticalPuzzleParser, TacticalTrainingData, TrainingData, TrainingDataset,
106};
107pub use ultra_fast_loader::{LoadingStats, UltraFastLoader};
108pub use variational_autoencoder::{VAEConfig, VariationalAutoencoder};
109pub use uci::{run_uci_engine, run_uci_engine_with_config, UCIConfig, UCIEngine};
111
112use chess::{Board, ChessMove};
113use ndarray::{Array1, Array2};
114use serde_json::Value;
115use std::collections::HashMap;
116use std::path::Path;
117use std::str::FromStr;
118
119fn move_centrality(chess_move: &ChessMove) -> f32 {
122 let dest_square = chess_move.get_dest();
123 let rank = dest_square.get_rank().to_index() as f32;
124 let file = dest_square.get_file().to_index() as f32;
125
126 let center_rank = 3.5;
128 let center_file = 3.5;
129
130 let rank_distance = (rank - center_rank).abs();
131 let file_distance = (file - center_file).abs();
132
133 let max_distance = 3.5; let distance = (rank_distance + file_distance) / 2.0;
136 max_distance - distance
137}
138
/// A candidate move derived from similar-position analysis.
#[derive(Debug, Clone)]
pub struct MoveRecommendation {
    /// The recommended move.
    pub chess_move: ChessMove,
    /// Confidence score for this recommendation.
    pub confidence: f32,
    /// How many similar positions contributed this move.
    pub from_similar_position_count: usize,
    /// Mean outcome/evaluation observed across those positions.
    pub average_outcome: f32,
}
147
/// Snapshot of the engine's knowledge base and enabled subsystems,
/// as reported by [`ChessVectorEngine::training_stats`].
#[derive(Debug, Clone)]
pub struct TrainingStats {
    /// Positions stored in the similarity search index.
    pub total_positions: usize,
    /// Boards tracked in the engine's parallel arrays.
    pub unique_positions: usize,
    /// Whether any per-position move data has been recorded.
    pub has_move_data: bool,
    /// Number of positions that have associated move data.
    pub move_data_entries: usize,
    /// Whether LSH approximate search is active.
    pub lsh_enabled: bool,
    /// Whether manifold (compressed-vector) search is active.
    pub manifold_enabled: bool,
    /// Whether an opening book is loaded.
    pub opening_book_enabled: bool,
}
159
/// Tuning knobs for blending pattern-based (similarity) evaluation with
/// tactical search in [`ChessVectorEngine::evaluate_position`].
#[derive(Debug, Clone)]
pub struct HybridConfig {
    /// Below this pattern confidence, tactical refinement is consulted.
    pub pattern_confidence_threshold: f32,
    /// Master switch for tactical refinement.
    pub enable_tactical_refinement: bool,
    /// Configuration forwarded to the tactical searcher.
    pub tactical_config: TacticalConfig,
    /// Base weight of the pattern evaluation in the blended result.
    pub pattern_weight: f32,
    /// Neighbor count needed for full pattern confidence (count factor caps at 1.0).
    pub min_similar_positions: usize,
}
174
impl Default for HybridConfig {
    /// Defaults favor pattern evaluation (base weight 0.7) and fall back to
    /// tactical refinement when confidence drops below 0.8.
    fn default() -> Self {
        Self {
            pattern_confidence_threshold: 0.8,
            enable_tactical_refinement: true,
            tactical_config: TacticalConfig::default(),
            pattern_weight: 0.7, min_similar_positions: 3,
        }
    }
}
186
/// Core engine: encodes chess positions as fixed-size feature vectors and
/// evaluates them by similarity against a knowledge base, with optional
/// layers for LSH acceleration, manifold compression, opening book,
/// persistence, tactical search, and feature/licensing gates.
pub struct ChessVectorEngine {
    /// Board -> feature-vector encoder.
    encoder: PositionEncoder,
    /// Exact (linear) similarity search over full-size vectors.
    similarity_search: SimilaritySearch,
    /// Optional approximate index over full-size vectors.
    lsh_index: Option<LSH>,
    /// Optional dimensionality-reduction model for compressed search.
    manifold_learner: Option<ManifoldLearner>,
    /// Consult the LSH index before falling back to linear search.
    use_lsh: bool,
    /// Consult the manifold (compressed) indexes first.
    use_manifold: bool,
    /// Per-position move data: position index -> (move, score) pairs.
    position_moves: HashMap<usize, Vec<(ChessMove, f32)>>,
    /// Linear search over manifold-compressed vectors.
    manifold_similarity_search: Option<SimilaritySearch>,
    /// LSH index over manifold-compressed vectors.
    manifold_lsh_index: Option<LSH>,
    /// Tier-based feature gating.
    feature_checker: FeatureChecker,
    /// Optional license-backed feature gating.
    licensed_feature_checker: Option<LicensedFeatureChecker>,
    // The next three Vecs are parallel arrays: one shared index per position.
    position_vectors: Vec<Array1<f32>>,
    position_boards: Vec<Board>,
    position_evaluations: Vec<f32>,
    /// Opening-book lookup consulted before similarity search.
    opening_book: Option<OpeningBook>,
    /// Optional persistence backend.
    database: Option<Database>,
    /// Tactical searcher used for refinement and as a fallback.
    tactical_search: Option<TacticalSearch>,
    /// Pattern-vs-tactical blending configuration.
    hybrid_config: HybridConfig,
}
274
impl Clone for ChessVectorEngine {
    /// Manual `Clone`: most state is deep-copied, but several fields are reset
    /// — presumably because their types wrap non-clonable resources (trained
    /// model, DB connection, license handle); TODO confirm.
    fn clone(&self) -> Self {
        Self {
            encoder: self.encoder.clone(),
            similarity_search: self.similarity_search.clone(),
            lsh_index: self.lsh_index.clone(),
            // Manifold model is dropped; manifold mode is disabled in the copy.
            manifold_learner: None, use_lsh: self.use_lsh,
            use_manifold: false, position_moves: self.position_moves.clone(),
            manifold_similarity_search: self.manifold_similarity_search.clone(),
            manifold_lsh_index: self.manifold_lsh_index.clone(),
            feature_checker: self.feature_checker.clone(),
            // License state does not carry over to the clone.
            licensed_feature_checker: None, position_vectors: self.position_vectors.clone(),
            position_boards: self.position_boards.clone(),
            position_evaluations: self.position_evaluations.clone(),
            opening_book: self.opening_book.clone(),
            // The clone starts detached from any database.
            database: None, tactical_search: self.tactical_search.clone(),
            hybrid_config: self.hybrid_config.clone(),
        }
    }
}
300
301impl ChessVectorEngine {
    /// Create an engine with only the core components (encoder + exact
    /// similarity search); every optional subsystem starts disabled.
    pub fn new(vector_size: usize) -> Self {
        Self {
            encoder: PositionEncoder::new(vector_size),
            similarity_search: SimilaritySearch::new(vector_size),
            lsh_index: None,
            manifold_learner: None,
            use_lsh: false,
            use_manifold: false,
            position_moves: HashMap::new(),
            manifold_similarity_search: None,
            manifold_lsh_index: None,
            // Default to the open-source feature tier; no license attached.
            feature_checker: FeatureChecker::new(FeatureTier::OpenSource), licensed_feature_checker: None,
            position_vectors: Vec::new(),
            position_boards: Vec::new(),
            position_evaluations: Vec::new(),
            opening_book: None,
            database: None,
            tactical_search: None,
            hybrid_config: HybridConfig::default(),
        }
    }
326
327 pub fn new_with_tier(vector_size: usize, tier: FeatureTier) -> Self {
329 let mut engine = Self::new(vector_size);
330 engine.feature_checker = FeatureChecker::new(tier);
331 engine
332 }
333
    /// Current feature tier the engine is gated at.
    pub fn get_feature_tier(&self) -> &FeatureTier {
        self.feature_checker.get_current_tier()
    }
338
    /// Upgrade the engine to a higher feature tier.
    pub fn upgrade_tier(&mut self, new_tier: FeatureTier) {
        self.feature_checker.upgrade_tier(new_tier);
    }
343
    /// Whether `feature` is available at the current tier (non-failing check).
    pub fn is_feature_available(&self, feature: &str) -> bool {
        self.feature_checker.check_feature(feature).is_ok()
    }
348
    /// Require `feature` at the current tier.
    ///
    /// # Errors
    /// Returns a [`FeatureError`] when the feature is not available.
    pub fn require_feature(&self, feature: &str) -> Result<(), FeatureError> {
        self.feature_checker.require_feature(feature)
    }
353
354 pub fn new_adaptive(vector_size: usize, expected_positions: usize, use_case: &str) -> Self {
357 match use_case {
358 "training" => {
359 if expected_positions > 10000 {
360 Self::new_with_lsh(vector_size, 12, 20)
362 } else {
363 Self::new(vector_size)
364 }
365 }
366 "gameplay" => {
367 if expected_positions > 15000 {
368 Self::new_with_lsh(vector_size, 10, 18)
370 } else {
371 Self::new(vector_size)
372 }
373 }
374 "analysis" => {
375 if expected_positions > 10000 {
376 Self::new_with_lsh(vector_size, 14, 22)
378 } else {
379 Self::new(vector_size)
380 }
381 }
382 _ => Self::new(vector_size), }
384 }
385
    /// Create an engine with LSH approximate search pre-enabled using the
    /// given table count and hash size; all other subsystems start disabled.
    pub fn new_with_lsh(vector_size: usize, num_tables: usize, hash_size: usize) -> Self {
        Self {
            encoder: PositionEncoder::new(vector_size),
            similarity_search: SimilaritySearch::new(vector_size),
            // LSH index created empty and consulted before linear search.
            lsh_index: Some(LSH::new(vector_size, num_tables, hash_size)),
            manifold_learner: None,
            use_lsh: true,
            use_manifold: false,
            position_moves: HashMap::new(),
            manifold_similarity_search: None,
            manifold_lsh_index: None,
            feature_checker: FeatureChecker::new(FeatureTier::OpenSource),
            licensed_feature_checker: None,
            position_vectors: Vec::new(),
            position_boards: Vec::new(),
            position_evaluations: Vec::new(),
            opening_book: None,
            database: None,
            tactical_search: None,
            hybrid_config: HybridConfig::default(),
        }
    }
410
411 pub fn enable_lsh(&mut self, num_tables: usize, hash_size: usize) {
413 self.lsh_index = Some(LSH::new(self.encoder.vector_size(), num_tables, hash_size));
414 self.use_lsh = true;
415
416 if let Some(ref mut lsh) = self.lsh_index {
418 for (vector, evaluation) in self.similarity_search.get_all_positions() {
419 lsh.add_vector(vector, evaluation);
420 }
421 }
422 }
423
    /// Encode `board` and add it (with its evaluation) to every active index.
    ///
    /// Positions rejected by `is_position_safe` (defined elsewhere in this
    /// impl) are silently dropped.
    pub fn add_position(&mut self, board: &Board, evaluation: f32) {
        // Safety filter: skip positions the engine considers unsafe to store.
        if !self.is_position_safe(board) {
            return; }

        let vector = self.encoder.encode(board);
        self.similarity_search
            .add_position(vector.clone(), evaluation);

        // Keep the parallel arrays in sync: vector, board, and evaluation
        // share the same index.
        self.position_vectors.push(vector.clone());
        self.position_boards.push(*board);
        self.position_evaluations.push(evaluation);

        // Mirror into the LSH index when enabled.
        if let Some(ref mut lsh) = self.lsh_index {
            lsh.add_vector(vector.clone(), evaluation);
        }

        // Mirror into the compressed (manifold) indexes when enabled.
        if self.use_manifold {
            if let Some(ref learner) = self.manifold_learner {
                let compressed = learner.encode(&vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), evaluation);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, evaluation);
                }
            }
        }
    }
460
    /// Find the `k` stored positions most similar to `board`.
    ///
    /// Returns `(vector, evaluation, similarity)` triples. Backends are tried
    /// in order of preference: manifold LSH, manifold linear search,
    /// full-vector LSH, then exact linear search.
    pub fn find_similar_positions(&self, board: &Board, k: usize) -> Vec<(Array1<f32>, f32, f32)> {
        let query_vector = self.encoder.encode(board);

        if self.use_manifold {
            if let Some(ref manifold_learner) = self.manifold_learner {
                let compressed_query = manifold_learner.encode(&query_vector);

                // Prefer approximate search in the compressed space.
                if let Some(ref lsh) = self.manifold_lsh_index {
                    return lsh.query(&compressed_query, k);
                }

                if let Some(ref search) = self.manifold_similarity_search {
                    return search.search(&compressed_query, k);
                }
            }
        }

        if self.use_lsh {
            if let Some(ref lsh_index) = self.lsh_index {
                return lsh_index.query(&query_vector, k);
            }
        }

        // Fallback: exact linear search over all stored positions.
        self.similarity_search.search(&query_vector, k)
    }
492
493 pub fn find_similar_positions_with_indices(
495 &self,
496 board: &Board,
497 k: usize,
498 ) -> Vec<(usize, f32, f32)> {
499 let query_vector = self.encoder.encode(board);
500
501 let mut results = Vec::new();
504
505 for (i, stored_vector) in self.position_vectors.iter().enumerate() {
506 let similarity = self.encoder.similarity(&query_vector, stored_vector);
507 let eval = self.position_evaluations.get(i).copied().unwrap_or(0.0);
508 results.push((i, eval, similarity));
509 }
510
511 results.sort_by(|a, b| b.2.partial_cmp(&a.2).unwrap_or(std::cmp::Ordering::Equal));
513 results.truncate(k);
514
515 results
516 }
517
    /// Evaluate `board`, preferring in order: opening book, similarity-based
    /// pattern evaluation, and — when pattern confidence is low — a blend
    /// with tactical search.
    ///
    /// Returns `None` only when no similar positions exist and no tactical
    /// searcher is configured.
    pub fn evaluate_position(&mut self, board: &Board) -> Option<f32> {
        // An opening-book hit short-circuits everything else.
        if let Some(entry) = self.get_opening_entry(board) {
            return Some(entry.evaluation);
        }

        let similar_positions = self.find_similar_positions(board, 5);

        // No pattern data: fall back to pure tactical search if available.
        if similar_positions.is_empty() {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let result = tactical_search.search(board);
                return Some(result.evaluation);
            }
            return None;
        }

        // Similarity-weighted average of neighbor evaluations.
        let mut weighted_sum = 0.0;
        let mut weight_sum = 0.0;
        let mut similarity_scores = Vec::new();

        for (_, evaluation, similarity) in &similar_positions {
            let weight = *similarity;
            weighted_sum += evaluation * weight;
            weight_sum += weight;
            similarity_scores.push(*similarity);
        }

        // NOTE(review): if every similarity is 0.0 this is 0/0 -> NaN; confirm
        // upstream similarity scores are strictly positive.
        let pattern_evaluation = weighted_sum / weight_sum;

        // Confidence = mean similarity scaled by neighbor count relative to
        // the configured minimum, capped at 1.0.
        let avg_similarity = similarity_scores.iter().sum::<f32>() / similarity_scores.len() as f32;
        let count_factor = (similar_positions.len() as f32
            / self.hybrid_config.min_similar_positions as f32)
            .min(1.0);
        let pattern_confidence = avg_similarity * count_factor;

        // Refine tactically only when enabled, confident enough to be unsure,
        // and a searcher exists.
        let use_tactical = self.hybrid_config.enable_tactical_refinement
            && pattern_confidence < self.hybrid_config.pattern_confidence_threshold
            && self.tactical_search.is_some();

        if use_tactical {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let tactical_result = if tactical_search.config.enable_parallel_search {
                    tactical_search.search_parallel(board)
                } else {
                    tactical_search.search(board)
                };

                // Higher confidence -> more weight on the pattern estimate.
                let pattern_weight = self.hybrid_config.pattern_weight * pattern_confidence;
                let tactical_weight = 1.0 - pattern_weight;

                let hybrid_evaluation = (pattern_evaluation * pattern_weight)
                    + (tactical_result.evaluation * tactical_weight);

                Some(hybrid_evaluation)
            } else {
                Some(pattern_evaluation)
            }
        } else {
            Some(pattern_evaluation)
        }
    }
596
    /// Encode `board` into the engine's feature-vector representation.
    pub fn encode_position(&self, board: &Board) -> Array1<f32> {
        self.encoder.encode(board)
    }
601
602 pub fn calculate_similarity(&self, board1: &Board, board2: &Board) -> f32 {
604 let vec1 = self.encoder.encode(board1);
605 let vec2 = self.encoder.encode(board2);
606 self.encoder.similarity(&vec1, &vec2)
607 }
608
    /// Number of positions stored in the similarity-search knowledge base.
    pub fn knowledge_base_size(&self) -> usize {
        self.similarity_search.size()
    }
613
614 pub fn save_training_data<P: AsRef<std::path::Path>>(
616 &self,
617 path: P,
618 ) -> Result<(), Box<dyn std::error::Error>> {
619 use crate::training::{TrainingData, TrainingDataset};
620
621 let mut dataset = TrainingDataset::new();
622
623 for (i, board) in self.position_boards.iter().enumerate() {
625 if i < self.position_evaluations.len() {
626 dataset.data.push(TrainingData {
627 board: *board,
628 evaluation: self.position_evaluations[i],
629 depth: 15, game_id: i, });
632 }
633 }
634
635 dataset.save_incremental(path)?;
636 println!("Saved {} positions to training data", dataset.data.len());
637 Ok(())
638 }
639
    /// Load a training dataset from `path`, adding only positions the engine
    /// does not already contain.
    ///
    /// If a sibling file with a `.bin` extension exists it is preferred and
    /// loaded via the faster binary path. Progress bars report the
    /// deduplication and insertion phases.
    ///
    /// # Errors
    /// Fails on dataset load errors or an invalid progress-bar template.
    pub fn load_training_data_incremental<P: AsRef<std::path::Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use crate::training::TrainingDataset;
        use indicatif::{ProgressBar, ProgressStyle};
        use std::collections::HashSet;

        let existing_size = self.knowledge_base_size();

        // Fast path: prefer an optimized binary sibling (same stem, `.bin`).
        let path_ref = path.as_ref();
        let binary_path = path_ref.with_extension("bin");
        if binary_path.exists() {
            println!("š Loading optimized binary format...");
            return self.load_training_data_binary(binary_path);
        }

        println!("š Loading training data from {}...", path_ref.display());
        let dataset = TrainingDataset::load(path)?;

        let total_positions = dataset.data.len();
        if total_positions == 0 {
            println!("ā ļø No positions found in dataset");
            return Ok(());
        }

        let dedup_pb = ProgressBar::new(total_positions as u64);
        dedup_pb.set_style(
            ProgressStyle::default_bar()
                .template("š Checking duplicates [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        // Dedup against both the engine's existing boards and earlier entries
        // of this same file.
        let mut existing_boards: HashSet<_> = self.position_boards.iter().cloned().collect();
        let mut new_positions = Vec::new();
        let mut new_evaluations = Vec::new();

        for (i, data) in dataset.data.into_iter().enumerate() {
            if !existing_boards.contains(&data.board) {
                existing_boards.insert(data.board);
                new_positions.push(data.board);
                new_evaluations.push(data.evaluation);
            }

            // Throttle progress updates to every 1000 entries.
            if i % 1000 == 0 || i == total_positions - 1 {
                dedup_pb.set_position((i + 1) as u64);
                dedup_pb.set_message(format!("{} new positions found", new_positions.len()));
            }
        }
        dedup_pb.finish_with_message(format!("ā
Found {} new positions", new_positions.len()));

        if new_positions.is_empty() {
            println!("ā¹ļø No new positions to add (all positions already exist)");
            return Ok(());
        }

        let add_pb = ProgressBar::new(new_positions.len() as u64);
        add_pb.set_style(
            ProgressStyle::default_bar()
                .template("ā Adding positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
                .progress_chars("āāā")
        );

        // Insert the unique positions; add_position re-encodes each board.
        for (i, (board, evaluation)) in new_positions
            .into_iter()
            .zip(new_evaluations.into_iter())
            .enumerate()
        {
            self.add_position(&board, evaluation);

            if i % 500 == 0 || i == add_pb.length().unwrap() as usize - 1 {
                add_pb.set_position((i + 1) as u64);
                add_pb.set_message("vectors encoded".to_string());
            }
        }
        add_pb.finish_with_message("ā
All positions added");

        println!(
            "šÆ Loaded {} new positions (total: {})",
            self.knowledge_base_size() - existing_size,
            self.knowledge_base_size()
        );
        Ok(())
    }
731
    /// Serialize all stored positions (FEN string, evaluation, feature
    /// vector) to an LZ4-compressed bincode file readable by
    /// `load_training_data_binary`.
    ///
    /// # Errors
    /// Fails on serialization, system-clock, or file-write errors.
    pub fn save_training_data_binary<P: AsRef<std::path::Path>>(
        &self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use lz4_flex::compress_prepend_size;

        println!("š¾ Saving training data in binary format (compressed)...");

        // On-disk layout; must stay in sync with the deserializer in
        // `load_training_data_binary`.
        #[derive(serde::Serialize)]
        struct BinaryTrainingData {
            positions: Vec<String>, evaluations: Vec<f32>,
            vectors: Vec<Vec<f32>>, created_at: i64,
        }

        let current_time = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)?
            .as_secs() as i64;

        let mut positions = Vec::with_capacity(self.position_boards.len());
        let mut evaluations = Vec::with_capacity(self.position_boards.len());
        let mut vectors = Vec::with_capacity(self.position_boards.len());

        for (i, board) in self.position_boards.iter().enumerate() {
            if i < self.position_evaluations.len() {
                positions.push(board.to_string());
                evaluations.push(self.position_evaluations[i]);

                // NOTE(review): if `as_slice` returns None (non-contiguous
                // vector) the vectors list falls out of step with positions.
                // Harmless today since loading ignores `vectors`, but confirm.
                if i < self.position_vectors.len() {
                    if let Some(vector_slice) = self.position_vectors[i].as_slice() {
                        vectors.push(vector_slice.to_vec());
                    }
                }
            }
        }

        let binary_data = BinaryTrainingData {
            positions,
            evaluations,
            vectors,
            created_at: current_time,
        };

        // Pipeline: bincode -> LZ4 (size-prepended) -> disk.
        let serialized = bincode::serialize(&binary_data)?;

        let compressed = compress_prepend_size(&serialized);

        std::fs::write(path, &compressed)?;

        println!(
            "ā
Saved {} positions to binary file ({} bytes compressed)",
            binary_data.positions.len(),
            compressed.len()
        );
        Ok(())
    }
796
797 pub fn load_training_data_binary<P: AsRef<std::path::Path>>(
799 &mut self,
800 path: P,
801 ) -> Result<(), Box<dyn std::error::Error>> {
802 use indicatif::{ProgressBar, ProgressStyle};
803 use lz4_flex::decompress_size_prepended;
804
805 println!("š Loading training data from binary format...");
806
807 #[derive(serde::Deserialize)]
808 struct BinaryTrainingData {
809 positions: Vec<String>,
810 evaluations: Vec<f32>,
811 #[allow(dead_code)]
812 vectors: Vec<Vec<f32>>,
813 #[allow(dead_code)]
814 created_at: i64,
815 }
816
817 let existing_size = self.knowledge_base_size();
818
819 let file_size = std::fs::metadata(&path)?.len();
821 println!(
822 "š¦ Reading {} compressed file...",
823 Self::format_bytes(file_size)
824 );
825
826 let compressed_data = std::fs::read(path)?;
827 println!("š Decompressing data...");
828 let serialized = decompress_size_prepended(&compressed_data)?;
829
830 println!("š Deserializing binary data...");
831 let binary_data: BinaryTrainingData = bincode::deserialize(&serialized)?;
832
833 let total_positions = binary_data.positions.len();
834 if total_positions == 0 {
835 println!("ā ļø No positions found in binary file");
836 return Ok(());
837 }
838
839 println!("š Processing {total_positions} positions from binary format...");
840
841 let pb = ProgressBar::new(total_positions as u64);
843 pb.set_style(
844 ProgressStyle::default_bar()
845 .template("ā” Loading positions [{elapsed_precise}] [{bar:40.green/blue}] {pos}/{len} ({percent}%) {msg}")?
846 .progress_chars("āāā")
847 );
848
849 let mut added_count = 0;
850
851 for (i, fen) in binary_data.positions.iter().enumerate() {
853 if i < binary_data.evaluations.len() {
854 if let Ok(board) = fen.parse() {
855 if !self.position_boards.contains(&board) {
857 let mut evaluation = binary_data.evaluations[i];
858
859 if evaluation.abs() > 15.0 {
863 evaluation /= 100.0;
864 }
865
866 self.add_position(&board, evaluation);
867 added_count += 1;
868 }
869 }
870 }
871
872 if i % 1000 == 0 || i == total_positions - 1 {
873 pb.set_position((i + 1) as u64);
874 pb.set_message(format!("{added_count} new positions"));
875 }
876 }
877 pb.finish_with_message(format!("ā
Loaded {added_count} new positions"));
878
879 println!(
880 "šÆ Binary loading complete: {} new positions (total: {})",
881 self.knowledge_base_size() - existing_size,
882 self.knowledge_base_size()
883 );
884 Ok(())
885 }
886
    /// Load training data through a memory-mapped file, auto-detecting the
    /// serialization format: MessagePack, then raw bincode, then LZ4+bincode.
    ///
    /// Requires the `memory_mapped_files` feature of the current tier.
    ///
    /// # Errors
    /// Fails if the feature is unavailable, the file cannot be opened or
    /// mapped, or none of the supported formats deserialize.
    pub fn load_training_data_mmap<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("memory_mapped_files")?;

        use memmap2::Mmap;
        use std::fs::File;

        let path_ref = path.as_ref();
        println!(
            "š Loading training data via memory mapping: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        // SAFETY(review): mapping is `unsafe` because another process could
        // mutate the file while mapped; assumed single-writer here — confirm.
        let mmap = unsafe { Mmap::map(&file)? };

        // Try formats in order; the first successful parse wins.
        if let Ok(data) = rmp_serde::from_slice::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected MessagePack format");
            return self.load_positions_from_tuples(data);
        }

        if let Ok(data) = bincode::deserialize::<Vec<(String, f32)>>(&mmap) {
            println!("š¦ Detected bincode format");
            return self.load_positions_from_tuples(data);
        }

        let decompressed = lz4_flex::decompress_size_prepended(&mmap)?;
        let data: Vec<(String, f32)> = bincode::deserialize(&decompressed)?;
        println!("š¦ Detected LZ4+bincode format");
        self.load_positions_from_tuples(data)
    }
926
927 pub fn load_training_data_msgpack<P: AsRef<Path>>(
930 &mut self,
931 path: P,
932 ) -> Result<(), Box<dyn std::error::Error>> {
933 use std::fs::File;
934 use std::io::BufReader;
935
936 let path_ref = path.as_ref();
937 println!(
938 "š Loading MessagePack training data: {}",
939 path_ref.display()
940 );
941
942 let file = File::open(path_ref)?;
943 let reader = BufReader::new(file);
944 let data: Vec<(String, f32)> = rmp_serde::from_read(reader)?;
945
946 println!("š¦ MessagePack data loaded: {} positions", data.len());
947 self.load_positions_from_tuples(data)
948 }
949
    /// Load newline-delimited JSON (one `{"fen": …, "evaluation": …}` object
    /// per line) using rayon to parse chunks in parallel into a concurrent
    /// map, then bulk-insert the results.
    ///
    /// Lines that fail to parse or lack `fen`/`evaluation` are skipped.
    /// NOTE(review): duplicate FENs keep an arbitrary evaluation (parallel
    /// insert order is nondeterministic) — confirm this is acceptable.
    ///
    /// # Errors
    /// Fails on file open/read errors.
    pub fn load_training_data_streaming_json<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use dashmap::DashMap;
        use rayon::prelude::*;
        use std::fs::File;
        use std::io::{BufRead, BufReader};
        use std::sync::Arc;

        let path_ref = path.as_ref();
        println!(
            "š Loading JSON with streaming parallel processing: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);

        // Chunk size trades rayon scheduling overhead against parallelism.
        let chunk_size = 10000;
        let position_map = Arc::new(DashMap::new());

        // The whole file is materialized before the parallel pass.
        let lines: Vec<String> = reader.lines().collect::<Result<Vec<_>, _>>()?;
        let total_lines = lines.len();

        lines.par_chunks(chunk_size).for_each(|chunk| {
            for line in chunk {
                if let Ok(data) = serde_json::from_str::<serde_json::Value>(line) {
                    if let (Some(fen), Some(eval)) = (
                        data.get("fen").and_then(|v| v.as_str()),
                        data.get("evaluation").and_then(|v| v.as_f64()),
                    ) {
                        position_map.insert(fen.to_string(), eval as f32);
                    }
                }
            }
        });

        println!(
            "š¦ Parallel JSON processing complete: {} positions from {} lines",
            position_map.len(),
            total_lines
        );

        // Unwrap the Arc when this is the last reference; otherwise copy out.
        let data: Vec<(String, f32)> = match Arc::try_unwrap(position_map) {
            Ok(map) => map.into_iter().collect(),
            Err(arc_map) => {
                arc_map
                    .iter()
                    .map(|entry| (entry.key().clone(), *entry.value()))
                    .collect()
            }
        };
        self.load_positions_from_tuples(data)
    }
1012
    /// Load zstd-compressed training data, trying MessagePack first and then
    /// falling back to bincode (the file is re-opened for the second attempt
    /// because the first decoder consumes its reader).
    ///
    /// # Errors
    /// Fails on open/decoder errors or if neither format deserializes.
    pub fn load_training_data_compressed<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::BufReader;

        let path_ref = path.as_ref();
        println!(
            "š Loading zstd compressed training data: {}",
            path_ref.display()
        );

        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;

        // Attempt 1: zstd-wrapped MessagePack.
        if let Ok(data) = rmp_serde::from_read::<_, Vec<(String, f32)>>(decoder) {
            println!("š¦ Zstd+MessagePack data loaded: {} positions", data.len());
            return self.load_positions_from_tuples(data);
        }

        // Attempt 2: zstd-wrapped bincode (fresh decoder over a new handle).
        let file = File::open(path_ref)?;
        let reader = BufReader::new(file);
        let decoder = zstd::stream::Decoder::new(reader)?;
        let data: Vec<(String, f32)> = bincode::deserialize_from(decoder)?;

        println!("š¦ Zstd+bincode data loaded: {} positions", data.len());
        self.load_positions_from_tuples(data)
    }
1047
1048 fn load_positions_from_tuples(
1051 &mut self,
1052 data: Vec<(String, f32)>,
1053 ) -> Result<(), Box<dyn std::error::Error>> {
1054 use indicatif::{ProgressBar, ProgressStyle};
1055 use std::collections::HashSet;
1056
1057 let existing_size = self.knowledge_base_size();
1058 let mut seen_positions = HashSet::new();
1059 let mut loaded_count = 0;
1060
1061 let pb = ProgressBar::new(data.len() as u64);
1063 pb.set_style(ProgressStyle::with_template(
1064 "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({per_sec}) {msg}"
1065 )?);
1066
1067 for (fen, evaluation) in data {
1068 pb.inc(1);
1069
1070 if seen_positions.contains(&fen) {
1072 continue;
1073 }
1074 seen_positions.insert(fen.clone());
1075
1076 if let Ok(board) = Board::from_str(&fen) {
1078 self.add_position(&board, evaluation);
1079 loaded_count += 1;
1080
1081 if loaded_count % 1000 == 0 {
1082 pb.set_message(format!("Loaded {loaded_count} positions"));
1083 }
1084 }
1085 }
1086
1087 pb.finish_with_message(format!("ā
Loaded {loaded_count} new positions"));
1088
1089 println!(
1090 "šÆ Ultra-fast loading complete: {} new positions (total: {})",
1091 self.knowledge_base_size() - existing_size,
1092 self.knowledge_base_size()
1093 );
1094
1095 Ok(())
1096 }
1097
1098 fn format_bytes(bytes: u64) -> String {
1100 const UNITS: &[&str] = &["B", "KB", "MB", "GB"];
1101 let mut size = bytes as f64;
1102 let mut unit_index = 0;
1103
1104 while size >= 1024.0 && unit_index < UNITS.len() - 1 {
1105 size /= 1024.0;
1106 unit_index += 1;
1107 }
1108
1109 format!("{:.1} {}", size, UNITS[unit_index])
1110 }
1111
1112 pub fn train_from_dataset_incremental(&mut self, dataset: &crate::training::TrainingDataset) {
1114 let _existing_size = self.knowledge_base_size();
1115 let mut added = 0;
1116
1117 for data in &dataset.data {
1118 if !self.position_boards.contains(&data.board) {
1120 self.add_position(&data.board, data.evaluation);
1121 added += 1;
1122 }
1123 }
1124
1125 println!(
1126 "Added {} new positions from dataset (total: {})",
1127 added,
1128 self.knowledge_base_size()
1129 );
1130 }
1131
    /// Snapshot the current knowledge-base size and which optional
    /// subsystems are enabled.
    pub fn training_stats(&self) -> TrainingStats {
        TrainingStats {
            total_positions: self.knowledge_base_size(),
            unique_positions: self.position_boards.len(),
            has_move_data: !self.position_moves.is_empty(),
            move_data_entries: self.position_moves.len(),
            lsh_enabled: self.use_lsh,
            manifold_enabled: self.use_manifold,
            opening_book_enabled: self.opening_book.is_some(),
        }
    }
1144
    /// Probe the working directory for well-known training/puzzle file names
    /// and load every one that exists. Returns the list of loaded paths.
    ///
    /// NOTE(review): "Loading complete" is printed on both success and
    /// failure paths — looks like placeholder/stripped logging; confirm.
    ///
    /// # Errors
    /// Fails only on an invalid progress-bar template; per-file load errors
    /// are swallowed and loading continues.
    pub fn auto_load_training_data(&mut self) -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        // Conventional dataset file names, parsed as full training data.
        let common_files = vec![
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
            "my_training.json",
        ];

        // Puzzle files, parsed via the tactical puzzle parser instead.
        let tactical_files = vec![
            "tactical_puzzles.json",
            "lichess_puzzles.json",
            "my_puzzles.json",
        ];

        // Tag each existing file with how it should be parsed.
        let mut available_files = Vec::new();
        for file_path in &common_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "training"));
            }
        }
        for file_path in &tactical_files {
            if std::path::Path::new(file_path).exists() {
                available_files.push((file_path, "tactical"));
            }
        }

        if available_files.is_empty() {
            return Ok(Vec::new());
        }

        println!(
            "š Found {} training files to auto-load",
            available_files.len()
        );

        let pb = ProgressBar::new(available_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template("š Auto-loading files [{elapsed_precise}] [{bar:40.blue/cyan}] {pos}/{len} {msg}")?
                .progress_chars("āāā")
        );

        let mut loaded_files = Vec::new();

        for (i, (file_path, file_type)) in available_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            let result = match *file_type {
                "training" => self.load_training_data_incremental(file_path).map(|_| {
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                "tactical" => crate::training::TacticalPuzzleParser::load_tactical_puzzles(
                    file_path,
                )
                .map(|puzzles| {
                    crate::training::TacticalPuzzleParser::load_into_engine_incremental(
                        &puzzles, self,
                    );
                    loaded_files.push(file_path.to_string());
                    println!("Loading complete");
                }),
                _ => Ok(()),
            };

            // Per-file errors are intentionally non-fatal; move on.
            if let Err(_e) = result {
                println!("Loading complete");
            }
        }

        pb.set_position(available_files.len() as u64);
        pb.finish_with_message(format!("ā
Auto-loaded {} files", loaded_files.len()));

        Ok(loaded_files)
    }
1227
    /// Load Lichess puzzles (with best moves) via the premium fast loader.
    /// Requires the `ultra_fast_loading` feature of the current tier.
    ///
    /// # Errors
    /// Fails if the feature is unavailable or the CSV cannot be loaded.
    pub fn load_lichess_puzzles_premium<P: AsRef<std::path::Path>>(
        &mut self,
        csv_path: P,
    ) -> Result<(), Box<dyn std::error::Error>> {
        self.require_feature("ultra_fast_loading")?; println!("š„ Loading Lichess puzzles with premium performance...");
        let puzzle_entries =
            crate::lichess_loader::load_lichess_puzzles_premium_with_moves(csv_path)?;

        // Store each position with its best move; the puzzle evaluation
        // doubles as the move's score.
        for (board, evaluation, best_move) in puzzle_entries {
            self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
        }

        println!("ā
Premium Lichess puzzle loading complete!");
        Ok(())
    }
1246
    /// Load Lichess puzzles (with best moves) via the basic-tier loader,
    /// capped at `max_puzzles` entries. No feature gate is applied.
    ///
    /// # Errors
    /// Fails if the CSV cannot be loaded or parsed.
    pub fn load_lichess_puzzles_basic<P: AsRef<std::path::Path>>(
        &mut self,
        csv_path: P,
        max_puzzles: usize,
    ) -> Result<(), Box<dyn std::error::Error>> {
        println!("š Loading Lichess puzzles (basic tier, limited to {max_puzzles} puzzles)...");
        let puzzle_entries =
            crate::lichess_loader::load_lichess_puzzles_basic_with_moves(csv_path, max_puzzles)?;

        // Store each position with its best move; the puzzle evaluation
        // doubles as the move's score.
        for (board, evaluation, best_move) in puzzle_entries {
            self.add_position_with_move(&board, evaluation, Some(best_move), Some(evaluation));
        }

        println!("ā
Basic Lichess puzzle loading complete!");
        Ok(())
    }
1264
    /// Convenience constructor: fresh engine + opening book + training data
    /// auto-loaded from well-known file names in the working directory.
    ///
    /// # Errors
    /// Propagates errors from `auto_load_training_data`.
    pub fn new_with_auto_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        let loaded_files = engine.auto_load_training_data()?;

        if loaded_files.is_empty() {
            println!("š¤ Created fresh engine (no training data found)");
        } else {
            println!(
                "š Created engine with auto-loaded training data from {} files",
                loaded_files.len()
            );
            // NOTE(review): stats are computed but only placeholder messages
            // are printed — looks like stripped/unfinished logging; confirm.
            let _stats = engine.training_stats();
            println!("Loading complete");
            println!("Loading complete");
        }

        Ok(engine)
    }
1287
    /// Fast-start constructor: prefers pre-built binary training files, falls
    /// back to JSON auto-loading, and attempts to restore persistence and
    /// pre-trained manifold models. Subsystem failures are non-fatal.
    ///
    /// # Errors
    /// Fails only on an invalid progress-bar template or a JSON auto-load
    /// error; binary-file and model-load failures are swallowed.
    pub fn new_with_fast_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Best-effort persistence; the engine still works in-memory if the
        // database cannot be opened.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        // Known binary training files, checked in priority order.
        let binary_files = [
            "training_data_a100.bin", "training_data.bin",
            "tactical_training_data.bin",
            "engine_training.bin",
            "chess_training.bin",
        ];

        let existing_binary_files: Vec<_> = binary_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        let mut loaded_count = 0;

        if !existing_binary_files.is_empty() {
            println!(
                "ā” Fast loading: Found {} binary files",
                existing_binary_files.len()
            );

            let pb = ProgressBar::new(existing_binary_files.len() as u64);
            pb.set_style(
                ProgressStyle::default_bar()
                    .template("š Fast loading [{elapsed_precise}] [{bar:40.green/cyan}] {pos}/{len} {msg}")?
                    .progress_chars("āāā")
            );

            for (i, file_path) in existing_binary_files.iter().enumerate() {
                pb.set_position(i as u64);
                pb.set_message("Processing...".to_string());

                // Individual file failures are ignored; count only successes.
                if engine.load_training_data_binary(file_path).is_ok() {
                    loaded_count += 1;
                }
            }

            pb.set_position(existing_binary_files.len() as u64);
            pb.finish_with_message(format!("ā
Loaded {loaded_count} binary files"));
        } else {
            println!("š¦ No binary files found, falling back to JSON auto-loading...");
            let _ = engine.auto_load_training_data()?;
        }

        // Pre-trained manifold models are optional; advise how to rebuild.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({e})");
            println!("   Use --rebuild-models flag to train new models");
        }

        let stats = engine.training_stats();
        println!(
            "ā” Fast engine ready with {} positions ({} binary files loaded)",
            stats.total_positions, loaded_count
        );

        Ok(engine)
    }
1362
    /// Construct an engine that discovers all training files in the current
    /// directory, consolidates duplicates by base name (keeping the best
    /// format per dataset), and loads each consolidated dataset.
    pub fn new_with_auto_discovery(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with AUTO-DISCOVERY and format consolidation...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence is best-effort; failure is non-fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        let discovered_files = AutoDiscovery::discover_training_files(".", true)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No training data found. Use convert methods to create optimized files.");
            return Ok(engine);
        }

        // Group files sharing a base name; keep only the preferred format.
        let consolidated = AutoDiscovery::consolidate_by_base_name(discovered_files.clone());

        let mut total_loaded = 0;
        for (base_name, best_file) in &consolidated {
            println!("š Loading {} ({})", base_name, best_file.format);

            // Measure knowledge-base growth to report a per-file count.
            let initial_size = engine.knowledge_base_size();
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            let loaded_count = engine.knowledge_base_size() - initial_size;
            total_loaded += loaded_count;

            println!(" ā
Loaded {loaded_count} positions");
        }

        // Report old-format files; the `true` flag appears to mean dry-run
        // (the hint below says nothing was removed) -- TODO confirm.
        let cleanup_candidates = AutoDiscovery::get_cleanup_candidates(&discovered_files);
        if !cleanup_candidates.is_empty() {
            println!(
                "š§¹ Found {} old format files that can be cleaned up:",
                cleanup_candidates.len()
            );
            AutoDiscovery::cleanup_old_formats(&cleanup_candidates, true)?;
            println!(" š” To actually remove old files, run: cargo run --bin cleanup_formats");
        }

        // Manifold models are optional.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({e})");
        }

        println!(
            "šÆ Engine ready: {} positions loaded from {} datasets",
            total_loaded,
            consolidated.len()
        );
        Ok(engine)
    }
1422
    /// Construct an engine that loads only the single first-discovered
    /// training file, or a tiny built-in starter dataset when none exist.
    pub fn new_with_instant_load(vector_size: usize) -> Result<Self, Box<dyn std::error::Error>> {
        println!("š Initializing engine with INSTANT loading...");
        let mut engine = Self::new(vector_size);
        engine.enable_opening_book();

        // Persistence is best-effort; failure is non-fatal.
        if let Err(_e) = engine.enable_persistence("chess_vector_engine.db") {
            println!("Loading complete");
        }

        let discovered_files = AutoDiscovery::discover_training_files(".", false)?;

        if discovered_files.is_empty() {
            println!("ā¹ļø No user training data found, loading starter dataset...");
            // The starter dataset is also best-effort; an empty engine works.
            if let Err(_e) = engine.load_starter_dataset() {
                println!("Loading complete");
                println!("ā¹ļø Starting with empty engine");
            } else {
                println!(
                    "ā
Loaded starter dataset with {} positions",
                    engine.knowledge_base_size()
                );
            }
            return Ok(engine);
        }

        // Load only the first discovered file; presumably discovery orders
        // files best-first -- TODO confirm ordering of discover_training_files.
        if let Some(best_file) = discovered_files.first() {
            println!(
                "ā” Loading {} format: {}",
                best_file.format,
                best_file.path.display()
            );
            engine.load_file_by_format(&best_file.path, &best_file.format)?;
            println!(
                "ā
Loaded {} positions from {} format",
                engine.knowledge_base_size(),
                best_file.format
            );
        }

        // Manifold models are optional.
        if let Err(e) = engine.load_manifold_models() {
            println!("ā ļø No pre-trained manifold models found ({e})");
        }

        println!(
            "šÆ Engine ready: {} positions loaded",
            engine.knowledge_base_size()
        );
        Ok(engine)
    }
1479
1480 pub fn new_with_license(vector_size: usize, license_url: String) -> Self {
1482 let mut engine = Self::new(vector_size);
1483 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new(license_url));
1484 engine
1485 }
1486
1487 pub fn new_with_offline_license(vector_size: usize) -> Self {
1489 let mut engine = Self::new(vector_size);
1490 engine.licensed_feature_checker = Some(LicensedFeatureChecker::new_offline());
1491 engine
1492 }
1493
1494 pub async fn activate_license(&mut self, key: &str) -> Result<FeatureTier, LicenseError> {
1496 if let Some(ref mut checker) = self.licensed_feature_checker {
1497 let tier = checker.activate_license(key).await?;
1498 self.feature_checker.upgrade_tier(tier.clone());
1500 Ok(tier)
1501 } else {
1502 Err(LicenseError::InvalidFormat(
1503 "No license checker initialized".to_string(),
1504 ))
1505 }
1506 }
1507
1508 pub async fn check_licensed_feature(&mut self, feature: &str) -> Result<(), FeatureError> {
1510 if let Some(ref mut checker) = self.licensed_feature_checker {
1511 checker.check_feature(feature).await
1512 } else {
1513 self.feature_checker.check_feature(feature)
1515 }
1516 }
1517
1518 pub fn load_license_cache<P: AsRef<std::path::Path>>(
1520 &mut self,
1521 path: P,
1522 ) -> Result<(), Box<dyn std::error::Error>> {
1523 if let Some(ref mut checker) = self.licensed_feature_checker {
1524 checker.load_cache(path)?;
1525 }
1526 Ok(())
1527 }
1528
1529 pub fn save_license_cache<P: AsRef<std::path::Path>>(
1531 &self,
1532 path: P,
1533 ) -> Result<(), Box<dyn std::error::Error>> {
1534 if let Some(ref checker) = self.licensed_feature_checker {
1535 checker.save_cache(path)?;
1536 }
1537 Ok(())
1538 }
1539
1540 fn is_position_safe(&self, board: &Board) -> bool {
1545 match std::panic::catch_unwind(|| {
1547 use chess::MoveGen;
1548 let _legal_moves: Vec<ChessMove> = MoveGen::new_legal(board).collect();
1549 true
1550 }) {
1551 Ok(_) => true,
1552 Err(_) => {
1553 false
1555 }
1556 }
1557 }
1558
1559 pub fn check_gpu_acceleration(&self) -> Result<(), Box<dyn std::error::Error>> {
1561 self.feature_checker.check_feature("gpu_acceleration")?;
1562
1563 match crate::gpu_acceleration::GPUAccelerator::new() {
1565 Ok(_) => {
1566 println!("š„ GPU acceleration available and ready");
1567 Ok(())
1568 }
1569 Err(_e) => Err("Processing...".to_string().into()),
1570 }
1571 }
1572
    /// Seed the engine with a small starter dataset so a fresh engine has at
    /// least a few known positions. Prefers `training_data/starter_dataset.json`
    /// on disk, falling back to an embedded three-position JSON sample.
    pub fn load_starter_dataset(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        // Prefer the on-disk dataset; otherwise use the embedded literal.
        let starter_data = if let Ok(file_content) =
            std::fs::read_to_string("training_data/starter_dataset.json")
        {
            file_content
        } else {
            r#"[
    {
        "fen": "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1",
        "evaluation": 0.0,
        "best_move": null,
        "depth": 0
    },
    {
        "fen": "rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1",
        "evaluation": 0.1,
        "best_move": "e7e5",
        "depth": 2
    },
    {
        "fen": "rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq e6 0 2",
        "evaluation": 0.0,
        "best_move": "g1f3",
        "depth": 2
    }
]"#
            .to_string()
        };

        let training_data: Vec<serde_json::Value> = serde_json::from_str(&starter_data)?;

        // Entries missing "fen"/"evaluation" or with bad FEN are skipped.
        for entry in training_data {
            if let (Some(fen), Some(evaluation)) = (entry.get("fen"), entry.get("evaluation")) {
                if let (Some(fen_str), Some(eval_f64)) = (fen.as_str(), evaluation.as_f64()) {
                    match chess::Board::from_str(fen_str) {
                        Ok(board) => {
                            let mut eval = eval_f64 as f32;

                            // Magnitudes beyond 15 are presumably centipawns;
                            // rescale to pawn units -- TODO confirm convention.
                            if eval.abs() > 15.0 {
                                eval /= 100.0;
                            }

                            self.add_position(&board, eval);
                        }
                        Err(_) => {
                            // Unparsable FEN: skip silently.
                            continue;
                        }
                    }
                }
            }
        }

        Ok(())
    }
1634
1635 fn load_file_by_format(
1637 &mut self,
1638 path: &std::path::Path,
1639 format: &str,
1640 ) -> Result<(), Box<dyn std::error::Error>> {
1641 let file_size = std::fs::metadata(path)?.len();
1643
1644 if file_size > 10_000_000 {
1646 println!(
1647 "š Large file detected ({:.1} MB) - using ultra-fast loader",
1648 file_size as f64 / 1_000_000.0
1649 );
1650 return self.ultra_fast_load_any_format(path);
1651 }
1652
1653 match format {
1655 "MMAP" => self.load_training_data_mmap(path),
1656 "MSGPACK" => self.load_training_data_msgpack(path),
1657 "BINARY" => self.load_training_data_streaming_binary(path),
1658 "ZSTD" => self.load_training_data_compressed(path),
1659 "JSON" => self.load_training_data_streaming_json_v2(path),
1660 _ => Err("Processing...".to_string().into()),
1661 }
1662 }
1663
1664 pub fn ultra_fast_load_any_format<P: AsRef<std::path::Path>>(
1666 &mut self,
1667 path: P,
1668 ) -> Result<(), Box<dyn std::error::Error>> {
1669 self.require_feature("ultra_fast_loading")?;
1671
1672 let mut loader = UltraFastLoader::new_for_massive_datasets();
1673 loader.ultra_load_binary(path, self)?;
1674
1675 let stats = loader.get_stats();
1676 println!("š Ultra-fast loading complete:");
1677 println!(" ā
Loaded: {} positions", stats.loaded);
1678 println!("Loading complete");
1679 println!("Loading complete");
1680 println!(" š Success rate: {:.1}%", stats.success_rate() * 100.0);
1681
1682 Ok(())
1683 }
1684
1685 pub fn load_training_data_streaming_binary<P: AsRef<std::path::Path>>(
1688 &mut self,
1689 path: P,
1690 ) -> Result<(), Box<dyn std::error::Error>> {
1691 let mut loader = StreamingLoader::new();
1692 loader.stream_load_binary(path, self)?;
1693
1694 println!("š Streaming binary load complete:");
1695 println!(" Loaded: {} new positions", loader.loaded_count);
1696 println!("Loading complete");
1697 println!("Loading complete");
1698
1699 Ok(())
1700 }
1701
1702 pub fn load_training_data_streaming_json_v2<P: AsRef<std::path::Path>>(
1705 &mut self,
1706 path: P,
1707 ) -> Result<(), Box<dyn std::error::Error>> {
1708 let mut loader = StreamingLoader::new();
1709
1710 let batch_size = if std::fs::metadata(path.as_ref())?.len() > 100_000_000 {
1712 20000 } else {
1715 5000 };
1717
1718 loader.stream_load_json(path, self, batch_size)?;
1719
1720 println!("š Streaming JSON load complete:");
1721 println!(" Loaded: {} new positions", loader.loaded_count);
1722 println!("Loading complete");
1723 println!("Loading complete");
1724
1725 Ok(())
1726 }
1727
1728 pub fn new_for_massive_datasets(
1731 vector_size: usize,
1732 ) -> Result<Self, Box<dyn std::error::Error>> {
1733 println!("š Initializing engine for MASSIVE datasets (100k-1M+ positions)...");
1734 let mut engine = Self::new(vector_size);
1735 engine.enable_opening_book();
1736
1737 let discovered_files = AutoDiscovery::discover_training_files(".", false)?;
1739
1740 if discovered_files.is_empty() {
1741 println!("ā¹ļø No training data found");
1742 return Ok(engine);
1743 }
1744
1745 let largest_file = discovered_files
1747 .iter()
1748 .max_by_key(|f| f.size_bytes)
1749 .unwrap();
1750
1751 println!(
1752 "šÆ Loading largest dataset: {} ({} bytes)",
1753 largest_file.path.display(),
1754 largest_file.size_bytes
1755 );
1756
1757 engine.ultra_fast_load_any_format(&largest_file.path)?;
1759
1760 println!(
1761 "šÆ Engine ready: {} positions loaded",
1762 engine.knowledge_base_size()
1763 );
1764 Ok(engine)
1765 }
1766
    /// Convert known JSON training files into MessagePack siblings
    /// (`*.msgpack`), normalizing each entry to a `(fen, evaluation)` pair.
    ///
    /// Two JSON layouts are accepted: an array of `[fen, eval, ...]` tuples,
    /// or an array of `{"fen": ..., "evaluation": ...}` objects.
    pub fn convert_to_msgpack() -> Result<(), Box<dyn std::error::Error>> {
        use serde_json::Value;
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Ensure the A100 binary dump has a JSON form we can convert.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        let input_files = [
            "training_data.json",
            "tactical_training_data.json",
            "training_data_a100.json",
        ];

        for input_file in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            let output_file_path = input_file.replace(".json", ".msgpack");
            println!("š Converting {input_file} ā {output_file_path} (MessagePack format)");

            let file = File::open(input_path)?;
            let reader = BufReader::new(file);
            let json_value: Value = serde_json::from_reader(reader)?;

            // Normalize whichever supported layout into (fen, eval) pairs;
            // malformed rows are silently dropped by filter_map.
            let data: Vec<(String, f32)> = match json_value {
                Value::Array(arr) if !arr.is_empty() => {
                    if let Some(first) = arr.first() {
                        if first.is_array() {
                            // Tuple layout: [fen, eval, ...].
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Array(tuple) = item {
                                        if tuple.len() >= 2 {
                                            let fen = tuple[0].as_str()?.to_string();
                                            let mut eval = tuple[1].as_f64()? as f32;

                                            // Magnitudes beyond 15 are presumably
                                            // centipawns; rescale to pawn units.
                                            if eval.abs() > 15.0 {
                                                eval /= 100.0;
                                            }

                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else if first.is_object() {
                            // Object layout: {"fen": ..., "evaluation": ...}.
                            arr.into_iter()
                                .filter_map(|item| {
                                    if let Value::Object(obj) = item {
                                        let fen = obj.get("fen")?.as_str()?.to_string();
                                        let mut eval = obj.get("evaluation")?.as_f64()? as f32;

                                        if eval.abs() > 15.0 {
                                            eval /= 100.0;
                                        }

                                        Some((fen, eval))
                                    } else {
                                        None
                                    }
                                })
                                .collect()
                        } else {
                            // First element is neither array nor object.
                            return Err("Processing...".to_string().into());
                        }
                    } else {
                        Vec::new()
                    }
                }
                // Top-level value is not a non-empty array.
                _ => return Err("Processing...".to_string().into()),
            };

            if data.is_empty() {
                println!("Loading complete");
                continue;
            }

            let output_file = File::create(&output_file_path)?;
            let mut writer = BufWriter::new(output_file);
            rmp_serde::encode::write(&mut writer, &data)?;

            // Report the achieved size reduction.
            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(&output_file_path).metadata()?.len();
            let ratio = input_size as f64 / output_size as f64;

            println!(
                "ā
Converted: {} ā {} ({:.1}x size reduction, {} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                ratio,
                data.len()
            );
        }

        Ok(())
    }
1884
1885 pub fn convert_a100_binary_to_json() -> Result<(), Box<dyn std::error::Error>> {
1887 use std::fs::File;
1888 use std::io::BufWriter;
1889
1890 let binary_path = "training_data_a100.bin";
1891 let json_path = "training_data_a100.json";
1892
1893 if !std::path::Path::new(binary_path).exists() {
1894 println!("Loading complete");
1895 return Ok(());
1896 }
1897
1898 println!("š Converting A100 binary data {binary_path} ā {json_path} (JSON format)");
1899
1900 let mut engine = ChessVectorEngine::new(1024);
1902 engine.load_training_data_binary(binary_path)?;
1903
1904 let mut data = Vec::new();
1906 for (i, board) in engine.position_boards.iter().enumerate() {
1907 if i < engine.position_evaluations.len() {
1908 data.push(serde_json::json!({
1909 "fen": board.to_string(),
1910 "evaluation": engine.position_evaluations[i],
1911 "depth": 15,
1912 "game_id": i
1913 }));
1914 }
1915 }
1916
1917 let file = File::create(json_path)?;
1919 let writer = BufWriter::new(file);
1920 serde_json::to_writer(writer, &data)?;
1921
1922 println!(
1923 "ā
Converted A100 data: {} positions ā {}",
1924 data.len(),
1925 json_path
1926 );
1927 Ok(())
1928 }
1929
1930 pub fn convert_to_zstd() -> Result<(), Box<dyn std::error::Error>> {
1933 use std::fs::File;
1934 use std::io::{BufReader, BufWriter};
1935
1936 if std::path::Path::new("training_data_a100.bin").exists() {
1938 Self::convert_a100_binary_to_json()?;
1939 }
1940
1941 let input_files = [
1942 ("training_data.json", "training_data.zst"),
1943 ("tactical_training_data.json", "tactical_training_data.zst"),
1944 ("training_data_a100.json", "training_data_a100.zst"),
1945 ("training_data.bin", "training_data.bin.zst"),
1946 (
1947 "tactical_training_data.bin",
1948 "tactical_training_data.bin.zst",
1949 ),
1950 ("training_data_a100.bin", "training_data_a100.bin.zst"),
1951 ];
1952
1953 for (input_file, output_file) in &input_files {
1954 let input_path = std::path::Path::new(input_file);
1955 if !input_path.exists() {
1956 continue;
1957 }
1958
1959 println!("š Converting {input_file} ā {output_file} (Zstd compression)");
1960
1961 let input_file = File::open(input_path)?;
1962 let output_file_handle = File::create(output_file)?;
1963 let writer = BufWriter::new(output_file_handle);
1964 let mut encoder = zstd::stream::Encoder::new(writer, 9)?; std::io::copy(&mut BufReader::new(input_file), &mut encoder)?;
1967 encoder.finish()?;
1968
1969 let input_size = input_path.metadata()?.len();
1970 let output_size = std::path::Path::new(output_file).metadata()?.len();
1971 let ratio = input_size as f64 / output_size as f64;
1972
1973 println!(
1974 "ā
Compressed: {} ā {} ({:.1}x size reduction)",
1975 Self::format_bytes(input_size),
1976 Self::format_bytes(output_size),
1977 ratio
1978 );
1979 }
1980
1981 Ok(())
1982 }
1983
    /// Convert known JSON/MessagePack training files into `.mmap` files.
    ///
    /// NOTE(review): each dataset's `.json` and `.msgpack` inputs map to the
    /// *same* `.mmap` output name, so when both exist the later conversion in
    /// the list overwrites the earlier one's result -- confirm this is intended.
    ///
    /// NOTE(review): the output is written with `rmp_serde` just like the
    /// MessagePack converter; presumably the loader memory-maps it at read
    /// time -- verify against `load_training_data_mmap`.
    pub fn convert_to_mmap() -> Result<(), Box<dyn std::error::Error>> {
        use std::fs::File;
        use std::io::{BufReader, BufWriter};

        // Ensure the A100 binary dump has a JSON form we can convert.
        if std::path::Path::new("training_data_a100.bin").exists() {
            Self::convert_a100_binary_to_json()?;
        }

        let input_files = [
            ("training_data.json", "training_data.mmap"),
            ("tactical_training_data.json", "tactical_training_data.mmap"),
            ("training_data_a100.json", "training_data_a100.mmap"),
            ("training_data.msgpack", "training_data.mmap"),
            (
                "tactical_training_data.msgpack",
                "tactical_training_data.mmap",
            ),
            ("training_data_a100.msgpack", "training_data_a100.mmap"),
        ];

        for (input_file, output_file) in &input_files {
            let input_path = std::path::Path::new(input_file);
            if !input_path.exists() {
                continue;
            }

            println!("š Converting {input_file} ā {output_file} (Memory-mapped format)");

            // Normalize the input (JSON tuple/object layouts, or MessagePack)
            // into (fen, eval) pairs.
            let data: Vec<(String, f32)> = if input_file.ends_with(".json") {
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                let json_value: Value = serde_json::from_reader(reader)?;

                match json_value {
                    Value::Array(arr) if !arr.is_empty() => {
                        if let Some(first) = arr.first() {
                            if first.is_array() {
                                // Tuple layout: [fen, eval, ...].
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Array(tuple) = item {
                                            if tuple.len() >= 2 {
                                                let fen = tuple[0].as_str()?.to_string();
                                                let mut eval = tuple[1].as_f64()? as f32;

                                                // Magnitudes beyond 15 are
                                                // presumably centipawns.
                                                if eval.abs() > 15.0 {
                                                    eval /= 100.0;
                                                }

                                                Some((fen, eval))
                                            } else {
                                                None
                                            }
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else if first.is_object() {
                                // Object layout: {"fen": ..., "evaluation": ...}.
                                arr.into_iter()
                                    .filter_map(|item| {
                                        if let Value::Object(obj) = item {
                                            let fen = obj.get("fen")?.as_str()?.to_string();
                                            let mut eval = obj.get("evaluation")?.as_f64()? as f32;

                                            if eval.abs() > 15.0 {
                                                eval /= 100.0;
                                            }

                                            Some((fen, eval))
                                        } else {
                                            None
                                        }
                                    })
                                    .collect()
                            } else {
                                return Err("Failed to process training data".into());
                            }
                        } else {
                            Vec::new()
                        }
                    }
                    _ => return Err("Processing...".to_string().into()),
                }
            } else if input_file.ends_with(".msgpack") {
                // MessagePack input deserializes directly into pairs.
                let file = File::open(input_path)?;
                let reader = BufReader::new(file);
                rmp_serde::from_read(reader)?
            } else {
                return Err("Unsupported input format for memory mapping".into());
            };

            let output_file_handle = File::create(output_file)?;
            let mut writer = BufWriter::new(output_file_handle);
            rmp_serde::encode::write(&mut writer, &data)?;

            let input_size = input_path.metadata()?.len();
            let output_size = std::path::Path::new(output_file).metadata()?.len();

            println!(
                "ā
Memory-mapped file created: {} ā {} ({} positions)",
                Self::format_bytes(input_size),
                Self::format_bytes(output_size),
                data.len()
            );
        }

        Ok(())
    }
2106
    /// Convert every known JSON training file into the binary format used by
    /// the fast loaders, returning the paths of the binary files created.
    ///
    /// Per-file failures are reported and skipped; they do not abort the run.
    pub fn convert_json_to_binary() -> Result<Vec<String>, Box<dyn std::error::Error>> {
        use indicatif::{ProgressBar, ProgressStyle};

        let json_files = [
            "training_data.json",
            "tactical_training_data.json",
            "engine_training.json",
            "chess_training.json",
        ];

        let existing_json_files: Vec<_> = json_files
            .iter()
            .filter(|&file_path| std::path::Path::new(file_path).exists())
            .collect();

        if existing_json_files.is_empty() {
            println!("ā¹ļø No JSON training files found to convert");
            return Ok(Vec::new());
        }

        println!(
            "š Converting {} JSON files to binary format...",
            existing_json_files.len()
        );

        let pb = ProgressBar::new(existing_json_files.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template(
                    "š¦ Converting [{elapsed_precise}] [{bar:40.yellow/blue}] {pos}/{len} {msg}",
                )?
                .progress_chars("āāā"),
        );

        let mut converted_files = Vec::new();

        for (i, json_file) in existing_json_files.iter().enumerate() {
            pb.set_position(i as u64);
            pb.set_message("Processing...".to_string());

            let binary_file = std::path::Path::new(json_file).with_extension("bin");

            // Round-trip through a throwaway engine: load JSON, save binary.
            let mut temp_engine = Self::new(1024);
            if temp_engine
                .load_training_data_incremental(json_file)
                .is_ok()
            {
                if temp_engine.save_training_data_binary(&binary_file).is_ok() {
                    converted_files.push(binary_file.to_string_lossy().to_string());
                    println!("ā
Converted {json_file} to binary format");
                } else {
                    println!("Loading complete");
                }
            } else {
                println!("Loading complete");
            }
        }

        pb.set_position(existing_json_files.len() as u64);
        pb.finish_with_message(format!("ā
Converted {} files", converted_files.len()));

        if !converted_files.is_empty() {
            println!("š Binary conversion complete! Startup will be 5-15x faster next time.");
            println!("š Conversion summary:");
            for _conversion in &converted_files {
                println!("Loading complete");
            }
        }

        Ok(converted_files)
    }
2182
2183 pub fn is_lsh_enabled(&self) -> bool {
2185 self.use_lsh
2186 }
2187
2188 pub fn lsh_stats(&self) -> Option<crate::lsh::LSHStats> {
2190 self.lsh_index.as_ref().map(|lsh| lsh.stats())
2191 }
2192
2193 pub fn enable_manifold_learning(&mut self, compression_ratio: f32) -> Result<(), String> {
2195 let input_dim = self.encoder.vector_size();
2196 let output_dim = ((input_dim as f32) / compression_ratio) as usize;
2197
2198 if output_dim == 0 {
2199 return Err("Compression ratio too high, output dimension would be 0".to_string());
2200 }
2201
2202 let mut learner = ManifoldLearner::new(input_dim, output_dim);
2203 learner.init_network()?;
2204
2205 self.manifold_learner = Some(learner);
2206 self.manifold_similarity_search = Some(SimilaritySearch::new(output_dim));
2207 self.use_manifold = false; Ok(())
2210 }
2211
    /// Train the enabled manifold learner on every vector currently in the
    /// knowledge base, then rebuild the compressed indices and switch
    /// similarity search over to the compressed space.
    ///
    /// Errors if manifold learning is not enabled or the knowledge base is
    /// empty.
    pub fn train_manifold_learning(&mut self, epochs: usize) -> Result<(), String> {
        if self.manifold_learner.is_none() {
            return Err(
                "Manifold learning not enabled. Call enable_manifold_learning first.".to_string(),
            );
        }

        if self.similarity_search.size() == 0 {
            return Err("No positions in knowledge base to train on.".to_string());
        }

        // Materialize the whole knowledge base as one (rows x cols) matrix;
        // missing rows (should not happen) are zero-filled.
        let rows = self.similarity_search.size();
        let cols = self.encoder.vector_size();

        let training_matrix = Array2::from_shape_fn((rows, cols), |(row, col)| {
            if let Some((vector, _)) = self.similarity_search.get_position_ref(row) {
                vector[col]
            } else {
                0.0
            }
        });

        if let Some(ref mut learner) = self.manifold_learner {
            learner.train(&training_matrix, epochs)?;
            let compression_ratio = learner.compression_ratio();

            // Explicitly end the mutable borrow of the learner so
            // rebuild_manifold_indices() can re-borrow self below.
            let _ = learner;

            self.rebuild_manifold_indices()?;
            // Training succeeded: route lookups through the compressed space.
            self.use_manifold = true;

            println!(
                "Manifold learning training completed. Compression ratio: {compression_ratio:.1}x"
            );
        }

        Ok(())
    }
2255
    /// Recreate the compressed similarity index (and compressed LSH index, if
    /// present) by re-encoding every stored vector through the manifold
    /// learner. A no-op when no learner is configured.
    fn rebuild_manifold_indices(&mut self) -> Result<(), String> {
        if let Some(ref learner) = self.manifold_learner {
            let output_dim = learner.output_dim();
            // Reset both compressed indices to empty structures of the
            // learner's output width.
            if let Some(ref mut search) = self.manifold_similarity_search {
                *search = SimilaritySearch::new(output_dim);
            }
            if let Some(ref mut lsh) = self.manifold_lsh_index {
                // NOTE(review): the rebuild hardcodes 8 tables / hash size 16,
                // discarding any custom parameters previously passed to
                // enable_manifold_lsh -- confirm this is intended.
                *lsh = LSH::new(output_dim, 8, 16);
            }

            // Re-encode every full-size vector into the compressed space and
            // repopulate both indices.
            for (vector, eval) in self.similarity_search.iter_positions() {
                let compressed = learner.encode(vector);

                if let Some(ref mut search) = self.manifold_similarity_search {
                    search.add_position(compressed.clone(), eval);
                }

                if let Some(ref mut lsh) = self.manifold_lsh_index {
                    lsh.add_vector(compressed, eval);
                }
            }
        }

        Ok(())
    }
2284
2285 pub fn enable_manifold_lsh(
2287 &mut self,
2288 num_tables: usize,
2289 hash_size: usize,
2290 ) -> Result<(), String> {
2291 if self.manifold_learner.is_none() {
2292 return Err("Manifold learning not enabled".to_string());
2293 }
2294
2295 let output_dim = self.manifold_learner.as_ref().unwrap().output_dim();
2296 self.manifold_lsh_index = Some(LSH::new(output_dim, num_tables, hash_size));
2297
2298 if self.use_manifold {
2300 self.rebuild_manifold_indices()?;
2301 }
2302
2303 Ok(())
2304 }
2305
2306 pub fn is_manifold_enabled(&self) -> bool {
2308 self.use_manifold && self.manifold_learner.is_some()
2309 }
2310
2311 pub fn manifold_compression_ratio(&self) -> Option<f32> {
2313 self.manifold_learner
2314 .as_ref()
2315 .map(|l| l.compression_ratio())
2316 }
2317
2318 pub fn load_manifold_models(&mut self) -> Result<(), Box<dyn std::error::Error>> {
2321 if let Some(ref db) = self.database {
2322 match crate::manifold_learner::ManifoldLearner::load_from_database(db)? {
2323 Some(learner) => {
2324 let compression_ratio = learner.compression_ratio();
2325 println!(
2326 "š§ Loaded pre-trained manifold learner (compression: {compression_ratio:.1}x)"
2327 );
2328
2329 self.manifold_learner = Some(learner);
2331 self.use_manifold = true;
2332
2333 self.rebuild_manifold_indices()?;
2335
2336 println!("ā
Manifold learning enabled with compressed vectors");
2337 Ok(())
2338 }
2339 None => Err("No pre-trained manifold models found in database".into()),
2340 }
2341 } else {
2342 Err("Database not initialized - cannot load manifold models".into())
2343 }
2344 }
2345
2346 pub fn enable_opening_book(&mut self) {
2348 self.opening_book = Some(OpeningBook::with_standard_openings());
2349 }
2350
2351 pub fn set_opening_book(&mut self, book: OpeningBook) {
2353 self.opening_book = Some(book);
2354 }
2355
2356 pub fn is_opening_position(&self, board: &Board) -> bool {
2358 self.opening_book
2359 .as_ref()
2360 .map(|book| book.contains(board))
2361 .unwrap_or(false)
2362 }
2363
2364 pub fn get_opening_entry(&self, board: &Board) -> Option<&OpeningEntry> {
2366 self.opening_book.as_ref()?.lookup(board)
2367 }
2368
2369 pub fn opening_book_stats(&self) -> Option<OpeningBookStats> {
2371 self.opening_book.as_ref().map(|book| book.stats())
2372 }
2373
2374 pub fn add_position_with_move(
2376 &mut self,
2377 board: &Board,
2378 evaluation: f32,
2379 chess_move: Option<ChessMove>,
2380 move_outcome: Option<f32>,
2381 ) {
2382 let position_index = self.knowledge_base_size();
2383
2384 self.add_position(board, evaluation);
2386
2387 if let (Some(mov), Some(outcome)) = (chess_move, move_outcome) {
2389 self.position_moves
2390 .entry(position_index)
2391 .or_default()
2392 .push((mov, outcome));
2393 }
2394 }
2395
    /// Recommend up to `num_recommendations` moves for `board`.
    ///
    /// Resolution order:
    /// 1. Opening-book entries, when the position is known theory.
    /// 2. Moves recorded on similar knowledge-base positions, weighted by
    ///    similarity.
    /// 3. Tactical search (when enabled) plus heuristically ordered legal
    ///    moves as a last-resort fallback.
    pub fn recommend_moves(
        &mut self,
        board: &Board,
        num_recommendations: usize,
    ) -> Vec<MoveRecommendation> {
        // 1) Opening book: use book moves with slightly damped confidence.
        if let Some(entry) = self.get_opening_entry(board) {
            let mut recommendations = Vec::new();

            for (chess_move, strength) in &entry.best_moves {
                recommendations.push(MoveRecommendation {
                    chess_move: *chess_move,
                    // Damp book strength so it never reads as certainty.
                    confidence: strength * 0.9,
                    from_similar_position_count: 1,
                    average_outcome: entry.evaluation,
                });
            }

            recommendations.sort_by(|a, b| {
                b.confidence
                    .partial_cmp(&a.confidence)
                    .unwrap_or(std::cmp::Ordering::Equal)
            });
            recommendations.truncate(num_recommendations);
            return recommendations;
        }

        // 2) Collect (similarity, outcome) samples per move from the 20 most
        //    similar known positions.
        let similar_positions = self.find_similar_positions_with_indices(board, 20);

        let mut move_data: HashMap<ChessMove, Vec<(f32, f32)>> = HashMap::new();

        use chess::MoveGen;
        // Move generation can panic on corrupt boards; return no
        // recommendations rather than crash.
        let legal_moves: Vec<ChessMove> = match std::panic::catch_unwind(|| {
            MoveGen::new_legal(board).collect::<Vec<ChessMove>>()
        }) {
            Ok(moves) => moves,
            Err(_) => {
                return Vec::new();
            }
        };

        for (position_index, _eval, similarity) in similar_positions {
            if let Some(moves) = self.position_moves.get(&position_index) {
                for &(chess_move, outcome) in moves {
                    // Only consider moves that are legal in *this* position.
                    if legal_moves.contains(&chess_move) {
                        move_data
                            .entry(chess_move)
                            .or_default()
                            .push((similarity, outcome));
                    }
                }
            }
        }

        // 3) Fallbacks when no similar position contributed any move.
        if move_data.is_empty() {
            if let Some(ref mut tactical_search) = self.tactical_search {
                let tactical_result = tactical_search.search(board);

                // Seed the tactical best move with fixed 0.75 confidence.
                if let Some(best_move) = tactical_result.best_move {
                    move_data.insert(best_move, vec![(0.75, tactical_result.evaluation)]);
                }

                // Order legal moves: captures first, then by destination
                // centrality (see move_centrality).
                let mut ordered_moves = legal_moves.clone();

                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    // or_insert_with keeps the tactical move's stronger entry.
                    move_data
                        .entry(chess_move)
                        .or_insert_with(|| vec![(0.6, 0.0)]);
                }
            } else {
                // No tactical search available: same capture/centrality
                // ordering, with a lower fixed confidence of 0.3.
                let mut ordered_moves = legal_moves.clone();

                ordered_moves.sort_by(|a, b| {
                    let a_is_capture = board.piece_on(a.get_dest()).is_some();
                    let b_is_capture = board.piece_on(b.get_dest()).is_some();

                    match (a_is_capture, b_is_capture) {
                        (true, false) => std::cmp::Ordering::Less,
                        (false, true) => std::cmp::Ordering::Greater,
                        _ => {
                            let a_centrality = move_centrality(a);
                            let b_centrality = move_centrality(b);
                            b_centrality
                                .partial_cmp(&a_centrality)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        }
                    }
                });

                for chess_move in ordered_moves.into_iter().take(num_recommendations) {
                    move_data.insert(chess_move, vec![(0.3, 0.0)]);
                }
            }
        }

        // Aggregate the samples into one recommendation per move.
        let mut recommendations = Vec::new();

        for (chess_move, outcomes) in move_data {
            if outcomes.is_empty() {
                continue;
            }

            // Similarity-weighted mean of the recorded outcomes.
            let mut weighted_sum = 0.0;
            let mut weight_sum = 0.0;

            for &(similarity, outcome) in &outcomes {
                weighted_sum += similarity * outcome;
                weight_sum += similarity;
            }

            let average_outcome = if weight_sum > 0.0 {
                weighted_sum / weight_sum
            } else {
                0.0
            };

            // Confidence blends mean similarity (80%) with a logarithmic
            // bonus for having more supporting positions (20%), capped at 0.95.
            let avg_similarity =
                outcomes.iter().map(|(s, _)| s).sum::<f32>() / outcomes.len() as f32;
            let position_count_bonus = (outcomes.len() as f32).ln().max(1.0) / 5.0;
            let confidence = (avg_similarity * 0.8 + position_count_bonus * 0.2).min(0.95);
            recommendations.push(MoveRecommendation {
                chess_move,
                confidence: confidence.min(1.0),
                from_similar_position_count: outcomes.len(),
                average_outcome,
            });
        }

        recommendations.sort_by(|a, b| {
            b.confidence
                .partial_cmp(&a.confidence)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        recommendations.truncate(num_recommendations);
        recommendations
    }
2587
2588 pub fn recommend_legal_moves(
2590 &mut self,
2591 board: &Board,
2592 num_recommendations: usize,
2593 ) -> Vec<MoveRecommendation> {
2594 use chess::MoveGen;
2595
2596 let legal_moves: std::collections::HashSet<ChessMove> = MoveGen::new_legal(board).collect();
2598
2599 let all_recommendations = self.recommend_moves(board, num_recommendations * 2); all_recommendations
2603 .into_iter()
2604 .filter(|rec| legal_moves.contains(&rec.chess_move))
2605 .take(num_recommendations)
2606 .collect()
2607 }
2608
2609 pub fn enable_persistence<P: AsRef<Path>>(
2611 &mut self,
2612 db_path: P,
2613 ) -> Result<(), Box<dyn std::error::Error>> {
2614 let database = Database::new(db_path)?;
2615 self.database = Some(database);
2616 println!("Persistence enabled");
2617 Ok(())
2618 }
2619
    /// Persist all in-memory positions (plus LSH and manifold state, when
    /// present) to the attached database in a single batch write.
    ///
    /// # Errors
    /// Fails if persistence was never enabled, the system clock is before the
    /// UNIX epoch, or any database write fails.
    pub fn save_to_database(&self) -> Result<(), Box<dyn std::error::Error>> {
        let db = self
            .database
            .as_ref()
            .ok_or("Database not enabled. Call enable_persistence() first.")?;

        println!("š¾ Saving engine state to database (batch mode)...");

        // One shared timestamp for every row written in this batch.
        let current_time = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)?
            .as_secs() as i64;

        let mut position_data_batch = Vec::with_capacity(self.position_boards.len());

        for (i, board) in self.position_boards.iter().enumerate() {
            // Guard against the parallel vectors being out of sync; rows
            // without a matching vector/evaluation are skipped.
            if i < self.position_vectors.len() && i < self.position_evaluations.len() {
                // NOTE(review): `as_slice()` is only `Some` for contiguous
                // standard-layout arrays; `Array1` built from a `Vec` is, but
                // this `unwrap` would panic otherwise — confirm invariant.
                let vector = self.position_vectors[i].as_slice().unwrap();
                let position_data = PositionData {
                    fen: board.to_string(),
                    vector: vector.iter().map(|&x| x as f64).collect(),
                    evaluation: Some(self.position_evaluations[i] as f64),
                    compressed_vector: None, created_at: current_time,
                };
                position_data_batch.push(position_data);
            }
        }

        if !position_data_batch.is_empty() {
            let saved_count = db.save_positions_batch(&position_data_batch)?;
            println!("š Batch saved {saved_count} positions");
        }

        // Persist the LSH index configuration, if one is active.
        if let Some(ref lsh) = self.lsh_index {
            lsh.save_to_database(db)?;
        }

        // Persist the manifold learner only once it has been trained.
        if let Some(ref learner) = self.manifold_learner {
            if learner.is_trained() {
                learner.save_to_database(db)?;
            }
        }

        // NOTE(review): the leading character and embedded newline below look
        // like a mojibake'd emoji; left byte-identical to preserve output.
        println!("ā
 Engine state saved successfully (batch optimized)");
        Ok(())
    }
2671
    /// Restore engine state (positions, LSH index, manifold learner) from the
    /// attached database into memory.
    ///
    /// Positions whose stored FEN no longer parses are silently skipped.
    ///
    /// # Errors
    /// Fails if persistence was never enabled, a database read fails, or
    /// rebuilding the manifold indices fails.
    pub fn load_from_database(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        let db = self
            .database
            .as_ref()
            .ok_or("Database not enabled. Call enable_persistence() first.")?;

        println!("Loading engine state from database...");

        let positions = db.load_all_positions()?;
        for position_data in positions {
            if let Ok(board) = Board::from_str(&position_data.fen) {
                let vector: Vec<f32> = position_data.vector.iter().map(|&x| x as f32).collect();
                let vector_array = Array1::from(vector);
                let mut evaluation = position_data.evaluation.unwrap_or(0.0) as f32;

                // Heuristic: magnitudes beyond 15 are presumed to be
                // centipawn values from older data; rescale to pawn units.
                if evaluation.abs() > 15.0 {
                    evaluation /= 100.0;
                }

                self.similarity_search
                    .add_position(vector_array.clone(), evaluation);

                // Keep the parallel in-memory stores in lock-step.
                self.position_vectors.push(vector_array);
                self.position_boards.push(board);
                self.position_evaluations.push(evaluation);
            }
        }

        if self.use_lsh {
            // Rebuild the LSH index from the freshly loaded positions.
            let positions_for_lsh: Vec<(Array1<f32>, f32)> = self
                .position_vectors
                .iter()
                .zip(self.position_evaluations.iter())
                .map(|(v, &e)| (v.clone(), e))
                .collect();

            match LSH::load_from_database(db, &positions_for_lsh)? {
                Some(lsh) => {
                    self.lsh_index = Some(lsh);
                    println!("Loaded LSH configuration from database");
                }
                None => {
                    println!("No LSH configuration found in database");
                }
            }
        }

        match ManifoldLearner::load_from_database(db)? {
            Some(learner) => {
                self.manifold_learner = Some(learner);
                // Compressed-space indices must be rebuilt against the
                // just-loaded positions.
                if self.use_manifold {
                    self.rebuild_manifold_indices()?;
                }
                println!("Loaded manifold learner from database");
            }
            None => {
                println!("No manifold learner found in database");
            }
        }

        println!(
            "Engine state loaded successfully ({} positions)",
            self.knowledge_base_size()
        );
        Ok(())
    }
2747
2748 pub fn new_with_persistence<P: AsRef<Path>>(
2750 vector_size: usize,
2751 db_path: P,
2752 ) -> Result<Self, Box<dyn std::error::Error>> {
2753 let mut engine = Self::new(vector_size);
2754 engine.enable_persistence(db_path)?;
2755
2756 match engine.load_from_database() {
2758 Ok(_) => {
2759 println!("Loaded existing engine from database");
2760 }
2761 Err(e) => {
2762 println!("Starting fresh engine (load failed: {e})");
2763 }
2764 }
2765
2766 Ok(engine)
2767 }
2768
2769 pub fn auto_save(&self) -> Result<(), Box<dyn std::error::Error>> {
2771 if self.database.is_some() {
2772 self.save_to_database()?;
2773 }
2774 Ok(())
2775 }
2776
2777 pub fn is_persistence_enabled(&self) -> bool {
2779 self.database.is_some()
2780 }
2781
2782 pub fn database_position_count(&self) -> Result<i64, Box<dyn std::error::Error>> {
2784 let db = self.database.as_ref().ok_or("Database not enabled")?;
2785 Ok(db.get_position_count()?)
2786 }
2787
2788 pub fn enable_tactical_search(&mut self, config: TacticalConfig) {
2790 self.tactical_search = Some(TacticalSearch::new(config));
2791 }
2792
2793 pub fn enable_tactical_search_default(&mut self) {
2795 self.tactical_search = Some(TacticalSearch::new_default());
2796 }
2797
2798 pub fn configure_hybrid_evaluation(&mut self, config: HybridConfig) {
2800 self.hybrid_config = config;
2801 }
2802
2803 pub fn is_tactical_search_enabled(&self) -> bool {
2805 self.tactical_search.is_some()
2806 }
2807
    /// Turn on multi-threaded tactical search using `num_threads` workers.
    ///
    /// Silently does nothing if tactical search has not been enabled yet.
    pub fn enable_parallel_search(&mut self, num_threads: usize) {
        if let Some(ref mut tactical_search) = self.tactical_search {
            tactical_search.config.enable_parallel_search = true;
            tactical_search.config.num_threads = num_threads;
            // NOTE(review): leading characters look like a mojibake'd emoji;
            // string left byte-identical to preserve output.
            println!("š§µ Parallel tactical search enabled with {num_threads} threads");
        }
    }
2816
2817 pub fn is_parallel_search_enabled(&self) -> bool {
2819 self.tactical_search
2820 .as_ref()
2821 .map(|ts| ts.config.enable_parallel_search)
2822 .unwrap_or(false)
2823 }
2824
2825 pub fn hybrid_config(&self) -> &HybridConfig {
2846 &self.hybrid_config
2847 }
2848
2849 pub fn is_opening_book_enabled(&self) -> bool {
2851 self.opening_book.is_some()
2852 }
2853
2854 pub fn self_play_training(
2856 &mut self,
2857 config: training::SelfPlayConfig,
2858 ) -> Result<usize, Box<dyn std::error::Error>> {
2859 let mut trainer = training::SelfPlayTrainer::new(config);
2860 let new_data = trainer.generate_training_data(self);
2861
2862 let positions_added = new_data.data.len();
2863
2864 for data in &new_data.data {
2866 self.add_position(&data.board, data.evaluation);
2867 }
2868
2869 if self.database.is_some() {
2871 match self.save_to_database() {
2872 Ok(_) => println!("š¾ Saved {positions_added} positions to database"),
2873 Err(_e) => println!("Loading complete"),
2874 }
2875 }
2876
2877 println!("š§ Self-play training complete: {positions_added} new positions learned");
2878 Ok(positions_added)
2879 }
2880
2881 pub fn continuous_self_play(
2883 &mut self,
2884 config: training::SelfPlayConfig,
2885 iterations: usize,
2886 save_path: Option<&str>,
2887 ) -> Result<usize, Box<dyn std::error::Error>> {
2888 let mut total_positions = 0;
2889 let mut trainer = training::SelfPlayTrainer::new(config.clone());
2890
2891 println!("š Starting continuous self-play training for {iterations} iterations...");
2892
2893 for iteration in 1..=iterations {
2894 println!("\n--- Self-Play Iteration {iteration}/{iterations} ---");
2895
2896 let new_data = trainer.generate_training_data(self);
2898 let batch_size = new_data.data.len();
2899
2900 for data in &new_data.data {
2902 self.add_position(&data.board, data.evaluation);
2903 }
2904
2905 total_positions += batch_size;
2906
2907 println!(
2908 "ā
Iteration {}: Added {} positions (total: {})",
2909 iteration,
2910 batch_size,
2911 self.knowledge_base_size()
2912 );
2913
2914 if iteration % 5 == 0 || iteration == iterations {
2916 if let Some(path) = save_path {
2918 match self.save_training_data_binary(path) {
2919 Ok(_) => println!("š¾ Progress saved to {path} (binary format)"),
2920 Err(_e) => println!("Loading complete"),
2921 }
2922 }
2923
2924 if self.database.is_some() {
2926 match self.save_to_database() {
2927 Ok(_) => println!(
2928 "š¾ Database synchronized ({} total positions)",
2929 self.knowledge_base_size()
2930 ),
2931 Err(_e) => println!("Loading complete"),
2932 }
2933 }
2934 }
2935
2936 if iteration % 10 == 0
2938 && self.knowledge_base_size() > 5000
2939 && self.manifold_learner.is_some()
2940 {
2941 println!("š§ Retraining manifold learning with new data...");
2942 let _ = self.train_manifold_learning(5);
2943 }
2944 }
2945
2946 println!("\nš Continuous self-play complete: {total_positions} total new positions");
2947 Ok(total_positions)
2948 }
2949
2950 pub fn adaptive_self_play(
2952 &mut self,
2953 base_config: training::SelfPlayConfig,
2954 target_strength: f32,
2955 ) -> Result<usize, Box<dyn std::error::Error>> {
2956 let mut current_config = base_config;
2957 let mut total_positions = 0;
2958 let mut iteration = 1;
2959
2960 println!(
2961 "šÆ Starting adaptive self-play training (target strength: {target_strength:.2})..."
2962 );
2963
2964 loop {
2965 println!("\n--- Adaptive Iteration {iteration} ---");
2966
2967 let positions_added = self.self_play_training(current_config.clone())?;
2969 total_positions += positions_added;
2970
2971 if self.database.is_some() {
2973 match self.save_to_database() {
2974 Ok(_) => println!("š¾ Adaptive training progress saved to database"),
2975 Err(_e) => println!("Loading complete"),
2976 }
2977 }
2978
2979 let current_strength = self.knowledge_base_size() as f32 / 10000.0; println!(
2983 "š Current strength estimate: {current_strength:.2} (target: {target_strength:.2})"
2984 );
2985
2986 if current_strength >= target_strength {
2987 println!("š Target strength reached!");
2988 break;
2989 }
2990
2991 current_config.exploration_factor *= 0.95; current_config.temperature *= 0.98; current_config.games_per_iteration =
2995 (current_config.games_per_iteration as f32 * 1.1) as usize; iteration += 1;
2998
2999 if iteration > 50 {
3000 println!("ā ļø Maximum iterations reached");
3001 break;
3002 }
3003 }
3004
3005 Ok(total_positions)
3006 }
3007}
3008
#[cfg(test)]
mod tests {
    //! Integration-style tests exercising `ChessVectorEngine` end to end:
    //! knowledge-base growth, similarity search, move recommendation,
    //! opening book, manifold learning, LSH, and hybrid evaluation.
    use super::*;
    use chess::Board;

    // A fresh engine starts with an empty knowledge base.
    #[test]
    fn test_engine_creation() {
        let engine = ChessVectorEngine::new(1024);
        assert_eq!(engine.knowledge_base_size(), 0);
    }

    // Adding a position makes it findable via similarity search.
    #[test]
    fn test_add_and_search() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.0);
        assert_eq!(engine.knowledge_base_size(), 1);

        let similar = engine.find_similar_positions(&board, 1);
        assert_eq!(similar.len(), 1);
    }

    // A stored evaluation is recovered exactly for the same position.
    #[test]
    fn test_evaluation() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.add_position(&board, 0.5);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());
        assert!((evaluation.unwrap() - 0.5).abs() < 1e-6);
    }

    // Move recommendations (raw and legality-filtered) are non-empty after
    // storing a position with move data.
    #[test]
    fn test_move_recommendations() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let mov = ChessMove::from_str("e2e4").unwrap();
        engine.add_position_with_move(&board, 0.0, Some(mov), Some(0.8));

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());

        let legal_recommendations = engine.recommend_legal_moves(&board, 3);
        assert!(!legal_recommendations.is_empty());
    }

    // With no training data at all, recommend_moves must still fall back to
    // legal-move ordering and return the requested count.
    #[test]
    fn test_empty_knowledge_base_fallback() {
        let mut engine = ChessVectorEngine::new(1024);

        use std::str::FromStr;
        let board =
            Board::from_str("r1bqkbnr/pppp1ppp/2n5/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 0 1")
                .unwrap();

        let recommendations = engine.recommend_moves(&board, 5);
        assert!(
            !recommendations.is_empty(),
            "recommend_moves should not return empty even with no training data"
        );
        assert_eq!(
            recommendations.len(),
            5,
            "Should return exactly 5 recommendations"
        );

        // Fallback recommendations carry fixed metadata: positive confidence,
        // a similar-position count of 1, and a neutral outcome.
        for rec in &recommendations {
            assert!(rec.confidence > 0.0, "Confidence should be greater than 0");
            assert_eq!(
                rec.from_similar_position_count, 1,
                "Should have count of 1 for fallback"
            );
            assert_eq!(rec.average_outcome, 0.0, "Should have neutral outcome");
        }

        let starting_board = Board::default();
        let starting_recommendations = engine.recommend_moves(&starting_board, 3);
        assert!(
            !starting_recommendations.is_empty(),
            "Should work for starting position too"
        );

        // Every fallback recommendation must still be a legal move.
        use chess::MoveGen;
        let legal_moves: std::collections::HashSet<_> = MoveGen::new_legal(&board).collect();
        for rec in &recommendations {
            assert!(
                legal_moves.contains(&rec.chess_move),
                "All recommended moves should be legal"
            );
        }
    }

    // The opening book recognizes the starting position and boosts the
    // confidence of book-move recommendations.
    #[test]
    fn test_opening_book_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        engine.enable_opening_book();
        assert!(engine.opening_book.is_some());

        let board = Board::default();
        assert!(engine.is_opening_position(&board));

        let entry = engine.get_opening_entry(&board);
        assert!(entry.is_some());

        let stats = engine.opening_book_stats();
        assert!(stats.is_some());
        assert!(stats.unwrap().total_positions > 0);

        let recommendations = engine.recommend_moves(&board, 3);
        assert!(!recommendations.is_empty());
        assert!(recommendations[0].confidence > 0.7); }

    // Manifold learning can be enabled, reports the requested compression
    // ratio, and still supports similarity search after training.
    #[test]
    fn test_manifold_learning_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..10 {
            engine.add_position(&board, i as f32 * 0.1);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());

        let ratio = engine.manifold_compression_ratio();
        assert!(ratio.is_some());
        assert!((ratio.unwrap() - 8.0).abs() < 0.1);

        assert!(engine.train_manifold_learning(5).is_ok());

        let original_similar = engine.find_similar_positions(&board, 3);
        assert!(!original_similar.is_empty());
    }

    // LSH-accelerated search returns bounded, non-empty results and leaves
    // evaluation working.
    #[test]
    fn test_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..50 {
            engine.add_position(&board, i as f32 * 0.02);
        }

        engine.enable_lsh(4, 8);

        let similar = engine.find_similar_positions(&board, 5);
        assert!(!similar.is_empty());
        assert!(similar.len() <= 5);

        let eval = engine.evaluate_position(&board);
        assert!(eval.is_some());
    }

    // Manifold learning and LSH can be combined (LSH in compressed space).
    #[test]
    fn test_manifold_lsh_integration() {
        let mut engine = ChessVectorEngine::new(1024);

        let board = Board::default();
        for i in 0..20 {
            engine.add_position(&board, i as f32 * 0.05);
        }

        assert!(engine.enable_manifold_learning(8.0).is_ok());
        assert!(engine.train_manifold_learning(3).is_ok());

        assert!(engine.enable_manifold_lsh(4, 8).is_ok());

        let similar = engine.find_similar_positions(&board, 3);
        assert!(!similar.is_empty());

        let _recommendations = engine.recommend_moves(&board, 2);
    }

    // Per-position move data is stored and surfaces in recommendations.
    #[test]
    fn test_position_with_move_storage() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        use chess::ChessMove;
        use std::str::FromStr;
        let move1 = ChessMove::from_str("e2e4").unwrap();
        let move2 = ChessMove::from_str("d2d4").unwrap();

        engine.add_position_with_move(&board, 0.0, Some(move1), Some(0.7));
        engine.add_position_with_move(&board, 0.1, Some(move2), Some(0.6));

        assert_eq!(engine.position_moves.len(), 2);

        let recommendations = engine.recommend_moves(&board, 5);
        let _move_strings: Vec<String> = recommendations
            .iter()
            .map(|r| r.chess_move.to_string())
            .collect();

        assert!(!recommendations.is_empty());
    }

    // Coarse performance regression guard with generous wall-clock bounds.
    #[test]
    fn test_performance_regression_basic() {
        use std::time::Instant;

        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        for i in 0..100 {
            engine.add_position(&board, i as f32 * 0.01);
        }

        let start = Instant::now();

        for _ in 0..100 {
            engine.add_position(&board, 0.0);
        }

        let encoding_time = start.elapsed();

        let start = Instant::now();
        for _ in 0..10 {
            engine.find_similar_positions(&board, 5);
        }
        let search_time = start.elapsed();

        assert!(
            encoding_time.as_millis() < 10000,
            "Position encoding too slow: {}ms",
            encoding_time.as_millis()
        );
        assert!(
            search_time.as_millis() < 5000,
            "Search too slow: {}ms",
            search_time.as_millis()
        );
    }

    // Knowledge base grows by exactly the number of positions added
    // (duplicates are not deduplicated on insert).
    #[test]
    fn test_memory_usage_reasonable() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        let initial_size = engine.knowledge_base_size();

        for i in 0..1000 {
            engine.add_position(&board, i as f32 * 0.001);
        }

        let final_size = engine.knowledge_base_size();
        assert_eq!(final_size, initial_size + 1000);

        assert!(final_size > initial_size);
    }

    // Incremental dataset training adds only unseen positions and updates
    // training statistics accordingly.
    #[test]
    fn test_incremental_training() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);
        let board1 = Board::default();
        let board2 =
            Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap();

        engine.add_position(&board1, 0.0);
        engine.add_position(&board2, 0.2);
        assert_eq!(engine.knowledge_base_size(), 2);

        // board1 is already known, so only the new e5 position is added.
        let mut dataset = crate::training::TrainingDataset::new();
        dataset.add_position(board1, 0.1, 15, 1); dataset.add_position(
            Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
            15,
            2,
        ); engine.train_from_dataset_incremental(&dataset);

        assert_eq!(engine.knowledge_base_size(), 3);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 3);
        assert_eq!(stats.unique_positions, 3);
        assert!(!stats.has_move_data); }

    // Training data saved from one engine merges into another on incremental
    // load (1 existing + 2 loaded = 3).
    #[test]
    fn test_save_load_incremental() {
        use std::str::FromStr;
        use tempfile::tempdir;

        let temp_dir = tempdir().unwrap();
        let file_path = temp_dir.path().join("test_training.json");

        let mut engine1 = ChessVectorEngine::new(1024);
        engine1.add_position(&Board::default(), 0.0);
        engine1.add_position(
            &Board::from_str("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1").unwrap(),
            0.2,
        );

        engine1.save_training_data(&file_path).unwrap();

        let mut engine2 = ChessVectorEngine::new(1024);
        engine2.add_position(
            &Board::from_str("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2")
                .unwrap(),
            0.3,
        );
        assert_eq!(engine2.knowledge_base_size(), 1);

        engine2.load_training_data_incremental(&file_path).unwrap();

        assert_eq!(engine2.knowledge_base_size(), 3);
    }

    // training_stats reflects position counts, move data, and which optional
    // subsystems (LSH, opening book, manifold) are enabled.
    #[test]
    fn test_training_stats() {
        use std::str::FromStr;

        let mut engine = ChessVectorEngine::new(1024);

        // Fresh engine: everything zero / disabled.
        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 0);
        assert_eq!(stats.unique_positions, 0);
        assert!(!stats.has_move_data);
        assert!(!stats.lsh_enabled);
        assert!(!stats.manifold_enabled);
        assert!(!stats.opening_book_enabled);

        engine.add_position(&Board::default(), 0.0);
        engine.add_position_with_move(
            &Board::default(),
            0.1,
            Some(ChessMove::from_str("e2e4").unwrap()),
            Some(0.8),
        );

        engine.enable_opening_book();
        engine.enable_lsh(4, 8);

        let stats = engine.training_stats();
        assert_eq!(stats.total_positions, 2);
        assert!(stats.has_move_data);
        assert!(stats.move_data_entries > 0);
        assert!(stats.lsh_enabled);
        assert!(stats.opening_book_enabled);
    }

    // Tactical search toggles on correctly and evaluation works both with an
    // empty and a populated knowledge base.
    #[test]
    fn test_tactical_search_integration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        assert!(!engine.is_tactical_search_enabled());

        engine.enable_tactical_search_default();
        assert!(engine.is_tactical_search_enabled());

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        engine.add_position(&board, 0.5);
        let hybrid_evaluation = engine.evaluate_position(&board);
        assert!(hybrid_evaluation.is_some());
    }

    // Custom hybrid configs (with and without tactical refinement) can be
    // applied and evaluation keeps returning a value under both.
    #[test]
    fn test_hybrid_evaluation_configuration() {
        let mut engine = ChessVectorEngine::new(1024);
        let board = Board::default();

        engine.enable_tactical_search_default();

        let custom_config = HybridConfig {
            pattern_confidence_threshold: 0.9, enable_tactical_refinement: true,
            tactical_config: TacticalConfig::default(),
            pattern_weight: 0.8,
            min_similar_positions: 5,
        };

        engine.configure_hybrid_evaluation(custom_config);

        engine.add_position(&board, 0.3);

        let evaluation = engine.evaluate_position(&board);
        assert!(evaluation.is_some());

        // Pattern-only mode: tactical refinement disabled.
        let no_tactical_config = HybridConfig {
            enable_tactical_refinement: false,
            ..HybridConfig::default()
        };

        engine.configure_hybrid_evaluation(no_tactical_config);

        let pattern_only_evaluation = engine.evaluate_position(&board);
        assert!(pattern_only_evaluation.is_some());
    }
}