use rand::prelude::*;
use std::fs;
use std::io::{self, Write};
use std::thread;
use std::time::Duration;
use vexus::{NeuralNetwork, Sigmoid};

const MODEL_FILE: &str = "tictactoe_nn.json";
// Currently unused; presumably intended for saving the specialized X and O models.
const MODEL_X_FILE: &str = "tictactoe_x_model.json";
const MODEL_O_FILE: &str = "tictactoe_o_model.json";

const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H";
const RED: &str = "\x1B[31m";
const GREEN: &str = "\x1B[32m";
const YELLOW: &str = "\x1B[33m";
const BLUE: &str = "\x1B[34m";
const PURPLE: &str = "\x1B[35m";
const CYAN: &str = "\x1B[36m";
const BOLD: &str = "\x1B[1m";
const RESET: &str = "\x1B[0m";

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Cell {
    Empty,
    X,
    O,
}

impl Cell {
    fn to_colored_string(&self) -> String {
        match self {
            Cell::Empty => " ".to_string(),
            Cell::X => format!("{}X{}", GREEN, RESET),
            Cell::O => format!("{}O{}", RED, RESET),
        }
    }

    fn opponent(&self) -> Self {
        match self {
            Cell::X => Cell::O,
            Cell::O => Cell::X,
            Cell::Empty => Cell::Empty,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum GameState {
    InProgress,
    XWins,
    OWins,
    Draw,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Player {
    Human,
    AI,
}

#[derive(Clone)]
struct Board {
    cells: [Cell; 9],
}

impl Board {
    fn new() -> Self {
        Self {
            cells: [Cell::Empty; 9],
        }
    }

    fn make_move(&mut self, index: usize, cell: Cell) -> bool {
        if index < 9 && self.cells[index] == Cell::Empty {
            self.cells[index] = cell;
            true
        } else {
            false
        }
    }

    fn is_full(&self) -> bool {
        self.cells.iter().all(|&cell| cell != Cell::Empty)
    }

    fn get_empty_cells(&self) -> Vec<usize> {
        self.cells
            .iter()
            .enumerate()
            .filter_map(|(i, &cell)| if cell == Cell::Empty { Some(i) } else { None })
            .collect()
    }

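    /// Scans the three rows, three columns, and both diagonals for a line of
    /// identical non-empty cells; falls back to `Draw` when the board is full
    /// and `InProgress` otherwise.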
    fn check_winner(&self) -> GameState {
        for i in 0..3 {
            if self.cells[i * 3] != Cell::Empty
                && self.cells[i * 3] == self.cells[i * 3 + 1]
                && self.cells[i * 3] == self.cells[i * 3 + 2]
            {
                return match self.cells[i * 3] {
                    Cell::X => GameState::XWins,
                    Cell::O => GameState::OWins,
                    _ => unreachable!(),
                };
            }
        }

        for i in 0..3 {
            if self.cells[i] != Cell::Empty
                && self.cells[i] == self.cells[i + 3]
                && self.cells[i] == self.cells[i + 6]
            {
                return match self.cells[i] {
                    Cell::X => GameState::XWins,
                    Cell::O => GameState::OWins,
                    _ => unreachable!(),
                };
            }
        }

        if self.cells[0] != Cell::Empty
            && self.cells[0] == self.cells[4]
            && self.cells[0] == self.cells[8]
        {
            return match self.cells[0] {
                Cell::X => GameState::XWins,
                Cell::O => GameState::OWins,
                _ => unreachable!(),
            };
        }

        if self.cells[2] != Cell::Empty
            && self.cells[2] == self.cells[4]
            && self.cells[2] == self.cells[6]
        {
            return match self.cells[2] {
                Cell::X => GameState::XWins,
                Cell::O => GameState::OWins,
                _ => unreachable!(),
            };
        }

        if self.is_full() {
            return GameState::Draw;
        }

        GameState::InProgress
    }

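    /// Encodes the board as the network's 18 inputs: the first 9 values mark
    /// the given player's pieces (1.0 / 0.0), the next 9 mark the opponent's.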
    fn to_nn_input(&self, player_symbol: Cell) -> Vec<f32> {
        let mut input = Vec::with_capacity(18);

        for &cell in &self.cells {
            input.push(if cell == player_symbol { 1.0 } else { 0.0 });
        }

        let opponent_symbol = player_symbol.opponent();
        for &cell in &self.cells {
            input.push(if cell == opponent_symbol { 1.0 } else { 0.0 });
        }

        input
    }
}

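/// Bundles the `vexus` network (18 inputs, three hidden layers of 36, 9 outputs,
/// one per cell) with the replay buffer it is trained on and the symbol (X or O)
/// it is currently playing as.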
struct NeuralNetworkAI {
    nn: NeuralNetwork,
    training_data: Vec<(Vec<f32>, Vec<f32>)>,
    symbol: Cell,
}

impl NeuralNetworkAI {
    fn new(symbol: Cell) -> Self {
        let nn = NeuralNetwork::new(vec![18, 36, 36, 36, 9], 0.01, Box::new(Sigmoid));

        Self {
            nn,
            training_data: Vec::new(),
            symbol,
        }
    }

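    /// Picks a move by scoring every empty cell with the network's output and,
    /// 10% of the time, exploring with a uniformly random legal move instead.
    /// If only one cell is left, it is played immediately.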
    fn get_move(&mut self, board: &Board) -> usize {
        let empty_cells = board.get_empty_cells();

        if empty_cells.len() == 1 {
            return empty_cells[0];
        }

        let input = board.to_nn_input(self.symbol);

        let output = self.nn.forward(&input);

        let mut valid_moves: Vec<(usize, f32)> =
            empty_cells.iter().map(|&idx| (idx, output[idx])).collect();

        valid_moves.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());

        let mut rng = rand::rng();
        if rng.random::<f32>() < 0.1 {
            *empty_cells.choose(&mut rng).unwrap()
        } else {
            valid_moves[0].0
        }
    }

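    /// Converts a finished game into training pairs for this AI's own moves:
    /// the chosen cell is labelled 1.0 for a win, 0.5 for a draw, 0.0 for a
    /// loss (0.1 if the game was abandoned mid-play); all other cells stay 0.0.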
    fn record_game(&mut self, board_states: &[(Board, usize)], winner: GameState) {
        for (board, move_idx) in board_states {
            let current_player =
                if board.cells.iter().filter(|&&c| c != Cell::Empty).count() % 2 == 0 {
                    Cell::X
                } else {
                    Cell::O
                };

            if current_player != self.symbol {
                continue;
            }

            let input = board.to_nn_input(self.symbol);

            let mut target = vec![0.0; 9];

            let move_value = match winner {
                GameState::XWins => {
                    if self.symbol == Cell::X {
                        1.0
                    } else {
                        0.0
                    }
                }
                GameState::OWins => {
                    if self.symbol == Cell::O {
                        1.0
                    } else {
                        0.0
                    }
                }
                GameState::Draw => 0.5,
                _ => 0.1,
            };

            target[*move_idx] = move_value;

            self.training_data.push((input, target));
        }
    }

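    /// Runs a few epochs of backpropagation over a random sample of at most 50
    /// stored examples (skipped entirely with fewer than 3), then trims the
    /// replay buffer to its most recent 1000 entries.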
    fn train_incremental(&mut self, epochs: usize) {
        if self.training_data.len() < 3 {
            return;
        }

        let mut rng = rand::rng();
        let data_size = self.training_data.len();
        let mut indices: Vec<usize> = (0..data_size).collect();
        indices.shuffle(&mut rng);

        let training_size = data_size.min(50);
        let indices = indices[0..training_size].to_vec();

        for _ in 0..epochs {
            for &idx in &indices {
                let (input, target) = &self.training_data[idx];

                let _outputs = self.nn.forward(input);

                self.nn.backpropagate(target);
            }
        }

        if self.training_data.len() > 1000 {
            self.training_data = self.training_data[self.training_data.len() - 1000..].to_vec();
        }
    }
}

struct TicTacToe {
    board: Board,
    player_symbol: Cell,
    ai_symbol: Cell,
    current_turn: Player,
    ai: NeuralNetworkAI,
    games_played: usize,
    player_wins: usize,
    ai_wins: usize,
    draws: usize,
}

impl TicTacToe {
    fn new() -> Self {
        Self {
            board: Board::new(),
            player_symbol: Cell::X,
            ai_symbol: Cell::O,
            current_turn: Player::Human,
            ai: NeuralNetworkAI::new(Cell::O),
            games_played: 0,
            player_wins: 0,
            ai_wins: 0,
            draws: 0,
        }
    }

    fn reset_game(&mut self) {
        self.board = Board::new();

        if self.player_symbol == Cell::X {
            self.current_turn = Player::Human;
        } else {
            self.current_turn = Player::AI;
        }

        self.ai.symbol = self.ai_symbol;
    }

    fn display_board(&self) {
        clear_screen();
        println!(
            "{}{}TIC TAC TOE WITH NEURAL NETWORK{}{}",
            BOLD, BLUE, RESET, YELLOW
        );
        println!("----------------------------------");

        if self.player_symbol == Cell::X {
            println!("You: {}X{} | AI: {}O{}", GREEN, RESET, RED, RESET);
        } else {
            println!("You: {}O{} | AI: {}X{}", RED, RESET, GREEN, RESET);
        }

        println!(
            "\nCurrent turn: {}",
            if self.current_turn == Player::Human {
                format!("{}YOUR TURN{}", GREEN, RESET)
            } else {
                format!("{}AI'S TURN{}", RED, RESET)
            }
        );

        println!(
            "\nStats: You: {} | AI: {} | Draws: {}",
            self.player_wins, self.ai_wins, self.draws
        );

        println!("\n");
        println!(
            " {} | {} | {} ",
            self.board.cells[0].to_colored_string(),
            self.board.cells[1].to_colored_string(),
            self.board.cells[2].to_colored_string()
        );
        println!("-----------");
        println!(
            " {} | {} | {} ",
            self.board.cells[3].to_colored_string(),
            self.board.cells[4].to_colored_string(),
            self.board.cells[5].to_colored_string()
        );
        println!("-----------");
        println!(
            " {} | {} | {} ",
            self.board.cells[6].to_colored_string(),
            self.board.cells[7].to_colored_string(),
            self.board.cells[8].to_colored_string()
        );
        println!();

        println!("{}Board positions:{}", CYAN, RESET);
        println!(" 1 | 2 | 3 ");
        println!("-----------");
        println!(" 4 | 5 | 6 ");
        println!("-----------");
        println!(" 7 | 8 | 9 ");
        println!();
    }

    fn make_player_move(&mut self, position: usize) -> bool {
        if position < 1 || position > 9 {
            return false;
        }

        let index = position - 1;
        self.board.make_move(index, self.player_symbol)
    }
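
    /// Self-play training that builds two fresh networks, one always playing X
    /// and one always playing O, for a user-chosen number of games, then
    /// optionally adopts one of them as the default model.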
    fn train_specialized_ai(&mut self) {
        clear_screen();
        println!(
            "{}{}SPECIALIZED AI TRAINING{}{}",
            BOLD, PURPLE, RESET, YELLOW
        );
        println!("------------------------");
        println!("This mode creates two separate specialized neural networks:");
        println!("1. X-AI: Specialized in playing as the first player (X)");
        println!("2. O-AI: Specialized in playing as the second player (O)");
        println!();
        println!("Each AI will be trained only for its specific role,");
        println!("allowing them to develop more focused strategies.");
        println!();
        println!("How many games should be played?");
        println!("(Recommended: 1000-5000 games)");
        println!();
        print!("Enter number of games (or 0 to cancel): ");
        io::stdout().flush().unwrap();

        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();
        let input = input.trim();

        if let Ok(num_games) = input.parse::<usize>() {
            if num_games > 0 {
                clear_screen();
                println!(
                    "{}{}SPECIALIZED TRAINING IN PROGRESS{}\n",
                    BOLD, PURPLE, RESET
                );

                let mut ai_x = NeuralNetworkAI::new(Cell::X);
                let mut ai_o = NeuralNetworkAI::new(Cell::O);

                let mut x_wins = 0;
                let mut o_wins = 0;
                let mut draws = 0;

                println!("\nTraining for {} games...", num_games);

                for game in 1..=num_games {
                    if game % 100 == 0 {
                        println!("Completed {} games...", game);
                    } else if game % 10 == 0 {
                        print!(".");
                        io::stdout().flush().unwrap();
                    }

                    let mut board = Board::new();
                    let mut game_history: Vec<(Board, usize)> = Vec::new();
                    let mut current_player = Cell::X;

                    while board.check_winner() == GameState::InProgress {
                        let board_before = board.clone();

                        let ai_move = if current_player == Cell::X {
                            ai_x.symbol = Cell::X;
                            ai_x.get_move(&board)
                        } else {
                            ai_o.symbol = Cell::O;
                            ai_o.get_move(&board)
                        };

                        board.make_move(ai_move, current_player);

                        game_history.push((board_before, ai_move));

                        current_player = current_player.opponent();
                    }

                    let result = board.check_winner();

                    match result {
                        GameState::XWins => x_wins += 1,
                        GameState::OWins => o_wins += 1,
                        GameState::Draw => draws += 1,
                        _ => {}
                    }

                    ai_x.symbol = Cell::X;
                    ai_x.record_game(&game_history, result);

                    ai_o.symbol = Cell::O;
                    ai_o.record_game(&game_history, result);

                    if game % 100 == 0 || game == num_games {
                        println!("\nTraining X-AI...");
                        ai_x.train_incremental(5);

                        println!("Training O-AI...");
                        ai_o.train_incremental(5);
                    }
                }

                println!("\n{}Training complete!{}", GREEN, RESET);
                println!("Games played: {}", num_games);
                println!(
                    "X wins: {} ({:.1}%)",
                    x_wins,
                    (x_wins as f32 / num_games as f32) * 100.0
                );
                println!(
                    "O wins: {} ({:.1}%)",
                    o_wins,
                    (o_wins as f32 / num_games as f32) * 100.0
                );
                println!(
                    "Draws: {} ({:.1}%)",
                    draws,
                    (draws as f32 / num_games as f32) * 100.0
                );

                println!("\nWhich specialized model would you like to use as default?");
                println!("{}[1]{} X Model (First Player)", GREEN, RESET);
                println!("{}[2]{} O Model (Second Player)", RED, RESET);
                println!("{}[3]{} Keep Current Model", YELLOW, RESET);
                print!("Enter your choice: ");
                io::stdout().flush().unwrap();

                let mut input = String::new();
                io::stdin().read_line(&mut input).unwrap();

                match input.trim() {
                    "1" => {
                        self.ai.nn = ai_x.nn.clone();
                    }
                    "2" => {
                        self.ai.nn = ai_o.nn.clone();
                    }
                    _ => {
                        println!("Keeping current model as default.");
                    }
                }

                println!("\nPress Enter to continue...");
                io::stdout().flush().unwrap();
                let mut input = String::new();
                io::stdin().read_line(&mut input).unwrap();
            }
        } else {
            println!("{}Invalid input. Training cancelled.{}", RED, RESET);
            thread::sleep(Duration::from_secs(1));
        }
    }

    fn make_ai_move(&mut self) {
        thread::sleep(Duration::from_millis(500));

        let move_index = self.ai.get_move(&self.board);

        self.board.make_move(move_index, self.ai_symbol);
    }

    fn display_game_over(&self, state: GameState) {
        self.display_board();

        println!("{}GAME OVER{}", BOLD, RESET);

        match state {
            GameState::XWins => {
                if self.player_symbol == Cell::X {
                    println!("{}You win! Congratulations!{}", GREEN, RESET);
                } else {
                    println!("{}AI wins!{}", RED, RESET);
                }
            }
            GameState::OWins => {
                if self.player_symbol == Cell::O {
                    println!("{}You win! Congratulations!{}", GREEN, RESET);
                } else {
                    println!("{}AI wins!{}", RED, RESET);
                }
            }
            GameState::Draw => {
                println!("{}It's a draw!{}", YELLOW, RESET);
            }
            _ => {}
        }

        println!("\nPress Enter to continue...");
        io::stdout().flush().unwrap();
        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();
    }

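    /// Runs one interactive game against the current AI, records every move,
    /// updates the win/draw counters, and trains the network on the finished
    /// game.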
    fn play_game(&mut self) {
        self.reset_game();

        let mut game_history: Vec<(Board, usize)> = Vec::new();

        loop {
            self.display_board();

            let state = self.board.check_winner();
            if state != GameState::InProgress {
                match state {
                    GameState::XWins => {
                        if self.player_symbol == Cell::X {
                            self.player_wins += 1;
                        } else {
                            self.ai_wins += 1;
                        }
                    }
                    GameState::OWins => {
                        if self.player_symbol == Cell::O {
                            self.player_wins += 1;
                        } else {
                            self.ai_wins += 1;
                        }
                    }
                    GameState::Draw => {
                        self.draws += 1;
                    }
                    _ => {}
                }

                let original_symbol = self.ai.symbol;
                self.ai.symbol = self.ai_symbol;
                self.ai.record_game(&game_history, state);

                println!("\n{}AI is learning from this game...{}", PURPLE, RESET);
                io::stdout().flush().unwrap();

                self.ai.train_incremental(5);

                self.ai.symbol = original_symbol;

                self.games_played += 1;
                self.display_game_over(state);

                break;
            }

            if self.current_turn == Player::Human {
                println!(
                    "{}Your turn! Enter position (1-9) or 'q' to quit: {}",
                    GREEN, RESET
                );
                io::stdout().flush().unwrap();

                let mut input = String::new();
                io::stdin().read_line(&mut input).unwrap();
                let input = input.trim();

                if input.eq_ignore_ascii_case("q") {
                    break;
                }

                if let Ok(position) = input.parse::<usize>() {
                    let board_before = self.board.clone();

                    if self.make_player_move(position) {
                        game_history.push((board_before, position - 1));

                        self.current_turn = Player::AI;
                    } else {
                        println!("{}Invalid move! Try again.{}", RED, RESET);
                        thread::sleep(Duration::from_secs(1));
                    }
                } else {
                    println!("{}Invalid input! Try again.{}", RED, RESET);
                    thread::sleep(Duration::from_secs(1));
                }
            } else {
                let board_before = self.board.clone();
                self.make_ai_move();

                let ai_move_idx = self
                    .board
                    .cells
                    .iter()
                    .enumerate()
                    .find(|(i, cell)| {
                        **cell == self.ai_symbol && board_before.cells[*i] == Cell::Empty
                    })
                    .map(|(i, _)| i)
                    .unwrap_or(0);

                game_history.push((board_before, ai_move_idx));

                self.current_turn = Player::Human;
            }
        }
    }

    fn display_main_menu(&mut self) {
        clear_screen();
        println!(
            "{}{}TIC TAC TOE WITH NEURAL NETWORK{}{}",
            BOLD, BLUE, RESET, YELLOW
        );
        println!("----------------------------------");
        println!(
            "{}Welcome to Tic Tac Toe with a Neural Network AI!{}",
            YELLOW, RESET
        );
        println!("The AI learns from playing against itself and you!");
        println!();
        println!("{}[1]{} Play as X (First Move)", GREEN, RESET);
        println!("{}[2]{} Play as O (Second Move)", RED, RESET);
        println!("{}[3]{} Train AI (Competitive)", YELLOW, RESET);
        println!("{}[4]{} Train Specialized AIs (X & O)", PURPLE, RESET);
        println!("{}[5]{} Watch AI vs AI Game", CYAN, RESET);
        println!("{}[6]{} View Stats", BLUE, RESET);
        println!("{}[7]{} Exit", RED, RESET);
        println!();
        print!("Enter your choice: ");
        io::stdout().flush().unwrap();
    }

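    /// Competitive training: two copies of the current network play batches of
    /// games against each other. After each batch the stronger copy replaces
    /// the default network and overwrites the weaker one, and the weaker copy
    /// (both, on a tie) gets extra training on one random game before the next
    /// batch.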
    fn train_ai(&mut self) {
        clear_screen();
        println!("{}{}AI TRAINING{}{}", BOLD, PURPLE, RESET, YELLOW);
        println!("------------------------");
        println!("This method creates two separate AIs that compete against each other:");
        println!("1. Both AIs start with the current neural network");
        println!("2. They play a batch of games against each other");
        println!("3. The winner becomes the new saved neural network");
        println!();
        println!("How many games should be played in each batch?");
        println!("(Recommended: 100-1000 games per batch)");
        println!();
        print!("Enter number of games per batch (or 0 to cancel): ");
        io::stdout().flush().unwrap();

        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();
        let input = input.trim();

        if let Ok(games_per_batch) = input.parse::<usize>() {
            if games_per_batch > 0 {
                println!("How many batches do you want to run?");
                print!("Enter number of batches: ");
                io::stdout().flush().unwrap();

                let mut input = String::new();
                io::stdin().read_line(&mut input).unwrap();
                let input = input.trim();

                if let Ok(num_batches) = input.parse::<usize>() {
                    if num_batches > 0 {
                        clear_screen();
                        println!(
                            "{}{}COMPETITIVE TRAINING IN PROGRESS{}\n",
                            BOLD, PURPLE, RESET
                        );

                        let mut ai_1 = NeuralNetworkAI::new(Cell::X);
                        let mut ai_2 = NeuralNetworkAI::new(Cell::O);

                        ai_1.nn = self.ai.nn.clone();
                        ai_2.nn = self.ai.nn.clone();

                        for batch in 1..=num_batches {
                            println!("Running batch {}/{}...", batch, num_batches);

                            let mut ai_1_wins = 0;
                            let mut ai_2_wins = 0;
                            let mut draws = 0;

                            for game in 1..=games_per_batch {
                                if game % 20 == 0 {
                                    print!(".");
                                    io::stdout().flush().unwrap();
                                }

                                let (a1_symbol, _) = if game % 2 == 0 {
                                    (Cell::X, Cell::O)
                                } else {
                                    (Cell::O, Cell::X)
                                };

                                let mut board = Board::new();
                                let mut current_player = Cell::X;

                                while board.check_winner() == GameState::InProgress {
                                    let ai_move = if current_player == a1_symbol {
                                        ai_1.symbol = current_player;
                                        ai_1.get_move(&board)
                                    } else {
                                        ai_2.symbol = current_player;
                                        ai_2.get_move(&board)
                                    };

                                    board.make_move(ai_move, current_player);

                                    current_player = current_player.opponent();
                                }

                                match board.check_winner() {
                                    GameState::XWins => {
                                        if a1_symbol == Cell::X {
                                            ai_1_wins += 1;
                                        } else {
                                            ai_2_wins += 1;
                                        }
                                    }
                                    GameState::OWins => {
                                        if a1_symbol == Cell::O {
                                            ai_1_wins += 1;
                                        } else {
                                            ai_2_wins += 1;
                                        }
                                    }
                                    GameState::Draw => {
                                        draws += 1;
                                    }
                                    _ => {}
                                }
                            }

                            println!("\nBatch {} results:", batch);
                            println!(" AI-1 wins: {}", ai_1_wins);
                            println!(" AI-2 wins: {}", ai_2_wins);
                            println!(" Draws: {}", draws);

                            if ai_1_wins > ai_2_wins {
                                println!(" {}AI-1 performed better!{}", GREEN, RESET);
                                self.ai.nn = ai_1.nn.clone();
                                ai_2.nn = ai_1.nn.clone();
                            } else if ai_2_wins > ai_1_wins {
                                println!(" {}AI-2 performed better!{}", GREEN, RESET);
                                self.ai.nn = ai_2.nn.clone();
                                ai_1.nn = ai_2.nn.clone();
                            } else {
                                println!(" {}It's a tie! No clear winner.{}", YELLOW, RESET);
                            }

                            let mut rng = rand::rng();

                            if ai_1_wins <= ai_2_wins {
                                println!(" Training AI-1 for next batch...");
                                let mut board = Board::new();
                                let mut game_history: Vec<(Board, usize)> = Vec::new();
                                let mut current_player = Cell::X;

                                while board.check_winner() == GameState::InProgress {
                                    let board_before = board.clone();
                                    let empty_cells = board.get_empty_cells();
                                    let random_move = *empty_cells.choose(&mut rng).unwrap();

                                    board.make_move(random_move, current_player);
                                    game_history.push((board_before, random_move));
                                    current_player = current_player.opponent();
                                }

                                let result = board.check_winner();

                                ai_1.symbol = Cell::X;
                                ai_1.record_game(&game_history, result);
                                ai_1.train_incremental(3);

                                ai_1.symbol = Cell::O;
                                ai_1.record_game(&game_history, result);
                                ai_1.train_incremental(3);
                            }

                            if ai_2_wins <= ai_1_wins {
                                println!(" Training AI-2 for next batch...");
                                let mut board = Board::new();
                                let mut game_history: Vec<(Board, usize)> = Vec::new();
                                let mut current_player = Cell::X;

                                while board.check_winner() == GameState::InProgress {
                                    let board_before = board.clone();
                                    let empty_cells = board.get_empty_cells();
                                    let random_move = *empty_cells.choose(&mut rng).unwrap();

                                    board.make_move(random_move, current_player);
                                    game_history.push((board_before, random_move));
                                    current_player = current_player.opponent();
                                }

                                let result = board.check_winner();

                                ai_2.symbol = Cell::X;
                                ai_2.record_game(&game_history, result);
                                ai_2.train_incremental(3);

                                ai_2.symbol = Cell::O;
                                ai_2.record_game(&game_history, result);
                                ai_2.train_incremental(3);
                            }
                        }

                        self.ai.symbol = self.ai_symbol;

                        println!("\n{}Competitive training complete!{}", GREEN, RESET);
                        println!("The neural network has improved through competition.");
                        println!("\nPress Enter to continue...");
                        io::stdout().flush().unwrap();
                        let mut input = String::new();
                        io::stdin().read_line(&mut input).unwrap();
                    }
                } else {
                    println!("{}Invalid input. Training cancelled.{}", RED, RESET);
                    thread::sleep(Duration::from_secs(1));
                }
            }
        } else {
            println!("{}Invalid input. Training cancelled.{}", RED, RESET);
            thread::sleep(Duration::from_secs(1));
        }
    }

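    /// Plays a single AI-vs-AI game move by move for the spectator, then
    /// trains the main network on the result from both X's and O's point of
    /// view.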
    fn watch_ai_vs_ai(&mut self) {
        clear_screen();
        println!("{}{}WATCH AI VS AI GAME{}{}", BOLD, CYAN, RESET, YELLOW);
        println!("-------------------");
        println!("Watch the Neural Network play against itself!");
        println!("Moves are played automatically with a short pause between each one.");
        println!();

        let mut board = Board::new();
        let mut game_history: Vec<(Board, usize)> = Vec::new();
        let mut current_player = Cell::X;

        let mut ai_x = NeuralNetworkAI::new(Cell::X);
        let mut ai_o = NeuralNetworkAI::new(Cell::O);

        ai_x.nn = self.ai.nn.clone();
        ai_o.nn = self.ai.nn.clone();

        display_spectator_board(&board, current_player);

        println!("\nPress Enter to start the game...");
        io::stdout().flush().unwrap();
        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();

        let mut move_count = 0;

        while board.check_winner() == GameState::InProgress {
            move_count += 1;

            let board_before = board.clone();

            let ai_move = if current_player == Cell::X {
                ai_x.get_move(&board)
            } else {
                ai_o.get_move(&board)
            };

            board.make_move(ai_move, current_player);

            game_history.push((board_before, ai_move));

            display_spectator_board(&board, current_player.opponent());

            println!(
                "Move #{}: {} placed at position {}",
                move_count,
                if current_player == Cell::X {
                    "X (Green)"
                } else {
                    "O (Red)"
                },
                ai_move + 1
            );

            let state = board.check_winner();
            if state != GameState::InProgress {
                println!("\n{}GAME OVER{}", BOLD, RESET);
                match state {
                    GameState::XWins => println!("{}X (Green) wins!{}", GREEN, RESET),
                    GameState::OWins => println!("{}O (Red) wins!{}", RED, RESET),
                    GameState::Draw => println!("{}It's a draw!{}", YELLOW, RESET),
                    _ => {}
                }

                println!("\n{}AI is learning from this game...{}", PURPLE, RESET);
                io::stdout().flush().unwrap();

                self.ai.symbol = Cell::X;
                self.ai.record_game(&game_history, state);
                self.ai.train_incremental(5);

                self.ai.symbol = Cell::O;
                self.ai.record_game(&game_history, state);
                self.ai.train_incremental(5);

                self.ai.symbol = self.ai_symbol;

                println!("{}Learning complete!{}", GREEN, RESET);

                println!("\nPress Enter to return to the main menu...");
                io::stdout().flush().unwrap();
                let mut input = String::new();
                io::stdin().read_line(&mut input).unwrap();
                break;
            }

            current_player = current_player.opponent();

            thread::sleep(Duration::from_secs(1));
        }
    }

    fn display_stats(&self) {
        clear_screen();
        println!("{}{}GAME STATISTICS{}{}", BOLD, BLUE, RESET, YELLOW);
        println!("----------------");
        println!("Total games played: {}", self.games_played);
        println!(
            "Player wins: {} ({:.1}%)",
            self.player_wins,
            if self.games_played > 0 {
                (self.player_wins as f32 / self.games_played as f32) * 100.0
            } else {
                0.0
            }
        );
        println!(
            "AI wins: {} ({:.1}%)",
            self.ai_wins,
            if self.games_played > 0 {
                (self.ai_wins as f32 / self.games_played as f32) * 100.0
            } else {
                0.0
            }
        );
        println!(
            "Draws: {} ({:.1}%)",
            self.draws,
            if self.games_played > 0 {
                (self.draws as f32 / self.games_played as f32) * 100.0
            } else {
                0.0
            }
        );

        if let Ok(metadata) = fs::metadata(MODEL_FILE) {
            println!("\nNeural Network Model:");
            println!(" File: {}", MODEL_FILE);
            println!(" Size: {} bytes", metadata.len());
            println!(" Training examples: {}", self.ai.training_data.len());
        } else {
            println!("\nNeural Network Model: Not yet created");
        }

        println!();
        println!(
            "AI Performance: {}",
            if self.games_played < 5 {
                "Not enough data"
            } else if self.ai_wins as f32 / self.games_played as f32 > 0.6 {
                "Strong"
            } else if self.ai_wins as f32 / self.games_played as f32 > 0.4 {
                "Moderate"
            } else {
                "Needs more training"
            }
        );

        println!();
        println!("Press Enter to return to the main menu...");
        io::stdout().flush().unwrap();
        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();
    }

    fn run(&mut self) {
        loop {
            self.display_main_menu();

            let mut input = String::new();
            io::stdin().read_line(&mut input).unwrap();
            let input = input.trim();

            match input {
                "1" => {
                    self.player_symbol = Cell::X;
                    self.ai_symbol = Cell::O;
                    self.current_turn = Player::Human;

                    self.play_game();
                }
                "2" => {
                    self.player_symbol = Cell::O;
                    self.ai_symbol = Cell::X;
                    self.current_turn = Player::AI;

                    self.play_game();
                }
                "3" => {
                    self.train_ai();
                }
                "4" => {
                    self.train_specialized_ai();
                }
                "5" => {
                    self.watch_ai_vs_ai();
                }
                "6" => {
                    self.display_stats();
                }
                "7" => {
                    clear_screen();
                    println!("{}Thanks for playing!{}", GREEN, RESET);
                    break;
                }
                _ => {
                    clear_screen();
                    println!("{}Invalid choice. Please try again.{}", RED, RESET);
                    thread::sleep(Duration::from_secs(1));
                }
            }
        }
    }
}

fn display_spectator_board(board: &Board, current_player: Cell) {
    clear_screen();
    println!("{}{}WATCHING AI VS AI GAME{}{}", BOLD, CYAN, RESET, YELLOW);
    println!("----------------------");
    println!("{}X (Green){} vs {}O (Red){}", GREEN, RESET, RED, RESET);

    println!(
        "\nCurrent turn: {}",
        if current_player == Cell::X {
            format!("{}X's TURN{}", GREEN, RESET)
        } else {
            format!("{}O's TURN{}", RED, RESET)
        }
    );

    println!("\n");
    println!(
        " {} | {} | {} ",
        board.cells[0].to_colored_string(),
        board.cells[1].to_colored_string(),
        board.cells[2].to_colored_string()
    );
    println!("-----------");
    println!(
        " {} | {} | {} ",
        board.cells[3].to_colored_string(),
        board.cells[4].to_colored_string(),
        board.cells[5].to_colored_string()
    );
    println!("-----------");
    println!(
        " {} | {} | {} ",
        board.cells[6].to_colored_string(),
        board.cells[7].to_colored_string(),
        board.cells[8].to_colored_string()
    );
    println!();

    println!("{}Board positions:{}", CYAN, RESET);
    println!(" 1 | 2 | 3 ");
    println!("-----------");
    println!(" 4 | 5 | 6 ");
    println!("-----------");
    println!(" 7 | 8 | 9 ");
    println!();
}

fn clear_screen() {
    print!("{}", CLEAR_SCREEN);
    io::stdout().flush().unwrap();
}

fn main() {
    let mut game = TicTacToe::new();
    game.run();
}
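
// A few illustrative unit tests for the pure board logic (win detection and the
// NN input encoding). These are an addition for clarity, only exercise code
// defined above, and run with `cargo test`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn row_win_is_detected() {
        let mut board = Board::new();
        board.make_move(0, Cell::X);
        board.make_move(1, Cell::X);
        board.make_move(2, Cell::X);
        assert_eq!(board.check_winner(), GameState::XWins);
    }

    #[test]
    fn diagonal_win_is_detected() {
        let mut board = Board::new();
        board.make_move(0, Cell::O);
        board.make_move(4, Cell::O);
        board.make_move(8, Cell::O);
        assert_eq!(board.check_winner(), GameState::OWins);
    }

    #[test]
    fn nn_input_encodes_own_then_opponent_cells() {
        let mut board = Board::new();
        board.make_move(0, Cell::X);
        board.make_move(4, Cell::O);
        let input = board.to_nn_input(Cell::X);
        assert_eq!(input.len(), 18);
        assert_eq!(input[0], 1.0); // own piece at cell 0
        assert_eq!(input[13], 1.0); // opponent piece at cell 4 (offset 9 + 4)
    }
}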