//! AI module for chess engine
//!
//! This module provides basic AI functionality and interfaces for
//! training more advanced AI models.

use crate::game::Game;
use crate::moves::Move;
use crate::utils::evaluate_position;
use crate::{GameResult, Side};
use rand::seq::SliceRandom;

/// Default maximum search depth for the minimax algorithm, used when
/// `MinimaxAI::new` is given `None` for `max_depth`.
const MAX_DEPTH: i32 = 3;

/// AI player interface
/// AI player interface.
///
/// Implemented by every engine opponent in this module (e.g. [`RandomAI`],
/// [`MinimaxAI`]) and consumed by [`AITrainer`].
pub trait AIPlayer {
    /// Get the next move for the AI.
    ///
    /// Returns `None` when the player has no move to offer (e.g. no legal
    /// moves in the position).
    fn get_move(&mut self, game: &Game) -> Option<Move>;

    /// Update the AI with the result of a game.
    ///
    /// The default implementation is a no-op; learning implementations can
    /// override this to train on the finished game.
    fn update_with_result(&mut self, _game: &Game, _result: GameResult) {}
}

/// Random AI player that makes uniformly random legal moves.
///
/// Stateless baseline opponent, useful for testing and as a sanity check.
pub struct RandomAI;

impl AIPlayer for RandomAI {
    /// Pick one of the current legal moves uniformly at random.
    ///
    /// Returns `None` when the position has no legal moves.
    fn get_move(&mut self, game: &Game) -> Option<Move> {
        let mut rng = rand::thread_rng();
        game.get_legal_moves().choose(&mut rng).copied()
    }
}

/// Minimax AI player with alpha-beta pruning
pub struct MinimaxAI {
    /// Maximum search depth in plies from the root position.
    max_depth: i32,
    /// The side this AI plays for; at the root, the search maximizes when
    /// it is this side's turn and minimizes otherwise.
    side: Side,
}

impl MinimaxAI {
    /// Create a new minimax AI player.
    ///
    /// `max_depth` falls back to [`MAX_DEPTH`] when `None`.
    pub fn new(side: Side, max_depth: Option<i32>) -> Self {
        let max_depth = max_depth.unwrap_or(MAX_DEPTH);
        MinimaxAI { max_depth, side }
    }

    /// Minimax search with alpha-beta pruning.
    ///
    /// `maximizing` selects whether this ply maximizes or minimizes the
    /// static evaluation; the flag alternates on each recursive call.
    /// Returns the best score reachable within `depth` plies of `game`.
    fn minimax(
        &self,
        game: &Game,
        depth: i32,
        mut alpha: i32,
        mut beta: i32,
        maximizing: bool,
    ) -> i32 {
        // Leaf node: depth exhausted or the game has ended.
        if depth == 0 || game.result != GameResult::Ongoing {
            return evaluate_position(game);
        }

        // Seed with the worst possible score for the side to move; stays
        // at the extreme if no candidate move can actually be applied.
        let mut best = if maximizing { i32::MIN } else { i32::MAX };

        for mv in game.get_legal_moves() {
            let mut next = game.clone();
            // Skip any move the game state refuses to apply.
            if next.make_move(mv).is_err() {
                continue;
            }

            let score = self.minimax(&next, depth - 1, alpha, beta, !maximizing);
            if maximizing {
                best = best.max(score);
                alpha = alpha.max(score);
            } else {
                best = best.min(score);
                beta = beta.min(score);
            }

            // Alpha-beta cutoff: the opponent would never allow this line.
            if beta <= alpha {
                break;
            }
        }

        best
    }
}

impl AIPlayer for MinimaxAI {
    /// Evaluate every legal root move with minimax and return the best one.
    ///
    /// Maximizes when it is this AI's turn (`game.current_side == self.side`)
    /// and minimizes otherwise. Returns `None` when there are no legal
    /// moves. If no root move can be applied at all (every `make_move`
    /// fails), falls back to the first legal move, matching the previous
    /// behavior.
    fn get_move(&mut self, game: &Game) -> Option<Move> {
        let legal_moves = game.get_legal_moves();
        if legal_moves.is_empty() {
            return None;
        }

        let maximizing = game.current_side == self.side;

        // Track the best (move, score) seen so far. Using `Option` instead
        // of seeding with `legal_moves[0]` and an extreme score fixes a
        // subtle bug: the old code could return `legal_moves[0]` even when
        // `make_move` had rejected it, because a strict comparison against
        // i32::MIN/i32::MAX never fires when every evaluated move scores
        // exactly that extreme (which `minimax` returns for subtrees with
        // no applicable moves). Now the first successfully applied move
        // always becomes the baseline.
        let mut best: Option<(Move, i32)> = None;

        for &mv in &legal_moves {
            let mut game_copy = game.clone();
            if game_copy.make_move(mv).is_err() {
                continue;
            }

            let score = self.minimax(
                &game_copy,
                self.max_depth - 1,
                i32::MIN,
                i32::MAX,
                !maximizing,
            );

            let improved = match best {
                None => true,
                Some((_, best_score)) => {
                    if maximizing {
                        score > best_score
                    } else {
                        score < best_score
                    }
                }
            };
            if improved {
                best = Some((mv, score));
            }
        }

        best.map(|(mv, _)| mv)
            .or_else(|| legal_moves.first().copied())
    }
}

/// Interface for training AI models
/// Interface for training AI models
pub struct AITrainer<T: AIPlayer> {
    // The AI under training.
    ai: T,
    // Total games played via `play_game`.
    games_played: usize,
    // Games in which the trained AI's side won.
    games_won: usize,
}

impl<T: AIPlayer> AITrainer<T> {
    /// Create a new AI trainer wrapping `ai`, with zeroed statistics.
    pub fn new(ai: T) -> Self {
        AITrainer {
            ai,
            games_played: 0,
            games_won: 0,
        }
    }

    /// Play a single game between the trained AI (playing `side`) and
    /// `opponent` (playing the other side), starting from a fresh game.
    ///
    /// The game loop stops when the game ends, a player returns no move,
    /// or a returned move is rejected by `make_move`. Afterwards the
    /// win/loss statistics are updated and the result is forwarded to the
    /// trained AI via [`AIPlayer::update_with_result`].
    ///
    /// Note: the explicit `'a` lifetime and `where T: 'a` bound of the
    /// previous signature were redundant — `&mut self.ai` coerces to
    /// `&mut dyn AIPlayer` under the borrow's implied bounds.
    pub fn play_game(&mut self, opponent: &mut dyn AIPlayer, side: Side) -> GameResult {
        let mut game = Game::new();

        while game.result == GameResult::Ongoing {
            // Dispatch to whichever player owns the side to move.
            let current_ai: &mut dyn AIPlayer = if game.current_side == side {
                &mut self.ai
            } else {
                opponent
            };

            if let Some(mv) = current_ai.get_move(&game) {
                // Abort the game if the chosen move is rejected.
                if game.make_move(mv).is_err() {
                    break;
                }
            } else {
                // No move available: abort the game.
                break;
            }
        }

        self.games_played += 1;
        if let GameResult::Win(winner) = game.result {
            if winner == side {
                self.games_won += 1;
            }
        }

        self.ai.update_with_result(&game, game.result);
        game.result
    }

    /// Get the win rate of the AI (0.0 when no games have been played).
    pub fn win_rate(&self) -> f64 {
        if self.games_played == 0 {
            0.0
        } else {
            self.games_won as f64 / self.games_played as f64
        }
    }

    /// Get a reference to the AI.
    pub fn ai(&self) -> &T {
        &self.ai
    }

    /// Get a mutable reference to the AI.
    pub fn ai_mut(&mut self) -> &mut T {
        &mut self.ai
    }
}

/// Feature extraction for machine learning
/// Feature extraction for machine learning.
///
/// Flattens the 10x9x14 board-state tensor from `get_state_features`
/// (rank-major, then file, then channel) and appends four scalar features:
/// side to move, red-in-check, black-in-check, and the normalized
/// no-capture move counter. Returns exactly `10 * 9 * 14 + 4` values.
pub fn extract_features(game: &Game) -> Vec<f32> {
    // Pre-size the buffer (1264 entries) to avoid repeated reallocation.
    let mut features = Vec::with_capacity(10 * 9 * 14 + 4);

    // Board state features (flattened)
    let state = game.get_state_features();
    for rank in 0..10 {
        for file in 0..9 {
            for channel in 0..14 {
                features.push(state[rank][file][channel] as f32);
            }
        }
    }

    // Encode boolean features as 0.0/1.0.
    let flag = |b: bool| if b { 1.0 } else { 0.0 };
    features.push(flag(game.current_side == Side::Red));
    features.push(flag(game.is_in_check(Side::Red)));
    features.push(flag(game.is_in_check(Side::Black)));
    // Normalized by 60 — presumably the no-capture draw horizon; confirm
    // against the game rules.
    features.push(game.moves_without_capture as f32 / 60.0);

    features
}

#[cfg(test)]
mod tests {
    use super::*;

    /// The random AI must produce a move from the initial position,
    /// which always has legal moves.
    #[test]
    fn test_random_ai() {
        // `game` is never mutated; the old `let mut` triggered an
        // `unused_mut` warning.
        let game = Game::new();
        let mut ai = RandomAI;

        let mv = ai.get_move(&game);
        assert!(mv.is_some());
    }

    /// The minimax AI must produce a move from the initial position.
    /// A shallow depth (2) keeps the test fast.
    #[test]
    fn test_minimax_ai() {
        let game = Game::new();
        let mut ai = MinimaxAI::new(Side::Red, Some(2));

        let mv = ai.get_move(&game);
        assert!(mv.is_some());
    }

    /// Feature vector length must match the documented layout.
    #[test]
    fn test_feature_extraction() {
        let game = Game::new();
        let features = extract_features(&game);

        // 9x10x14 board features + 4 additional features
        assert_eq!(features.len(), 9 * 10 * 14 + 4);
    }
}
