package adversarial3;

import env.Action;
import adversarial.State;
import agents.AbsAgent;
import env.Enviroment;
import java.util.HashMap;
import javax.swing.ImageIcon;

/**
 * Game-playing agent that chooses moves with a negamax-style alpha-beta search
 * over {@link State3} states, with memoization and a probabilistic blend of
 * successor scores when standing on ice (slippery tiles).
 *
 * <p>NOTE(review): the memo table is static and shared across instances and
 * across turns — not thread-safe, and never cleared; confirm stale entries
 * from earlier turns cannot collide with current states.
 */
public class MinMaxAgent3 extends AbsAgent {

    /** Maximum ply depth explored by the alpha-beta search. */
    private static final int MAX_DEPTH = 25;
    /** Time budget in msec for one move (was used by a now-removed iterative-deepening loop). */
    private final static long MAX_TIME = 2000;
    /** Debug flag — currently unused by any active code path. */
    private final static boolean debug = false;

    /**
     * Creates the agent at the given board position and assigns its icon.
     *
     * @param name agent display name
     * @param row  starting row
     * @param col  starting column
     */
    public MinMaxAgent3(String name, int row, int col) {
        super(name, row, col);
        super.setIcon(new ImageIcon("images/cow.jpg"));
    }

    /**
     * Chooses the best move for this agent by expanding the current root state
     * and scoring each candidate child with {@link #maxiMaxAlphaBeta}.
     *
     * @return the action with the highest score found, or {@code null} when no
     *         candidate scored above the initial -1000 bound
     * @throws IllegalStateException if no enemy agent exists in the environment
     */
    @Override
    public Action getMove() {
        // Locate the opponent: the first agent in the environment that is not us.
        AbsAgent enemy = null;
        for (AbsAgent a : Enviroment.env.agents) {
            if (a != this) {
                enemy = a;
                break;
            }
        }
        if (enemy == null) {
            // Was an implicit NullPointerException below — fail with a clear message instead.
            throw new IllegalStateException("getMove: no enemy agent found in the environment");
        }
        State3 root = new State3(Enviroment.env.flags, getState(), enemy.getState());
        Action ans = null;
        long start = System.currentTimeMillis();
        long end = start;
        count = 0;
        hits = 0;
        miss = 0;
        double best = -1000;

        for (State3 child : root.expand()) {
            // A "shoot" move is pointless when the enemy is not in line of sight — skip it.
            // (was: non-short-circuit `&` plus an empty if-body with the logic in the else)
            if (child.getLastAction() == Action.shoot
                    && !Enviroment.isInLineOfSight(child.getFirstAgent().getRow(), child.getFirstAgent().getCol(),
                            child.secondAgent.getRow(), child.secondAgent.getCol())) {
                continue;
            }
            long temp = System.currentTimeMillis();
            System.out.println("# getMove checking action " + child.getLastAction() + "  child=" + child);
            // Negamax: our score for a child is the negation of the score its
            // first-to-move player can achieve. `best` doubles as alpha here.
            double x = -maxiMaxAlphaBeta(child, 1, MAX_DEPTH, best, 1000);
            if (x > best) {
                System.out.println("# getMove " + child.getLastAction() + " (" + x + ") is BETTER than " + ans + " (" + best + ")!");
                best = x;
                ans = child.getLastAction();
            } else {
                System.out.println("# getMove " + child.getLastAction() + " (" + x + ") is not better than " + ans + " (" + best + ").");
            }
            end = System.currentTimeMillis();
            System.out.println("-------------- took " + (end - temp) + " msec  (total time=" + (end - start) + ") -----------------");
        }

        System.out.println("##########\n########## getMove " + getName() + " chose [" + ans + "] as the best move (score " + best + ") in max depth " + MAX_DEPTH + ".");
        System.out.println("search is finished after " + (end - start) + " msec   and " + count + " calls to maximax.  memo.size=" + memo.size() + ", hit=%" + (100.0 * hits / (hits + miss)));
        System.out.println("\n\n\n");
        return ans;
    }

    /**
     * Immutable (score, depth) entry stored in the memoization table.
     * Static so it carries no hidden reference to the enclosing agent.
     */
    private static class Pair {

        final double maximax;
        final int depth;

        public Pair(double maximax, int depth) {
            this.maximax = maximax;
            this.depth = depth;
        }

        @Override
        public String toString() {
            return "Pair: maximax=" + maximax + " on depth " + depth;
        }
    }
    /**
     * Memoization for maxiMaxAlphaBeta: a cached value is reused ONLY when it
     * was computed at a smaller-or-equal depth parameter, i.e. with at least as
     * much remaining search depth as the current call.
     */
    private static HashMap<State3, Pair> memo = new HashMap<State3, Pair>();
    // Search statistics — diagnostics only.
    private long hits = 0,  miss = 0,  count = 0;

    /**
     * Negamax alpha-beta search: returns the best possible score that the first
     * player to move in {@code s} can hope for.
     *
     * @param s         state to evaluate
     * @param depth     current ply (grows toward {@code max_depth})
     * @param max_depth depth at which the search is cut off and the heuristic
     *                  utility is returned
     * @param alpha     lower bound on the achievable score
     * @param beta      upper bound; search is pruned once alpha >= beta
     * @return the best score for the player to move in {@code s}
     */
    private double maxiMaxAlphaBeta(State3 s, int depth, final int max_depth, double alpha, double beta) {
        count++; // diagnostics only
        if (s.isTerminal() || depth == max_depth)
            return s.utilityForFirstPlayer();
        // Single lookup instead of containsKey + get + get.
        Pair cached = memo.get(s);
        if (cached != null && cached.depth <= depth) {
            hits++;
            return cached.maximax;
        }
        miss++;
        double bestScore = -1000;
        final State3[] successors = s.expand(); // sorted from best to worst score for first player
        for (int i = 0; i < successors.length; ++i) {
            State3 child = successors[i];
            double x = -maxiMaxAlphaBeta(child, depth + 1, max_depth, -beta, -alpha);
            // On ice the intended move may slip: blend this child's score with the
            // next successor's score, weighted by the slip probability Enviroment.p.
            // Fix: added `i + 1 < successors.length` — the original could index one
            // past the end of the array (was also non-short-circuit `&`).
            // NOTE(review): the `i > 0` guard means the best-ranked successor is
            // never blended — confirm that is intentional.
            if (s.getFirstAgent().getTile().isIce() && i > 0 && i + 1 < successors.length) {
                ++i;
                child = successors[i];
                double y = -maxiMaxAlphaBeta(child, depth + 1, max_depth, -beta, -alpha);
                x = (1 - Enviroment.p) * x + Enviroment.p * y;
            }
            if (x > bestScore)
                bestScore = x;
            if (x > alpha)
                alpha = x;
            if (alpha >= beta)
                break; // beta cutoff — remaining successors cannot improve the result
        }
        memo.put(s, new Pair(bestScore, depth));
        return bestScore;
    }
}
