package everything.search.trees;

import java.util.Map;
import java.util.TreeMap;

import everything.WorldMap;
import everything.search.AgentState;
import everything.search.agents.IntelligentAgent;
import everything.search.trees.nodes.GameTreeDualNode;
import everything.search.trees.nodes.GameTreeNode;

public class GameTreeSearchWithProbability extends GameTreeSearch{

	/**
	 * Expectiminimax variant of {@link GameTreeSearch}: when an agent has an
	 * orientation, a move can either succeed or slip, and the branch value is
	 * the probability-weighted sum of both outcomes (weight
	 * {@code _map.getNotSlippingP()} for success, the complement for a slip).
	 *
	 * @param map  the world the agents act in
	 * @param id   id of the maximizing agent (us)
	 * @param idOp id of the minimizing opponent
	 */
	public GameTreeSearchWithProbability(WorldMap map, int id, int idOp){
		super(map, id, idOp);
	}
	
	/**
	 * Runs a depth-limited search from {@code state}.
	 *
	 * @param state state to search from
	 * @param ply   remaining search depth in half-moves
	 * @return the node holding the best move found for the maximizing agent;
	 *         the freshly built root's value is updated to match it
	 */
	@Override
	protected GameTreeNode searchWithParam(AgentState state, int ply) {
		GameTreeNode node = new GameTreeNode(state, null, 0, null);
		GameTreeNode n = searchMax(node, ply);
		node.setValue(n.getValue());
		return n;
	}
	

	/**
	 * Maximizing step: expands our agent's five actions (four moves plus
	 * "Fire") and keeps the child with the highest value. With an orientation
	 * set, move outcomes are stochastic and are evaluated through
	 * {@link #getProbabilityNode}.
	 *
	 * @param node node whose state is to be expanded
	 * @param ply  remaining depth; at {@code ply <= 0} leaves are evaluated
	 * @return the best child found (also stored in the transposition table)
	 */
	protected GameTreeNode searchMax(GameTreeNode node, int ply){
		AgentState state = node.getState();
		
		// Transposition-table lookup. NOTE(review): entries are keyed on the
		// state only, not on the remaining ply, so a value computed by a
		// shallower search may be reused here — confirm this is intended.
		GameTreeNode storedState = checkState(state); 
		if (storedState!=null){
			return storedState;
		}
		
		if (state.getOrientation()==null){
			// Deterministic expansion: without an orientation moves cannot slip.
			Map<String, AgentState> expansions = new TreeMap<String, AgentState>();
			expansions.put("North", getNextState(state, "North", false));
			expansions.put("South", getNextState(state, "South", false));
			expansions.put("West", getNextState(state, "West", false));
			expansions.put("East", getNextState(state, "East", false));
			expansions.put("Fire", getNextState(state, "Fire", false));


			GameTreeNode optimalValue = null;
			for (Map.Entry<String, AgentState> t: expansions.entrySet()){
				// BUGFIX: the cutoff was "ply == 0". searchMin has no cutoff of
				// its own and hands us ply-1, so with an odd starting depth this
				// method could be entered with a negative ply and recurse without
				// bound; "<= 0" terminates in every case and is identical for
				// non-negative plies.
				if (t.getValue().isTerminal() || ply <= 0){
					double utility = IntelligentAgent.utility(_id, t.getValue(), _map);
					if (optimalValue == null || utility > optimalValue.getValue()){
						node.setValue(utility);
						optimalValue = new GameTreeNode(t.getValue(), node, utility, t.getKey());
					}
				} else {
					GameTreeNode tmp = new GameTreeNode(t.getValue(), node, 0, t.getKey());
					GameTreeNode tmp2 = searchMin(tmp, ply-1);
					if (optimalValue == null || tmp2.getValue() > optimalValue.getValue()){
						tmp.setValue(tmp2.getValue());
						optimalValue = tmp2;
					}
				}
			}
			storeState(state, optimalValue);
			return optimalValue;
		} else {
			// Stochastic expansion: each move's value is the expectation over
			// the slip / no-slip outcomes.
			GameTreeNode optimalValue = getProbabilityNode(state, node, "North", ply);
			GameTreeNode tmp = getProbabilityNode(state, node, "South", ply);
			if (tmp.getValue() >= optimalValue.getValue())
				optimalValue = tmp;
			tmp = getProbabilityNode(state, node, "West", ply);
			if (tmp.getValue() >= optimalValue.getValue())
				optimalValue = tmp;
			tmp = getProbabilityNode(state, node, "East", ply);
			if (tmp.getValue() >= optimalValue.getValue())
				optimalValue = tmp;
			// "Fire" never slips, so it is expanded deterministically.
			AgentState fire = getNextState(state, "Fire", false);
			if (fire.isTerminal() || ply <= 0){ // BUGFIX: was "ply == 0", see above
				double ut = IntelligentAgent.utility(_id, fire, _map);
				tmp = new GameTreeNode(fire, node, ut, "Fire");
			} else {
				GameTreeNode rootNode = new GameTreeNode(fire, node, 0.0, "Fire");
				tmp = searchMin(rootNode, ply-1);
				rootNode.setValue(tmp.getValue());
			}
			if (tmp.getValue() >= optimalValue.getValue()){
				tmp.getParent().setValue(tmp.getValue());
				optimalValue = tmp;
			}
			storeState(state, optimalValue);
			return optimalValue;
		}
	}

	/**
	 * Minimizing step: expands the opponent's five actions and keeps the child
	 * with the lowest value. Mirrors {@link #searchMax}, except that the
	 * opponent's non-terminal branches are always expanded regardless of ply
	 * (see the TODO below); termination is guaranteed by the {@code ply <= 0}
	 * cutoff inside {@code searchMax}.
	 *
	 * @param node node whose state is to be expanded
	 * @param ply  remaining depth, forwarded (minus one) to the max layer
	 * @return the worst-for-us child found (also stored in the table)
	 */
	protected GameTreeNode searchMin(GameTreeNode node, int ply){
		AgentState state = node.getState();
		
		GameTreeNode storedState = checkState(state); 
		if (storedState!=null){
			return storedState;
		}
		
		if (state.getDOpponent()==null){
			// Deterministic expansion of the opponent's moves.
			Map<String, AgentState> expansions = new TreeMap<String, AgentState>();
			expansions.put("North", getNextOpState(state, "North", false));
			expansions.put("South", getNextOpState(state, "South", false));
			expansions.put("West", getNextOpState(state, "West", false));
			expansions.put("East", getNextOpState(state, "East", false));
			expansions.put("Fire", getNextOpState(state, "Fire", false));

			GameTreeNode optimalValue = null;
			for (Map.Entry<String, AgentState> t: expansions.entrySet()){
				if (t.getValue().isTerminal()){
					//TODO not needed for terminals (opponenet's move) since this is a whole search tree
					double utility = IntelligentAgent.utility(_id, t.getValue(), _map);
					if (optimalValue == null || utility < optimalValue.getValue()){
						node.setValue(utility);
						optimalValue = new GameTreeNode(t.getValue(), node, utility, t.getKey());
					}
				} else {
					GameTreeNode tmp = new GameTreeNode(t.getValue(), node, 0, t.getKey());
					GameTreeNode tmp2 = searchMax(tmp, ply-1);
					if (optimalValue == null || tmp2.getValue() < optimalValue.getValue()){
						tmp.setValue(tmp2.getValue());
						optimalValue = tmp2;
					}
				}
			}
			storeState(state, optimalValue);
			return optimalValue;
		} else {
			// Stochastic expansion of the opponent's moves.
			GameTreeNode optimalValue = getProbabilityOpNode(state, node, "North", ply);
			GameTreeNode tmp = getProbabilityOpNode(state, node, "South", ply);
			if (tmp.getValue() <= optimalValue.getValue())
				optimalValue = tmp;
			tmp = getProbabilityOpNode(state, node, "West", ply);
			if (tmp.getValue() <= optimalValue.getValue())
				optimalValue = tmp;
			tmp = getProbabilityOpNode(state, node, "East", ply);
			if (tmp.getValue() <= optimalValue.getValue())
				optimalValue = tmp;

			AgentState fire = getNextOpState(state, "Fire", false);

			if (fire.isTerminal()){
				double ut = IntelligentAgent.utility(_id, fire, _map);
				tmp = new GameTreeNode(fire, node, ut, "Fire");
			} else {
				GameTreeNode rootNode = new GameTreeNode(fire, node, 0.0, "Fire");
				tmp = searchMax(rootNode, ply-1);
				rootNode.setValue(tmp.getValue());
			}
			if (tmp.getValue() <= optimalValue.getValue()){
				tmp.getParent().setValue(tmp.getValue());
				optimalValue = tmp;
			}
			storeState(state, optimalValue);
			return optimalValue;
		}
	}

	/**
	 * Expected value of one opponent move: with probability
	 * {@code _map.getNotSlippingP()} the move succeeds ({@code t1}), otherwise
	 * it slips ({@code t2}). Returns a dual node carrying both outcome states
	 * and the probability-weighted value.
	 *
	 * @param state     state the opponent moves from
	 * @param node      parent node for the children created here
	 * @param direction the opponent's move being evaluated
	 * @param ply       remaining depth, forwarded (minus one) to the max layer
	 * @return a {@link GameTreeDualNode} holding both outcomes and the expectation
	 */
	private GameTreeNode getProbabilityOpNode(AgentState state, GameTreeNode node, String direction, int ply){
		AgentState t1 = getNextOpState(state, direction, true);
		AgentState t2 = getNextOpState(state, direction, false);
		double value = 0.0;
		if (t1.isTerminal()) {
			value = _map.getNotSlippingP()*IntelligentAgent.utility(_id, t1,_map);
		} else {
			GameTreeNode t1Root = new GameTreeNode(t1, node, 0, direction);
			GameTreeNode t1Result = searchMax(t1Root, ply-1);
			value = _map.getNotSlippingP()*t1Result.getValue();
		}
		if (t2.isTerminal()) {
			value = value + (1.0-_map.getNotSlippingP())*IntelligentAgent.utility(_id, t2,_map);
		} else {
			GameTreeNode t2Root = new GameTreeNode(t2, node, 0, direction);
			GameTreeNode t2Result = searchMax(t2Root, ply-1);
			// BUGFIX: the slip term must be ADDED to the no-slip term computed
			// above; the original plain assignment discarded the t1 contribution,
			// breaking the expectation (contrast with the terminal branch).
			value = value + (1.0-_map.getNotSlippingP())*t2Result.getValue();
		}
		return new GameTreeDualNode(t1, node, value, direction, t2);
	}

	/**
	 * Expected value of one of our moves: with probability
	 * {@code _map.getNotSlippingP()} the move succeeds ({@code t1}), otherwise
	 * it slips ({@code t2}). Returns a dual node carrying both outcome states
	 * and the probability-weighted value.
	 *
	 * @param state     state our agent moves from
	 * @param node      parent node for the children created here
	 * @param direction our move being evaluated
	 * @param ply       remaining depth; at {@code ply <= 0} outcomes are evaluated directly
	 * @return a {@link GameTreeDualNode} holding both outcomes and the expectation
	 */
	private GameTreeNode getProbabilityNode(AgentState state, GameTreeNode node, String direction, int ply){
		AgentState t1 = getNextState(state, direction, true);
		AgentState t2 = getNextState(state, direction, false);
		double value = 0.0;
		if (t1.isTerminal() || ply <= 0) { // BUGFIX: was "ply == 0"; guards negative depth too
			value = _map.getNotSlippingP()*IntelligentAgent.utility(_id, t1,_map);
		} else {
			GameTreeNode t1Root = new GameTreeNode(t1, node, 0, direction);
			GameTreeNode t1Result = searchMin(t1Root, ply-1);
			value = _map.getNotSlippingP()*t1Result.getValue();
		}
		if (t2.isTerminal() || ply <= 0) { // BUGFIX: was "ply == 0"
			value = value + (1.0-_map.getNotSlippingP())*IntelligentAgent.utility(_id, t2,_map);
		} else {
			GameTreeNode t2Root = new GameTreeNode(t2, node, 0, direction);
			GameTreeNode t2Result = searchMin(t2Root, ply-1);
			// BUGFIX: the slip term must be ADDED to the no-slip term computed
			// above; the original plain assignment discarded the t1 contribution.
			value = value + (1.0-_map.getNotSlippingP())*t2Result.getValue();
		}
		return new GameTreeDualNode(t1, node, value, direction, t2);
	}
}
