package ai.phase.cognition.development.simplebeam;
import ai.algorithms.interruptible.*;
import ai.filetools.StandardTools;
import ai.*;
import ai.phase.cognition.development.*;

import java.util.*;
/**
 * Map-navigating agent driven by a small finite-state machine.
 *
 * <p>The agent maintains a learned model of the environment as a set of
 * {@link BeamRegion}s. Each step it reconciles the region it actually
 * landed in against the region it predicted, scores all known regions to
 * pick a goal, and plans a route with an interruptible {@link BeamSearch}
 * using iterative deepening up to {@code j_MaxSearchDepth}.
 * {@link #UpdateCurrentState} advances the machine within a caller-supplied
 * time budget. The agent also learns "opposite" action pairs (an action and
 * its inverse) so that a freshly entered region can be linked back to the
 * region it came from instead of being surrounded only by hypothesized
 * neighbors.
 *
 * <p>A region whose value in {@code j_ModelToIndexMap} is {@code null} is
 * "virtual": hypothesized by the model but never visited, so it has no
 * environment region id yet.
 *
 * <p>NOTE(review): despite the {@link Hashtable} fields this class is not
 * designed for concurrent use — the {@link HashMap} fields are unsynchronized.
 * Confirm it is only driven from a single thread.
 */
public class BeamAgent implements MapAgent{

	/**
	 * Outcomes returned by the per-state processing routines
	 * ({@code UpdatePriorRegion}, {@code ProcessNextAction}); the anonymous
	 * {@code AgentState}s branch on these to pick the next state.
	 */
	public enum CONDITIONS
	{
		NO_PRIOR_REGION,
		CURRENT_REGION_UNEXPECTED,
		EXPECTED_REGION_VIRTUAL,
		CURRENT_REGION_EXPECTED,
		ERROR,
		ACTION_NOT_AVAILABLE,
		CAN_EXECUTE_ACTION
	}
	
	/**
	 * Names of the states registered in {@code j_StateMap} by
	 * {@code Initialize()}. {@code INITIAL} doubles as the boundary state at
	 * which {@code UpdateCurrentState} stops stepping for the current call.
	 */
	public enum STATES
	{
		CHECKING_ACTION_LIST,
		PROCESSING_PREVIOUS_REGION,
		TRY_PROCESSING_PRIOR_REGION,
		CHOOSING_GOAL_REGION,
		CREATING_NEW_REGION,
		ROUTING_TO_GOAL,
		TIMEOUT_INTERRUPT,
		PROCESSING_NEXT_ACTION,
		INITIAL
	}
	
	
	
	// Interruptible search used to route from the current region to the goal.
	public BeamSearch j_Search;
	public HashMap<Integer,BeamRegion> j_RegionSet;
	// Shared goal marker handed to every region; set in CHOOSING_GOAL_REGION.
	private SimpleGoalIndicator j_GoalIndicator;
	// States at which the stepping loop in UpdateCurrentState must stop.
	private HashMap<AgentState,AgentState> j_BoundaryStateMap;
	// Remaps a state the loop was interrupted in (ROUTING_TO_GOAL) to the
	// state to resume from next call (TIMEOUT_INTERRUPT).
	private HashMap<AgentState,AgentState> j_InnerStateMap;
	// Reverse lookup of state -> enum name, used only for trace logging.
	private HashMap<AgentState,String> j_StateNames;
	// environment region id -> model region (visited regions only).
	HashMap<Integer, BeamRegion> j_RegionIndexModelMap;
	// model region -> environment region id; a null value marks a
	// "virtual" (hypothesized, never visited) region.
	HashMap<BeamRegion,Integer> j_ModelToIndexMap;
	AgentState j_CurrentState;
	// enum name -> state instance; populated by AddState.
	HashMap<String, AgentState> j_StateMap;
	// Action chosen during this UpdateCurrentState call; null = no action.
	Integer j_ChosenAction;
	BeamRegion j_CurrentRegion;
	BeamRegion j_PriorRegion;
	Integer j_CurrentRegionIndex;
	Integer j_PriorRegionIndex;
	// Actions the environment reports as currently executable.
	int[] j_AvailableActions;
	int j_CurrentUtility;
	int j_PreviousUtility;
	// Last non-null chosen action; consumed by UpdateOppositeActions.
	Integer j_PreviousAction;
	// Region the model predicts the previous action leads to.
	BeamRegion j_ExpectedNextRegion;
	
	// Score-weighted list of all model regions, rebuilt by SetScores().
	LinkedList<WeightedValue<BeamRegion>> j_TotalRegionList;
	// Remaining planned actions from the last successful search.
	LinkedList<ChoicePointer> j_ActionList;
	
	boolean j_Timeout=false;
	// Iterative-deepening bounds for BeamSearch.
	int j_MaxSearchDepth;
	int j_CurrentSearchDepth;
	String j_TraceFileFullName;
	
	// prior region -> (next region -> action that made that transition);
	// the learned transition memory used to infer opposite actions.
	Hashtable<BeamRegion, Hashtable<BeamRegion, Integer>> j_RegionRelationMap;
	// action id -> learned inverse action id.
	Hashtable<Integer, Integer> j_OppositeActionMap;
	// When true, learning Opp(a)=b also records Opp(b)=a (and un-learning
	// one direction removes the other).
	boolean j_AssumeSymmetricalOppositesP;
	boolean j_TraceP=false;

	/**
	 * Builds the state machine and all bookkeeping maps; the agent starts
	 * in {@code INITIAL} with a search depth cap of 10.
	 * NOTE(review): the trace path is a hard-coded absolute Windows path —
	 * consider making it configurable.
	 */
	public BeamAgent()
	{
		j_StateNames = new HashMap<AgentState, String>();
		j_ModelToIndexMap= new HashMap<BeamRegion, Integer>();
		j_RegionIndexModelMap = new HashMap<Integer, BeamRegion>();
		j_MaxSearchDepth=10;
		j_StateMap = new HashMap<String, AgentState>();
		j_GoalIndicator = new SimpleGoalIndicator();
		j_TraceFileFullName="E:\\AI_system\\Phase1.5\\BeamAgentTrace.log";
		Initialize();
		j_CurrentState = GetState(STATES.INITIAL);
		j_RegionRelationMap = new Hashtable<BeamRegion, Hashtable<BeamRegion,Integer>>();
		j_OppositeActionMap = new Hashtable<Integer, Integer>();
		j_AssumeSymmetricalOppositesP=true;
	}
	
	
	
	/** @return the utility value most recently reported to UpdateCurrentState. */
	public int GetUtility()
	{
		return j_CurrentUtility;
	}

	/**
	 * Registers every state of the machine as an anonymous {@code AgentState}
	 * whose {@code GetNextState} performs that state's work and returns the
	 * successor. Also marks INITIAL as the loop boundary and maps an
	 * interruption inside ROUTING_TO_GOAL to TIMEOUT_INTERRUPT.
	 */
	private void Initialize()
	{
		// INITIAL: entry point of every step; immediately moves on.
		AddState(STATES.INITIAL, 
			new AgentState()
			{
				public AgentState GetNextState(Object eventData)
				{
					return GetState(STATES.CHECKING_ACTION_LIST);
				}
			}
		);
		
		// If a planned route is still pending, keep executing it; otherwise
		// fall back to reconciling the prior region / replanning.
		AddState(STATES.CHECKING_ACTION_LIST, 
			new AgentState()
			{
				public AgentState GetNextState(Object eventData)
				{
					if ((j_ActionList!=null)&&(j_ActionList.size()>0))
						return GetState(STATES.PROCESSING_PREVIOUS_REGION);
					else
						return GetState(STATES.TRY_PROCESSING_PRIOR_REGION);
				}
			}
		);
		
	
		// Mid-route: verify the last action landed where predicted. An
		// unexpected region invalidates the plan, so re-choose the goal;
		// otherwise continue with the next planned action.
		AddState(STATES.PROCESSING_PREVIOUS_REGION, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					CONDITIONS result;
					result = UpdatePriorRegion();
					if (result==CONDITIONS.CURRENT_REGION_UNEXPECTED)
						return GetState(STATES.CHOOSING_GOAL_REGION);
					else
						return GetState(STATES.PROCESSING_NEXT_ACTION);
				}
			}
		);
		
		// Pop and commit the next planned action; if it cannot be executed,
		// replan; otherwise end this step (back to INITIAL boundary).
		AddState(STATES.PROCESSING_NEXT_ACTION, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					CONDITIONS result;
					result = ProcessNextAction();
					if (result==CONDITIONS.ACTION_NOT_AVAILABLE)
						return GetState(STATES.CHOOSING_GOAL_REGION);
					else
						return GetState(STATES.INITIAL);
				}
			}
		);

		// No pending route: reconcile the prior region if one exists; with
		// no prior region (first ever step) a fresh region must be created.
		AddState(STATES.TRY_PROCESSING_PRIOR_REGION, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					CONDITIONS result;
					result = UpdatePriorRegion();
					if (result==CONDITIONS.NO_PRIOR_REGION)
						return GetState(STATES.CREATING_NEW_REGION);
					else
						return GetState(STATES.CHOOSING_GOAL_REGION);
				}
			}
		);
		
		// Create a region for the current index with no known links, then
		// pick a goal from it.
		AddState(STATES.CREATING_NEW_REGION, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					CreateDisconnectedRegion();
					return GetState(STATES.CHOOSING_GOAL_REGION);
				}
			}
		);
		
		
		// Pick a goal region by score-weighted random choice and start a
		// depth-1 search toward it (depth grows in ROUTING_TO_GOAL on failure).
		AddState(STATES.CHOOSING_GOAL_REGION, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					j_ChosenAction=null;
					BeamRegion goal = GetGoalRegion();
					if (!j_ModelToIndexMap.containsKey(goal))
						throw new RuntimeException("Assert exception: Goal must be possible to reach");
					j_GoalIndicator.SetGoal(goal);
					j_CurrentSearchDepth=1;
					j_Search = new BeamSearch(j_CurrentRegion,j_CurrentSearchDepth);
					j_Search.ResetSearchState();
					return GetState(STATES.ROUTING_TO_GOAL);
				}
			}
		);
		
		// Run the search one interruptible step at a time. On completion:
		// take the solution path if found; otherwise deepen and retry, and
		// once the depth cap is hit give up and choose a new goal.
		AddState(STATES.ROUTING_TO_GOAL, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					if (j_Search.ExecuteNextStep())
					{
						// search not finished yet; stay in this state
						return GetState(STATES.ROUTING_TO_GOAL);
					}
					else
					{
						j_ActionList=j_Search.GetSolutionPath();
						if (j_ActionList==null)
						{ // failure to route to goal
							
							if (j_CurrentSearchDepth<j_MaxSearchDepth)
							{
								// iterative deepening: restart one level deeper
								j_CurrentSearchDepth++;
								j_Search = new BeamSearch(j_CurrentRegion,j_CurrentSearchDepth);
								j_Search.ResetSearchState();
								
//								if (j_CurrentSearchDepth>3)
//									StandardTools.WriteTraceLog(j_TraceFileFullName, "Search depth too deep: "+j_CurrentSearchDepth);
								return GetState(STATES.ROUTING_TO_GOAL);
							}
							else
								return GetState(STATES.CHOOSING_GOAL_REGION);
						}
						else
							return GetState(STATES.PROCESSING_NEXT_ACTION);
					}
					
				}
			}
		);
		
		
		// Entered (via j_InnerStateMap) when the previous call's time budget
		// expired while routing. If nothing changed since that call (same
		// region, same utility), resume the interrupted search; otherwise
		// the world moved on, so replan from scratch.
		// NOTE(review): j_CurrentRegionIndex/j_PriorRegionIndex are compared
		// via Integer.equals; j_PriorRegionIndex is set at the end of every
		// UpdateCurrentState call, so it should be non-null by the time this
		// state runs — confirm no path reaches here before the first call
		// completes.
		AddState(STATES.TIMEOUT_INTERRUPT, 
			new AgentState()
			{
			
				public AgentState GetNextState(Object eventData)
				{
					j_ChosenAction=null;
					if (j_CurrentRegionIndex.equals(j_PriorRegionIndex))
					{
						if (j_CurrentUtility==j_PreviousUtility)
							return GetState(STATES.ROUTING_TO_GOAL);
					}
					return GetState(STATES.CHOOSING_GOAL_REGION);
					
				}
			}
		);
		
		j_BoundaryStateMap = new HashMap<AgentState, AgentState>();
		j_InnerStateMap=new HashMap<AgentState, AgentState>();
		j_InnerStateMap.put(GetState(STATES.ROUTING_TO_GOAL), GetState(STATES.TIMEOUT_INTERRUPT));
		j_BoundaryStateMap.put(GetState(STATES.INITIAL), GetState(STATES.INITIAL));
	}
	
	/** Looks up the registered state instance for the given enum constant. */
	private AgentState GetState(STATES newState)
	{
		return j_StateMap.get(newState.name());
	}
	
	/** Registers a state under its enum name, plus the reverse name mapping for tracing. */
	private void AddState(STATES newState, AgentState value)
	{
		j_StateMap.put(newState.name(), value);
		j_StateNames.put(value, newState.name());
	}
	
	
	
	/**
	 * One observe-decide cycle. Records the observation, then steps the
	 * state machine until either the {@code stepMilli} budget elapses or a
	 * boundary state (INITIAL) is reached. If the budget expired inside an
	 * inner state (ROUTING_TO_GOAL) the state is remapped so the next call
	 * resumes via TIMEOUT_INTERRUPT.
	 *
	 * @param currentRegionId         environment id of the region the agent is in
	 * @param currentAvailableActions actions executable from this region
	 * @param currentUtility          utility observed in this region
	 * @param stepMilli               wall-clock time budget in milliseconds
	 * @return the chosen action id, or null if no action was decided this cycle
	 */
	@Override
	public Integer UpdateCurrentState(int currentRegionId, int[] currentAvailableActions, int currentUtility, int stepMilli) 
	{
		// Record the new observation before stepping the machine.
		
		UpdateAllRegions();
		j_ChosenAction=null;
		j_AvailableActions=currentAvailableActions;
		j_CurrentUtility=currentUtility;
		j_CurrentRegionIndex=currentRegionId;
		long startTimeMilli = System.currentTimeMillis();
		long stopTimeMilli = startTimeMilli+stepMilli;
		// Step the machine; the budget is checked between transitions only,
		// so a single GetNextState call can overrun it.
		
		
		while (System.currentTimeMillis()<stopTimeMilli)
		{
			if (j_TraceP)
				StandardTools.WriteTraceLog(j_TraceFileFullName, "Current state is: " + j_StateNames.get(j_CurrentState));
			j_CurrentState = j_CurrentState.GetNextState(null);
			if (j_BoundaryStateMap.containsKey(j_CurrentState))
				break;
		}

		// If stopped mid-route, divert to the timeout-recovery state.
		if (j_InnerStateMap.containsKey(j_CurrentState))
			j_CurrentState=j_InnerStateMap.get(j_CurrentState);
		
		HandleTimeoutInterrupt();
		if (j_ChosenAction!=null)
			j_PreviousAction=j_ChosenAction;
		return j_ChosenAction;
	}

	/**
	 * Reconciles the region the agent actually arrived in against the model's
	 * prediction ({@code j_ExpectedNextRegion}), updating the region maps.
	 *
	 * @return NO_PRIOR_REGION when there is nothing to reconcile (first step);
	 *         EXPECTED_REGION_VIRTUAL when the prediction was a virtual region
	 *         (now bound to, or replaced by, the real one);
	 *         CURRENT_REGION_EXPECTED when the prediction was correct;
	 *         CURRENT_REGION_UNEXPECTED when the agent landed somewhere else;
	 *         ERROR when the expected region is not in the model at all.
	 */
	private CONDITIONS UpdatePriorRegion()
	{

		if (j_PriorRegion!=null)
		{
			// The expected next Region is defined
			// Check if expected region is the current region
			Integer mappedIndex;
			if (j_ModelToIndexMap.containsKey(j_ExpectedNextRegion))
			{
				mappedIndex=j_ModelToIndexMap.get(j_ExpectedNextRegion);
				if (mappedIndex==null) // virtual region
				{
					if (j_RegionIndexModelMap.containsKey(j_CurrentRegionIndex))
					{
						// The index is already modeled: discard the virtual
						// region and link prior -> existing region instead.
						j_ModelToIndexMap.remove(j_ExpectedNextRegion);
						j_CurrentRegion=j_RegionIndexModelMap.get(j_CurrentRegionIndex);
						j_PriorRegion.AssertNextRegion(j_CurrentRegion);
						j_CurrentRegion.AssertVisited(j_AvailableActions, j_CurrentUtility);
						
					}
					else
					{
						// First visit: bind the virtual region to this index.
						j_ModelToIndexMap.put(j_ExpectedNextRegion, j_CurrentRegionIndex);
						j_RegionIndexModelMap.put(j_CurrentRegionIndex, j_ExpectedNextRegion);
						j_CurrentRegion=j_ExpectedNextRegion;
						j_CurrentRegion.AssertVisited(j_AvailableActions, j_CurrentUtility);
						// Don't need to call j_PriorRegion.AssertNextRegion(j_CurrentRegion)
						// because prior region already maps to current region by definition
						 
					}
					
					UpdateOppositeActions();
					
					return CONDITIONS.EXPECTED_REGION_VIRTUAL;
				}
				else
				{
					// there was a prediction
					if (mappedIndex.equals(j_CurrentRegionIndex))
					{// accurate prediction
						j_ExpectedNextRegion.AssertVisited(j_AvailableActions, j_CurrentUtility);
						j_CurrentRegion=j_ExpectedNextRegion;
						UpdateOppositeActions();
						return CONDITIONS.CURRENT_REGION_EXPECTED;
					}
					else
					{
						// Prediction wrong: use the existing model region for
						// this index, or create one if it has never been seen.
						if (j_RegionIndexModelMap.containsKey(j_CurrentRegionIndex))
						{
							j_CurrentRegion=j_RegionIndexModelMap.get(j_CurrentRegionIndex);
							j_CurrentRegion.AssertVisited(j_AvailableActions, j_CurrentUtility);
						}
						else
						{
							CreateDisconnectedRegion();
						}
						
						j_PriorRegion.AssertNextRegion(j_CurrentRegion);
						UpdateOppositeActions();
						// May need to distinguish between new and unexpected and existing and unexpected
						return CONDITIONS.CURRENT_REGION_UNEXPECTED;
					}
				}
				
			}
			return CONDITIONS.ERROR;
		}
		else
			return CONDITIONS.NO_PRIOR_REGION;
	}
	
	
	
	/**
	 * Factory for a new virtual region: registered in the model with a null
	 * index (not yet observed). Presumably called by BeamRegion when it
	 * hypothesizes unexplored neighbors — TODO confirm against BeamRegion.
	 */
	public BeamRegion MakeRegion()
	{
		BeamRegion newRegion = new BeamRegion(j_GoalIndicator,this);
		j_ModelToIndexMap.put(newRegion, null);
		return newRegion;
	}
	
	/**
	 * Creates a region for the current environment index with no links to
	 * any existing region, binds it in both maps, and marks it visited.
	 */
	private void CreateDisconnectedRegion()
	{
		j_CurrentRegion = new BeamRegion(j_GoalIndicator,this);
		j_ModelToIndexMap.put(j_CurrentRegion, j_CurrentRegionIndex);
		j_RegionIndexModelMap.put(j_CurrentRegionIndex, j_CurrentRegion);
		j_CurrentRegion.AssertVisited(j_AvailableActions, j_CurrentUtility);
	}
	
	/**
	 * Rebuilds j_TotalRegionList with every model region (virtual included)
	 * weighted by its current score.
	 */
	private void SetScores()
	{
		j_TotalRegionList=new LinkedList<WeightedValue<BeamRegion>>();
		
		
		for (BeamRegion bRegion:j_ModelToIndexMap.keySet())
		{
			j_TotalRegionList.add(new WeightedValue<BeamRegion>(bRegion,bRegion.GetScore()));
		}
		
	}
	
	
	/** Picks a goal region by score-weighted random choice over all regions. */
	private BeamRegion GetGoalRegion()
	{
		SetScores();
		
		return AITools.ChooseWeightedRandom(j_TotalRegionList,true).GetValue();
	}

	/**
	 * Pops the next action from the planned route and commits to it: asks the
	 * current region which region that action should lead to and records the
	 * prediction in j_ExpectedNextRegion.
	 *
	 * @return ACTION_NOT_AVAILABLE if there is no plan or the model rejects
	 *         the action; CAN_EXECUTE_ACTION otherwise.
	 */
	private CONDITIONS ProcessNextAction()
	{
		BeamRegion.RegionPointer rPointer;
		
		if (j_ActionList==null) // failed to determine how to reach goal
		{
			j_ChosenAction=null;
			return CONDITIONS.ACTION_NOT_AVAILABLE;
		}
		if (j_ActionList.size()==0)
		{
			// empty plan: fall through with a null action
			j_ChosenAction=null;
		}
		else
		{
			ChoicePointer genericChoicePointer=j_ActionList.removeFirst();
			rPointer= (BeamRegion.RegionPointer)genericChoicePointer;
			j_ChosenAction=rPointer.GetActionId();
			
		}
		
		// NOTE(review): j_ChosenAction may be null here (empty plan) —
		// presumably AssertActionTaken tolerates a null action id; confirm.
		j_ExpectedNextRegion=j_CurrentRegion.AssertActionTaken(j_ChosenAction);
		if (j_ExpectedNextRegion==null)
		{
			j_ChosenAction=null;
			return CONDITIONS.ACTION_NOT_AVAILABLE;
		}
		
		// Sanity check: the planned action should be among the actions the
		// environment currently offers. Debug print only; not fatal.
		if (!ActionReallyPossible(j_ChosenAction))
			System.out.println("Problem\r\n");
		
		
		j_PriorRegion=j_CurrentRegion;
		return CONDITIONS.CAN_EXECUTE_ACTION;
	}
	
	/**
	 * True if the given action id appears in the currently available actions;
	 * a null action is vacuously considered possible.
	 */
	private boolean ActionReallyPossible(Integer index)
	{
		if (index==null)
			return true;
		// Enhanced-for over int[] with an Integer loop variable boxes each
		// element (JLS 14.14.2 assignment conversion).
		for (Integer aindex:j_AvailableActions)
		{
			if (aindex.intValue()==index.intValue())
				return true;
		}
		return false;
	}

	/** Gives every model region one time step (decay/aging — see BeamRegion.AssertNextStep). */
	private void UpdateAllRegions()
	{
		
		//Enumeration<BeamRegion> regionIterator = j_ModelToIndexMap.keys();
		for (BeamRegion nextRegion:j_ModelToIndexMap.keySet())
		{
			nextRegion.AssertNextStep();
		}
	
		
	}
	
	/**
	 * Learns and maintains inverse-action pairs from the observed transition
	 * prior-region --(j_PreviousAction)--> current-region.
	 *
	 * <p>Precondition (per the notes below): both the prior and the current
	 * region exist and are mapped when this is called.
	 */
	private void UpdateOppositeActions()
	{
		// Two cases that matter:
		/*
		 * Case 1:
		 * Agent moves into a region for the first time.  Expected region will be virtual, current region index will not be in 
		 * RegionIndexToModelMap.  Once current region is created, it will be surrounded by virtual regions.  The key thing is that
		 * if the prior action that lead to the current region has an opposite then the current region should not be completely
		 * surrounded by virtual regions but rather the action leading to the prior region should be mapped to the prior region
		 * 
		 * Case 2:
		 * Agent moves into a region that it expects to be virtual but is, in fact, a region it has encountered before.  After this, 
		 * same logic applies that 
		 * 
		 * Case 3: 
		 * Agent moves into a region that it expects not be 
		 * 
		 * Overall precondition:
		 * 
		 * Current region exists and is mapped as well as prior region
		 */
		
		// set up learning inverse actions
		/* Methodology is:
		 *  If observe A(R1, R2) then check if exists ~A
		 *  if so then assert ~A(R2, R1)
		 * 
		 */
		BeamRegion reverseRegion;
		Hashtable<BeamRegion, Integer> regionActionMap;
		Integer oppositeAction;
		BeamRegion oppositeRegion;
		
		// Record the observed transition in the relation map.
		if (!j_RegionRelationMap.containsKey(j_PriorRegion))
		{
			// Assert that executing prior action from prior region 
			// leads to current region
			regionActionMap = new Hashtable<BeamRegion, Integer>();
		}
		else
			regionActionMap = j_RegionRelationMap.get(j_PriorRegion);
		regionActionMap.put(j_CurrentRegion, j_PreviousAction);
		j_RegionRelationMap.put(j_PriorRegion, regionActionMap);
		
		// Check if opposite action exists
			
		if (j_OppositeActionMap.containsKey(j_PreviousAction))
		{
			oppositeAction=j_OppositeActionMap.get(j_PreviousAction);
			// Get the region that the agent expects to be in if it moves in
			// the opposite direction from current region
			oppositeRegion=j_CurrentRegion.AssertActionTaken(oppositeAction);
						
			// NOTE(review): a null get() here can mean either "virtual region"
			// or "region absent from the map" — confirm absence is impossible
			// for a region returned by AssertActionTaken.
			if ((oppositeRegion!=null)&&(j_ModelToIndexMap.get(oppositeRegion)==null)) // expects to be in a virtual region
			{
				// Set the actual opposite region to the prior region
				j_CurrentRegion.AssertNextRegion(j_PriorRegion);
				j_ModelToIndexMap.remove(oppositeRegion); // get rid of virtual region
				
			}
			else
			{
				// non- virtual opposite region
				// if the non-virtual opposite region is not the prior region then can't treat j_PreviousAction
				// and oppositeAction as opposites
				if (!j_PriorRegion.equals(oppositeRegion))
				{
					// opposite region is null or different from prior region
					j_OppositeActionMap.remove(j_PreviousAction);
					// Usually if Opp(j_PreviousAction)=A then Opp(A)=j_PreviousAction but am not
					// assuming this so not removing j_Opposite
					if ((j_AssumeSymmetricalOppositesP)&&(j_OppositeActionMap.containsKey(oppositeAction)))
						j_OppositeActionMap.remove(oppositeAction);		
				}
			}
		}
		else
		{
			// Get the action that last lead from the current region to previous region
			if (j_RegionRelationMap.containsKey(j_CurrentRegion))
			{
				regionActionMap = j_RegionRelationMap.get(j_CurrentRegion);
				if (regionActionMap.containsKey(j_PriorRegion))
				{ // If it has moved from the current region to the prior region before
					// get the action that took it from current region to prior region
					oppositeAction=regionActionMap.get(j_PriorRegion);
					// 
					j_OppositeActionMap.put(j_PreviousAction, oppositeAction);
					if (j_AssumeSymmetricalOppositesP)
						j_OppositeActionMap.put(oppositeAction, j_PreviousAction);
				}
			}
		}
		
	}
	
	/**
	 * End-of-cycle bookkeeping (run every call, timeout or not): snapshots
	 * the current region index and utility so TIMEOUT_INTERRUPT can detect
	 * whether anything changed between calls.
	 */
	private void HandleTimeoutInterrupt()
	{
		j_PriorRegionIndex=j_CurrentRegionIndex;
		j_PreviousUtility=j_CurrentUtility;
	}
}
