package ipc;


import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import ipc.Relay;

/**
 * Arbiter regulates agents interacting with the simulator: each step it pulls
 * the authoritative state from the server via {@link Relay}, lets the agent
 * whose turn it is choose an action, and sends that action back. It also
 * collects per-trial reward and timing statistics across a batch of trials.
 */
public class Arbiter<S extends State, A> {
    /** Real world domain being used. */
    private Simulator<S, A> world_;

    /** Agents taking action in the domain, one per simulator slot. */
    private List<Agent<S, A>> agents_;

    /** Incrementally updated running mean of per-trial rewards. */
    private double mean;

    /** Running sum of squared per-trial rewards (used for the variance). */
    private double sumofsq;

    /**
     * Sample standard deviation of per-trial rewards. NOTE: despite the
     * name, this holds sqrt(variance) — see {@link #getVariance()}.
     */
    private double variance;

    /**
     * Creates an arbiter pairing a world with its agents.
     *
     * @param world  simulator for the real domain
     * @param agents one agent for each of the world's agent slots
     * @throws IllegalArgumentException if the agent count does not match
     *         {@code world.getNumberOfAgents()}
     */
    public Arbiter(Simulator<S, A> world, List<Agent<S, A>> agents) {
        if (world.getNumberOfAgents() != agents.size()) {
            Main.exceptionLog.log("Not enough agents provided to arbiter");
            throw new IllegalArgumentException("Expects "
                    + world.getNumberOfAgents() + " agents: "
                    + agents.size() + " provided");
        }
        world_ = world;
        agents_ = agents;
    }

    /**
     * Runs one trial until the world reaches a terminal state. Each step:
     * read the current turn state from the server, sync the local world,
     * query the agent whose turn it is, and send the chosen action back.
     *
     * @param simulatedWorld unused here; retained for call compatibility
     * @param agents         agents in the current (rotated) move order
     * @param agentMoveOrder unused here; retained for call compatibility
     * @param strict         unused here; retained for call compatibility
     * @param decision_time  per-decision time budget passed to the agent (ms)
     */
    private void runSimulation(Simulator<S, A> simulatedWorld,
            List<Agent<S, A>> agents, int[] agentMoveOrder, boolean strict, long decision_time) {
        world_.setInitialState();

        while (!world_.isTerminalState()) {
            // Pull the authoritative state from the server and sync our copy.
            S temps = world_.getState();
            Relay.readTurnMessage((rddl.State) temps);
            world_.setState(temps);

            int agentTurn = world_.getState().getAgentTurn();
            long startTime = System.currentTimeMillis();
            A action = agents.get(agentTurn).selectAction(world_.getState(), decision_time, world_.getTime());

            // Forward the chosen action to the server.
            Relay.sendAction(((IPCAction) action).actions_);

            world_.incTime();
            long total = System.currentTimeMillis() - startTime;
            Main.timeLog.log("Action taken in " + total);
            Main.activityLog.log("WORLD :: Action taken:" + action);
        }
    }

    /**
     * Runs {@code numTrials} trials, rotating the agent move order between
     * trials, and accumulates the reward mean and sample standard deviation
     * (readable afterwards via {@link #getMean()} and {@link #getVariance()}).
     *
     * @param simulatedWorld world that is copied fresh for each trial
     * @param numTrials      number of trials to run
     * @param totaltime      total time budget shared across all trials (ms)
     * @param hor            horizon (decisions per trial); the per-decision
     *                       budget is {@code (totaltime / numTrials) / hor}
     */
    public void runSimulations(Simulator<S, A> simulatedWorld, int numTrials, long totaltime, int hor) {
        int moveOrderDisplacement = 0;
        int[] agentMoveOrder = new int[agents_.size()];
        List<Agent<S, A>> agents = new ArrayList<Agent<S, A>>(agents_);
        for (int i = 0; i < numTrials; i++) {
            for (int j = 0; j < agentMoveOrder.length; j++)
                agentMoveOrder[j] = (j + moveOrderDisplacement)
                        % agents_.size();

            double timeleft = Relay.roundRequest();
            // TODO: use the server-reported time budget instead of totaltime

            long time = System.currentTimeMillis();
            boolean strict = true;
            long timeout = totaltime / numTrials;

            try {
                runSimulation(simulatedWorld.copy(), agents, agentMoveOrder, strict, timeout / hor);

                Runtime runtime = Runtime.getRuntime();
                System.err.println("[ Memory usage: "
                        + ((runtime.totalMemory() - runtime.freeMemory()) / 1e6d) + "Mb / "
                        + (runtime.totalMemory() / 1e6d) + "Mb"
                        + " = " + ((double) (runtime.totalMemory() - runtime.freeMemory())
                                / (double) runtime.totalMemory()) + " ]\n");
            } catch (OutOfMemoryError e) {
                // Best-effort recovery: log, free memory, and retry the trial once.
                e.printStackTrace();
                System.gc();
                runSimulation(simulatedWorld.copy(), agents, agentMoveOrder, strict, timeout / hor);
            }

            Main.timeLog.log("Trial took " + (System.currentTimeMillis() - time));

            double r = Relay.roundEnd();
            // Incremental running mean: m_i = m_{i-1} + (x_i - m_{i-1}) / i.
            mean += (r - mean) / (i + 1);
            sumofsq += r * r;

            Main.rewardLog.log("reward of trial " + r);

            // Rotate the agent move order for the next trial.
            agents.add(0, agents.remove(agents.size() - 1));
            moveOrderDisplacement += 1;
        }
        if (numTrials > 1) {
            // Sample variance: (sum(x^2) - n*mean^2) / (n - 1).
            // BUG FIX: numTrials/(numTrials-1) was integer division, which
            // evaluates to 1 for numTrials >= 2 and overestimated the variance.
            sumofsq /= (numTrials - 1);
            variance = sumofsq - ((double) numTrials / (numTrials - 1)) * (mean * mean);
            // Clamp tiny negative values from floating-point cancellation so
            // sqrt never produces NaN; stored as the standard deviation.
            variance = Math.sqrt(Math.max(0.0, variance));
        } else {
            // Undefined for fewer than two samples (was an integer / by zero).
            variance = Double.NaN;
        }
    }

    /** @return the real-world simulator this arbiter drives */
    public Simulator<S, A> getSimulator() {
        return world_;
    }

    /** @return the agents acting in the domain, in their original order */
    public List<Agent<S, A>> getAgents() {
        return agents_;
    }

    @Override
    public String toString() {
        StringBuilder output = new StringBuilder();
        output.append(world_.toString() + "\n");
        for (int i = 0; i < agents_.size(); i++)
            output.append(agents_.get(i).toString() + "\n");
        return output.toString();
    }

    /**
     * Returns the sample standard deviation (sqrt of the sample variance)
     * of per-trial rewards, or NaN when fewer than two trials were run.
     * The name is kept for caller compatibility.
     */
    public double getVariance() {
        return variance;
    }

    /** @return the running mean of per-trial rewards */
    public double getMean() {
        return mean;
    }
}
