package net.yters.model.agents;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.TreeMap;
import java.util.Map.Entry;

import net.yters.model.ModelState;
import net.yters.model.agents.actuators.ActuatorAgent;
import net.yters.model.agents.actuators.sensors.SensorAgent;
import net.yters.model.agents.brains.BrainAgent;
import net.yters.util.Box;

/**
 * An agent that owns sensor, brain, and actuator sub-agents and, once per
 * {@link #execute()} tick, decides which actuators to fire via the abstract
 * {@link #decide(TreeMap, Agent)} hook implemented by subclasses.
 *
 * <p>Execution order per tick: sensors, then brains, then
 * {@link #initDecision()}, then one {@code decide(...)} call per actuator.
 */
public abstract class DeciderAgent extends Agent {
	private static final long serialVersionUID = -2163070180101528185L;

	/** Sensor sub-agents; executed first each tick to refresh observations. */
	public ArrayList<SensorAgent> f_sensors = new ArrayList<SensorAgent>();
	/** Brain sub-agents; executed after the sensors each tick. */
	public ArrayList<BrainAgent> f_brains = new ArrayList<BrainAgent>();
	/** Actuator sub-agents; fired selectively by {@code decide(...)} each tick. */
	public ArrayList<ActuatorAgent> f_actuators = new ArrayList<ActuatorAgent>();
	// NOTE(review): f_input is handed to decide(...) but is never populated in
	// this class — presumably subclasses fill it; confirm against implementors.
	public TreeMap<String, Box> f_input = new TreeMap<String, Box>();
	// TODO add this to the sensors, that way the hero itself can have latent AI as well as human control
	// NOTE(review): initialized to -1.0 here but reset to -2.0 at the end of
	// execute() — confirm whether the two sentinel values are intentional.
	public Box f_choice = new Box(Double.valueOf(-1.0));

	/** Agent kind discriminator, exposed as boxed state under "type". */
	public Box f_type = new Box(Double.valueOf(0));
	/** 1.0 while alive, 0.0 once hp is exhausted; exposed under "alive". */
	public Box f_alive = new Box(Double.valueOf(1));
	/** Hit points; the agent is removed from the model when this reaches 0. */
	public Box f_hp = new Box(Double.valueOf(0));
	public ArrayList<Box> f_position = new ArrayList<Box>();
	public ArrayList<Box> f_direction = new ArrayList<Box>();

	/** Owning model; used to remove this agent when it dies. */
	public ModelState f_model = null;

	public DeciderAgent() {
		super();
		this.init();
	}

	/**
	 * Copy constructor.
	 *
	 * <p>NOTE(review): this is a shallow copy — the sub-agent lists, input map,
	 * and state boxes are SHARED with {@code agent}, not duplicated, even
	 * though the declared exceptions suggest the superclass copies via
	 * serialization. Confirm that sharing is intended.
	 *
	 * @param agent the agent whose state this copy will share
	 * @throws IOException            propagated from the superclass copy
	 * @throws ClassNotFoundException propagated from the superclass copy
	 */
	public DeciderAgent(DeciderAgent agent) throws IOException, ClassNotFoundException {
		super(agent);
		f_sensors = agent.f_sensors;
		f_brains = agent.f_brains;
		f_actuators = agent.f_actuators;
		f_input = agent.f_input;

		f_type = agent.f_type;
		f_alive = agent.f_alive;
		f_hp = agent.f_hp;
		f_position = agent.f_position;
		f_direction = agent.f_direction;

		f_model = agent.f_model;
	}

	/** Registers the core boxed-state fields (type, hp, alive) with the superclass. */
	private void init() {
		addBoxedState("type", f_type);
		addBoxedState("hp", f_hp);
		addBoxedState("alive", f_alive);
	}

	public DeciderAgent(ModelState model) {
		f_model = model;
		this.init();
	}

	/**
	 * Registers a single named input box under this agent's qualified name.
	 * NOTE(review): writes to f_state, not f_input — confirm that inputs are
	 * meant to live in the shared state map rather than the decide(...) input.
	 */
	public void addBoxedInput(String name, Box value) {
		f_state.put(qualifyField(name), value);
	}

	/**
	 * Registers a batch of named input boxes.
	 * NOTE(review): each entry appears to be registered twice — once via
	 * addAllBoxedStates(input) and once via the addBoxedState loop below.
	 * Behavior preserved here; confirm whether the loop is redundant.
	 */
	public void addAllBoxedInput(Map<String, Box> input) {
		addAllBoxedStates(input);

		for(Entry<String, Box> i : input.entrySet()) {
			addBoxedState(i.getKey(), i.getValue());
		}
	}

	/**
	 * Attaches a sensor to this agent, taking ownership of it and exposing its
	 * observations as boxed input.
	 *
	 * @return the same sensor, for call chaining
	 */
	public SensorAgent addSensorAgent(SensorAgent sensorAgent) {
		sensorAgent.f_ownerAgent = this;
		f_sensors.add(sensorAgent);
		addAllBoxedInput(sensorAgent.f_observations);

		return sensorAgent;
	}

	/**
	 * Attaches an actuator to this agent, taking ownership of it and exposing
	 * its parameters as boxed state.
	 *
	 * @return the same actuator, for call chaining
	 */
	public ActuatorAgent addActuatorAgent(ActuatorAgent actuatorAgent) {
		actuatorAgent.f_ownerAgent = this;
		f_actuators.add(actuatorAgent);
		addAllBoxedStates(actuatorAgent.f_parameters);

		return actuatorAgent;
	}

	/**
	 * Runs one simulation tick: sensors, brains, per-tick decision setup, then
	 * a decide/fire pass over every actuator. Finally, kills and removes the
	 * agent if its hp has reached zero.
	 */
	@Override
	public void execute() {
		for(SensorAgent s : f_sensors) {
			s.execute();
		}

		for(BrainAgent b : f_brains) {
			b.execute();
		}

		initDecision(); // NOTE Incase some setup is required each iteration to make a decision.

		// Loop variable typed as ActuatorAgent (the list's element type); the
		// previous Agent-typed loop needed two redundant downcasts.
		for(ActuatorAgent a : f_actuators) {
			a.f_fired.f_value = 0.0;

			if(decide(f_input, a)) { // TODO redo this structure, the choice should be relegated to the brain rules
				a.f_fired.f_value = 1.0;
				a.execute();
			}
		}

		f_choice.f_value = -2.0; // NOTE Reset choice to default of doing nothing.

		// Remove agent once it's run out of hp.
		// TODO do this more appropriately with a death rule
		if((Double) f_hp.f_value <= 0) {
			this.f_alive.f_value = 0.0;
			f_model.removeAgent(this);
		}
	}

	/** Per-tick hook run before the decide/fire pass; subclasses set up decision state here. */
	protected abstract void initDecision();

	/**
	 * Decides whether the given actuator should fire this tick.
	 *
	 * @param input    the named input boxes available to the decision
	 * @param actuator the candidate actuator (always an ActuatorAgent)
	 * @return true to fire (and execute) the actuator this tick
	 */
	protected abstract boolean decide(TreeMap<String, Box> input, Agent actuator);
}
