package org.slusk.thynwor.corestructures;

import java.awt.EventQueue;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.LinkedList;

import org.slusk.thynwor.corestructures.Drone.OnlineLearningType;
import org.slusk.thynwor.corestructures.Region.RegionType;
import org.slusk.thynwor.feedback.AbstractFeedbackProvider;
import org.slusk.thynwor.feedback.FeedbackProviderFactory;
import org.slusk.thynwor.layout.LayoutApplier;
import org.slusk.thynwor.neat.ActivatableNet;
import org.slusk.thynwor.neat.ActivatableNetFactory;
import org.slusk.thynwor.test.GenericNetFacade;
import org.slusk.thynwor.ui.IThynworWindow;
import org.slusk.thynwor.ui.NetSelectorDialog;
import org.slusk.thynwor.ui.OptionsDialog;
import org.slusk.thynwor.ui.ThynworGraphics;
import org.slusk.thynwor.ui.ThynworGraphics2;
import org.slusk.thynwor.ui.ThynworWindow;
import org.slusk.thynwor.util.Coordinates;
import org.slusk.thynwor.util.ThynworException;
import org.slusk.thynwor.util.Util;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.ActivatorNDFacade;
import com.anji.hyperneat.nd.GridNetND;
import com.anji.hyperneat.nd.NDActivatorArray;
import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.nd.NDActivatorArray.ActivatorArrayIterator;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDBackPropagator;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDHebbian;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR.LearningRateGranularity;
import com.anji.hyperneat.onlinereinforcement.trainingbank.TrainingBank;
import com.anji.hyperneat.onlinereinforcement.trainingbank.TrainingSequence;
import com.anji.hyperneat.onlinereinforcement.GridNetNDLR;
import com.anji.hyperneat.onlinereinforcement.OnlineLearningNetType;
import com.anji.hyperneat.onlinereinforcement.RandomGridNet;
import com.anji.util.Properties;

/**
 * Central simulation environment for the Thynwor world.  Owns the grid map,
 * every entity list (drones, managers, resources, threats, obstacles), the
 * simulation thread, the TD-reinforcement reward-factor configuration, and
 * the neural nets the agents run on.  All runtime settings are read from a
 * {@link Properties} instance in {@code init(Properties)}.
 */
public class Environment {
	
	// Property keys: TD-reinforcement reward factors and net-mode flags.
	private static final String REWARD_ATTACHED_TO_FOOD = "Environment.rewardFactor.attachedToFood";
	private static final String REWARD_ASSISTED_WITH_FOOD = "Environment.rewardFactor.assistedWithFood";
	private static final String REWARD_DELIVERED_FOOD = "Environment.rewardFactor.deliveredFood";
	private static final String REWARD_MOVED_FOOD_TOWARD_GOAL = "Environment.rewardFactor.movedTowardGoalWithFood";
	private static final String REWARD_DROPPED_FOOD = "Environment.rewardFactor.droppedFood";
	private static final String REWARD_BUMPED = "Environment.rewardFactor.bumped";
	private static final String REWARD_MOVED_TOWARD_SIGNAL = "Environment.rewardFactor.movedTowardSignal";
	private static final String REWARD_MOVED_AWAY_FROM_GOAL = "Environment.rewardFactor.movedAwayFromGoal";
	private static final String MULTI_NET_MODE = "Environment.multiNetMode";
	private static final String USE_TASK_NET = "Environment.useTaskNet";
	private static final String LEARNING_TYPE_KEY = "ann.hyperneat.online.learning.type";
	
	// NOTE(review): the static initializer that loads defaultConfig is
	// commented out below, so defaultConfig stays null and the no-arg
	// constructor will fail - confirm the Properties constructor is the
	// only supported entry point.
	private static Properties defaultConfig;
	private static final String configFileName = "configs/thynwor.properties";
	private Properties config;
	
	// World state.  The "new*" lists stage entities spawned mid-step and
	// deadEntitiesList stages removals; both are drained at the end of each
	// timeStep() so the active lists are never mutated during iteration.
	private GridMap map;
	private LinkedList<Drone> droneList;
	private LinkedList<Manager> managerList;
	private LinkedList<Resource> resourceList;
	private LinkedList<Resource> newResourcesList;
	private LinkedList<Threat> threatList;
	private LinkedList<Threat> newThreatList;
	private LinkedList<Obstacle> obstacleList;
	private LinkedList<Entity> deadEntitiesList;
	
	// UI and simulation-thread plumbing.
	private DisplayWindow window;
	private NetSelectorDialog netSelectorDialog;
	private OptionsDialog optionsDialog;
	private Thread simThread;
	private Boolean isSimRunning;
	
	private BufferedWriter actionLog;	
	private LayoutApplier layout;
	
	// Number of time steps executed since the last resetStats().
	private int timeSteps;

	// Config variables
	private int mapWidth;
	private int mapHeight;
	private int numManagers;
	private int numDrones;
	private int initialFood;
	private int maintainFood;
	private int initialThreats;
	private int maintainThreats;
	private boolean singleAgentTowMode;
	private boolean useOnlineReinforcement;
	private int obstacleCount;
	private boolean useRandomLayout;
	private OnlineLearningType onlineLearningType;
	private OnlineLearningNetType learningNetType;
	private boolean multinetMode;
	private boolean useTaskNet;
	private boolean useTrainingBank;
	private boolean useIntermittentTrainingOnly;
	private boolean useGraphics;
	
	// TD Reinforcement config - rf=rewardFactor
	private double rfAttachedToFood;
	private double rfAssistedWithFood;
	private double rfMovedFoodTowardGoal;
	private double rfDeliveredFood;
	private double rfDroppedFood;
	private double rfBumped;
	private double rfMovedTowardSignal;
	private double rfMovedAwayFromGoal;
	
	// Milliseconds per simulation step (sleep budget for the sim loop).
	private int simSpeed;
	private String layoutName;
	
	// Nets
	private ActivatableNet bo4Net;
	private ActivatorND satsNet;
	private ActivatorND sasasNet;
	
	private NDActivatorArray sasasNetArray;
	
	private AbstractFeedbackProvider abstractFeedbackProvider;
	private TrainingBank trainingBank;
	
	// Stats.  ***IMPORTANT - if included here, must be reset in calls to resetStats()!
	private int foodCollected;
	private int moveFoodTowardGoal;
	private int assistWithFood;
	private int attachedToFood;
	private int handsFull;
//	private int hitWall;
	private int moveTowardSignal;
	private int hitObstacle;
	private int droppedFood;
		
	
//	static {
//		try {
//			defaultConfig = Util.loadConfig(configFileName);
//		} catch (Exception e) {}
//	}
	
	
	/**
	 * Creates an environment from the shared default configuration.
	 * NOTE(review): the static block that loaded {@code defaultConfig} is
	 * commented out, so this constructor currently passes null into init()
	 * and will throw - confirm whether it should be removed or the loader
	 * restored.
	 */
	public Environment() {
		init(defaultConfig);
	}

	/** Creates an environment configured from the given properties. */
	public Environment(Properties alternateConfig) {
		init(alternateConfig);
	}
	
	/**
	 * Reads all runtime settings out of the given properties, builds the
	 * feedback provider, layout applier and (optionally) the training bank.
	 * Does NOT populate the map - callers must invoke resetLayout() (or
	 * randomLayout()) afterwards.
	 *
	 * Required keys (no default): MapWidth, MapHeight,
	 * Environment.obstacleCount, Environment.onlineLearningType, the
	 * LEARNING_TYPE_KEY and every Environment.rewardFactor.* key - missing
	 * values throw NumberFormatException / NullPointerException here.
	 */
	private void init(Properties config) {
		this.config = config;
		mapWidth = Integer.parseInt(config.getProperty("MapWidth"));
		mapHeight = Integer.parseInt(config.getProperty("MapHeight"));
		window = null;
		isSimRunning = false;
				
		numDrones = Integer.parseInt(config.getProperty("Environment.numDrones", "1"));
		initialFood = Integer.parseInt(config.getProperty("Environment.initialFood", "1"));
		numManagers = Integer.parseInt(config.getProperty("Environment.numManagers", "4"));
		maintainFood = Integer.parseInt(config.getProperty("Environment.maintainFood", "1"));
		initialThreats = Integer.parseInt(config.getProperty("Environment.initialThreats", "1"));
		maintainThreats = Integer.parseInt(config.getProperty("Environment.maintainThreats", "1"));
		singleAgentTowMode = Boolean.parseBoolean(config.getProperty("Environment.singleAgentTowMode"));
		obstacleCount = Integer.parseInt(config.getProperty("Environment.obstacleCount"));
		useOnlineReinforcement = Boolean.parseBoolean(config.getProperty("Environment.useOnlineReinforcement"));
		useRandomLayout =  Boolean.parseBoolean(config.getProperty("Environment.useRandomLayout"));
		layoutName = config.getProperty("Environment.layoutName", null);
		onlineLearningType = OnlineLearningType.valueOf(config.getProperty("Environment.onlineLearningType"));
		learningNetType = OnlineLearningNetType.valueOf(config.getProperty(LEARNING_TYPE_KEY));
		multinetMode = Boolean.parseBoolean(config.getProperty(MULTI_NET_MODE));
		useTaskNet = Boolean.parseBoolean(config.getProperty(USE_TASK_NET));
		useTrainingBank = Boolean.parseBoolean(config.getProperty("TrainingBank.useTrainingBank", "false"));
		useIntermittentTrainingOnly = Boolean.parseBoolean(config.getProperty("TrainingBank.useIntermittentTrainingOnly", "false"));
		useGraphics = Boolean.parseBoolean(config.getProperty("useGraphics", "false"));
		
		// Reward factors for TD reinforcement (rf* fields).
		rfAttachedToFood = Double.parseDouble(config.getProperty(REWARD_ATTACHED_TO_FOOD));
		rfAssistedWithFood = Double.parseDouble(config.getProperty(REWARD_ASSISTED_WITH_FOOD));
		rfMovedFoodTowardGoal = Double.parseDouble(config.getProperty(REWARD_MOVED_FOOD_TOWARD_GOAL));
		rfDeliveredFood = Double.parseDouble(config.getProperty(REWARD_DELIVERED_FOOD));
		rfDroppedFood = Double.parseDouble(config.getProperty(REWARD_DROPPED_FOOD));
		rfBumped = Double.parseDouble(config.getProperty(REWARD_BUMPED));
		rfMovedTowardSignal = Double.parseDouble(config.getProperty(REWARD_MOVED_TOWARD_SIGNAL));
		rfMovedAwayFromGoal = Double.parseDouble(config.getProperty(REWARD_MOVED_AWAY_FROM_GOAL));
				
		layout = new LayoutApplier(layoutName);
		
		simSpeed = Integer.parseInt(config.getProperty("SimSpeed", "1000"));
		foodCollected = 0;
		
		abstractFeedbackProvider = FeedbackProviderFactory.getFeedbackProvider(this, onlineLearningType, learningNetType);
		
		// Training bank is only allocated when enabled; callers must check
		// getUseTrainingBank() before using it (addTrainingSequenceToBank
		// would NPE otherwise).
		trainingBank = useTrainingBank 
			? new TrainingBank(
					config.getIntProperty("TrainingBank.sampleCap")
					, config.getFloatProperty("TrainingBank.acceptPercent", 1.0f) )
			: null;
		
		// Initialize Nets
		//bo4Net = ActivatableNetFactory.createNet("bo4", "961272");	//530242
//		satsNet = ActivatableNetFactory.createGridNetND("sats", "11720");
//		sasasNet = ActivatableNetFactory.createGridNetND("sasas", "11683");
		
		//sasasNet = ActivatableNetFactory.createGridNetND("sasas", "38097");
		
		// HACK
//		if (useOnlineReinforcement) {
//			
//			// POC stuff
//			NDFloatArray[] weightLearningRates = new NDFloatArray[1];
//			weightLearningRates[0] = new NDFloatArray(new int[] {1});
//			weightLearningRates[0].set(0.2f, 0);
//			
//			NDFloatArray[] biasLearningRates = new NDFloatArray[1];
//			biasLearningRates[0] = new NDFloatArray(new int[] {1});
//			biasLearningRates[0].set(0.2f, 0);
//			
//			GridNetND nd = (GridNetND)sasasNet;
//			
////			GridNetNDLR gnndlr = new GridNetNDLR((GridNetND)substrate, weightLearningRates, biasLearningRates, null, null, LearningRateGranularity.SINGLE, LearningRateGranularity.SINGLE, false);
//			GridNetNDLR gnndlr = new GridNetNDLR(nd, weightLearningRates, biasLearningRates, null, null, LearningRateGranularity.SINGLE, LearningRateGranularity.SINGLE, false);
//			sasasNet = new ActivatorNDBackPropagator(gnndlr);
//			
//		}
		
		//resetLayout();
	}
	
	/**
	 * Wipes all entity lists and statistics, then re-populates the world from
	 * the current layout.  (The random-layout branch is currently disabled;
	 * the configured LayoutApplier is always used.)  Pushes the fresh map to
	 * the display when a window exists.
	 */
	public void resetLayout() {
		clearLayout();
		layout.apply(this);

		if (window != null) {
			window.showMap();
		}
	}
	
	/**
	 * Replaces every entity list with a fresh empty list and zeroes all
	 * per-run statistics.
	 */
	private void clearLayout() {
		droneList        = new LinkedList<Drone>();
		managerList      = new LinkedList<Manager>();
		resourceList     = new LinkedList<Resource>();
		newResourcesList = new LinkedList<Resource>();
		threatList       = new LinkedList<Threat>();
		newThreatList    = new LinkedList<Threat>();
		obstacleList     = new LinkedList<Obstacle>();
		deadEntitiesList = new LinkedList<Entity>();

		resetStats();
	}
	
	/**
	 * Zeroes the time-step counter and every per-run statistic.  Per the
	 * note on the stats fields, any new stat field must also be reset here.
	 */
	private void resetStats() {
		timeSteps = 0;
		foodCollected = 0;
		moveFoodTowardGoal = 0;
		assistWithFood = 0;
		attachedToFood = 0;
		handsFull = 0;
//		hitWall = 0;
		moveTowardSignal = 0;
		hitObstacle = 0;
		droppedFood = 0;
	}

	// ---- Simple accessors / mutators -----------------------------------
	public int getMaintainFood() {return maintainFood;}
	public void setMaintainFood(int mf) {maintainFood=mf;}
	public int getTimeSteps() {return timeSteps;}
	// isSimRunning is read by the sim thread and written from UI threads,
	// hence the synchronized accessors.
	public synchronized void setIsSimRunning(boolean bool) {this.isSimRunning = bool;}
	public synchronized boolean getIsSimRunning() {return isSimRunning;}
	public Properties getConfig() {return config;}
	public GridMap getMap() {return map;}
	public void setMap(GridMap map) {this.map = map;}
	public void stopSim() {setIsSimRunning(false);}
	public LinkedList<Resource> getResourceList() {return resourceList;}
	public LinkedList<Drone> getDroneList() {return droneList;}
	public LinkedList<Manager> getManagerList() {return managerList;}
	public LinkedList<Threat> getThreatList() {return threatList;}
	public LinkedList<Obstacle> getObstacleList() {return obstacleList;}
	public boolean isSingleAgentTowMode() {return singleAgentTowMode;}
	public void setSingleAgentTowMode(boolean mode) {this.singleAgentTowMode = mode;}
	public boolean isUseOnlineReinforcement() {return useOnlineReinforcement;}
	public void setUseOnlineReinforcement(boolean mode) {this.useOnlineReinforcement = mode;}
	public void setUseRandomLayout(boolean mode) {this.useRandomLayout = mode;}
	public boolean isUseRandomLayout() {return useRandomLayout;}
	public String getLayoutName() {return layoutName;}
	// Changing the layout name also rebuilds the LayoutApplier so the next
	// resetLayout() uses the new layout.
	public void setLayoutName(String layoutName) { this.layoutName = layoutName; this.layout = new LayoutApplier(layoutName);}
	public OnlineLearningType getOnlineLearningType() {return onlineLearningType;}
	public void setOnlineLearningType(OnlineLearningType type) {this.onlineLearningType = type;}
	public OnlineLearningNetType getLearningNetType() {return learningNetType;}
	public boolean getMultinetMode() {return multinetMode;}
	public boolean getUseTaskNet() {return useTaskNet;}

	
	// ---- Reward-factor accessors (rf* fields, set in init()) -----------
	public double getRFAttachedToFood() {return rfAttachedToFood;}
	public double getRFAssistedWithFood() {return rfAssistedWithFood;}
	public double getRFMovedFoodTowardGoal() {return rfMovedFoodTowardGoal;}
	public double getRFDeliveredFood() {return rfDeliveredFood;}
	public double getRFDroppedFood() {return rfDroppedFood;}
	public double getRFBumped() {return rfBumped;}
	public double getRFMovedTowardSignal() {return rfMovedTowardSignal;}
	public double getRFMovedAwayFromGoal() {return rfMovedAwayFromGoal;}
	
	public int getSimSpeed() {return simSpeed;}
	public void setSimSpeed(int speed) {simSpeed = speed;}
	
	// ---- Per-run statistics counters -----------------------------------
	public int getFoodCollected() {return foodCollected;}
	public void incrementFoodCollected() {foodCollected++;}
	public void incrementMoveFoodTowardGoal() {moveFoodTowardGoal++;}
	public int getMoveFoodTowardGoal() {return moveFoodTowardGoal;}
	public void incrementAssistWithFood() {assistWithFood++;}
	public int getAssistWithFood() {return assistWithFood;}
	public void incrementAttachedToFood() {attachedToFood++;}
	public int getAttachedToFood() {return attachedToFood;}
	public void incrementHandsFull() {handsFull++;}
	public int getHandsFull() {return handsFull;}
//	public void incrementHitWall() {hitWall++;}
//	public int getHitWall() {return hitWall;}
	public void incrementMoveTowardSignal() {moveTowardSignal++;}
	public int getMoveTowardSignalCount() {return moveTowardSignal;}
	public void incrementHitObstacle() {hitObstacle++;}
	public int getHitObstacle() {return hitObstacle;}
	public void incrementDroppedFood() {droppedFood++;}
	public int getDroppedFood() {return droppedFood;}

	
	// ---- Net / training accessors --------------------------------------
	public ActivatableNet getBo4Net() {return bo4Net;}
	public void setBo4Net(ActivatableNet net) {this.bo4Net = net;}
	public ActivatorND getSatsNet() {return satsNet;}
	public void setSatsNet(ActivatorND net) {this.satsNet = net;}
	public ActivatorND getSasasNet() {return sasasNet;}
	public void setSasasNet(ActivatorND net) {this.sasasNet = net;}
	public NDActivatorArray getSasasNetArray() {return sasasNetArray;}
	public void setSasasNetArray(NDActivatorArray nets) {this.sasasNetArray = nets;}
	public AbstractFeedbackProvider getFeedbackProvider() {return abstractFeedbackProvider;}
	// NOTE(review): trainingBank is null when TrainingBank.useTrainingBank
	// is false - callers must check getUseTrainingBank() first or this NPEs.
	public void addTrainingSequenceToBank(TrainingSequence sequence) {trainingBank.addTrainingSequence(sequence);}
	public TrainingBank getTrainingBank() {return trainingBank;}
	public void setTrainingBank(TrainingBank bank) { this.trainingBank = bank;}
	public boolean getUseTrainingBank() {return useTrainingBank;}
	public boolean getUseIntermittentTrainingOnly() {return useIntermittentTrainingOnly;}
	
	// TODO: clean up the simulation-loop timing and shutdown logic.
	/**
	 * Starts the simulation on a background thread.  The loop runs until
	 * {@link #stopSim()} flips the running flag, performing one
	 * {@link #timeStep()} per iteration, updating the display when present,
	 * then sleeping whatever remains of the configured step period
	 * ({@code simSpeed} ms).
	 *
	 * Fix: when a step overran {@code simSpeed}, the computed sleep time went
	 * negative and {@code Thread.sleep()} threw IllegalArgumentException,
	 * killing the simulation thread.  Sleep is now skipped in that case, and
	 * an interrupt stops the loop cleanly instead of being swallowed.
	 */
	public void startSim() {
		Runnable r = new Runnable() {
			public void run() {
				long elapsed, sleep;
				setIsSimRunning(true);
				long before = System.currentTimeMillis();
				while (getIsSimRunning()) {
					timeStep();
					if (null != window) window.update();
					elapsed = System.currentTimeMillis() - before;
					sleep = simSpeed - elapsed;
					// Only sleep when time remains in the step budget; a
					// negative argument would make Thread.sleep() throw.
					if (sleep > 0) {
						try {
							Thread.sleep(sleep);
						} catch (InterruptedException e) {
							// Restore interrupt status and shut down cleanly.
							Thread.currentThread().interrupt();
							setIsSimRunning(false);
						}
					}
					before = System.currentTimeMillis();
				}
			}
		};
		simThread = new Thread(r);
		simThread.start();
	}

	
	/**
	 * Runs exactly {@code i} time steps on a background thread, then stops.
	 * Used by {@link #runSim(int)} when a display window exists; the caller
	 * waits via {@code simThread.join()}.
	 *
	 * Fixes: the old version synchronized on the {@code isSimRunning} Boolean
	 * field while reassigning it inside the block - locking on a field that
	 * is reassigned (to cached boxed Booleans) provides no real mutual
	 * exclusion, so the synchronized accessors are used instead.  It also
	 * called {@code window.update()} without a null check, unlike startSim().
	 */
	private void startSim(final int i) {
		Runnable r = new Runnable() {
			private final int limit = i;
			public void run() {
				setIsSimRunning(true);
				try {
					for (int counter = 0; counter < limit; counter++) {
						timeStep();
						if (null != window) window.update();
						try {
							Thread.sleep(simSpeed);
						} catch (InterruptedException e) {
							// Restore interrupt status and stop early.
							Thread.currentThread().interrupt();
							break;
						}
					}
				} finally {
					// Always clear the flag, even if a time step throws.
					setIsSimRunning(false);
				}
			}
		};
		simThread = new Thread(r);
		simThread.start();		
	}
	
	/**
	 * Runs the simulation for the given number of time steps.  With a display
	 * window present the steps execute on the simulation thread (so the UI
	 * can repaint) and this method blocks until they complete; headless, the
	 * steps run inline on the caller's thread.
	 *
	 * @param timeSteps number of steps to execute
	 */
	public void runSim(int timeSteps) {
		if (null != window) {
			try {
				startSim(timeSteps);
				simThread.join();
			} catch (InterruptedException e) {
				// Preserve the caller's interrupt status rather than
				// swallowing it (the old code only printed the trace).
				Thread.currentThread().interrupt();
				e.printStackTrace();
			}
		} else {
			for (int i = 0; i < timeSteps; i++) {
				timeStep();
			}
		}
	}

	/**
	 * Builds the display window plus the net-selector and options dialogs,
	 * wires each of them to this environment, and schedules the window for
	 * construction on the AWT event-dispatch thread.
	 */
	public void createWindow() {
		window = new DisplayWindow(this);

		netSelectorDialog = new NetSelectorDialog();
		optionsDialog = new OptionsDialog();
		netSelectorDialog.setEnvironment(this);
		optionsDialog.setEnvironment(this);

		EventQueue.invokeLater(window);
	}
	
	/**
	 * Builds a fresh random world: new map, the BASE region, drones spawned
	 * in the base (wired to nets according to the multinet/task-net modes),
	 * food and obstacles scattered outside the base.  The manager and threat
	 * loops are empty stubs - nothing is generated for them yet.
	 */
	public void randomLayout() {
		
		GridMap map = new GridMap(mapWidth, mapHeight);
		setMap(map);
		
		// Create Regions
		createRegions();
		
		// Generate Managers		
		// TODO: manager generation is not implemented - loop body is empty.
		for (int i = 0; i < numManagers; i++) {
	
		}
		
		// Generate Drones
		
		if (multinetMode && !useTaskNet) {
			// One net per drone, cycling through the array when drones
			// outnumber nets.
			ActivatorArrayIterator net = sasasNetArray.iterator();
			for (int i = 0; i < numDrones; i++) {
	//			Drone d = new Drone(this, map.generateRandomFreeCoordinates());
				Drone d = new Drone(this, map.generateRandomFreeCoordinatesInRegion(map.getRegionsOfType(RegionType.BASE).getFirst()), net.next());
				map.get(d.getCoordinates()).setContents(d);
				droneList.add(d);
				
				if (!net.hasNext()) net.reset();
			}
		} else if (useTaskNet) {
			// Each drone consumes TWO nets (action net + task net).
			// NOTE(review): net.next() is called twice per drone with the
			// hasNext()/reset() check only after both calls - an odd-sized
			// net array could exhaust the iterator mid-drone.  Confirm the
			// array is always even-sized in this mode.
			ActivatorArrayIterator net = sasasNetArray.iterator();
			for (int i = 0; i < numDrones; i++) {
				Drone d = new Drone(this, map.generateRandomFreeCoordinatesInRegion(map.getRegionsOfType(RegionType.BASE).getFirst()), net.next(), net.next());
				map.get(d.getCoordinates()).setContents(d);
				droneList.add(d);
				
				if (!net.hasNext()) net.reset();
			}
		} else {
			// Single-net mode: drones spawn anywhere free on the map.
			for (int i = 0; i < numDrones; i++) {
				Drone d = new Drone(this, map.generateRandomFreeCoordinates());
				map.get(d.getCoordinates()).setContents(d);
				droneList.add(d);
			}
		}
		
		
		// Generate Food
		for (int i = 0; i < initialFood; i++) {
			Food f = new Food(this, map.generateRandomFreeCoordinatesNotInRegion(RegionType.BASE));
			map.get(f.getCoordinates()).setContents(f);
			resourceList.add(f);
		}
		
		// Generate Threats
		// TODO: threat generation is not implemented - loop body is empty.
		for (int i = 0; i < initialThreats; i++) {
		
		}
		
		// Generate Obstacles
		for (int i = 0; i < obstacleCount; i++) {
			Obstacle o = new Obstacle(this, map.generateRandomFreeCoordinatesNotInRegion(RegionType.BASE));
			map.get(o.getCoordinates()).setContents(o);
			obstacleList.add(o);
		}
	}
		
	
	/**
	 * Creates the map regions.  Currently a single fixed BASE region; a
	 * failure to create it is reported but not fatal.
	 */
	private void createRegions() {
		// For now, just 1 Base Region
		try {
			map.createRegion(new Coordinates(6, 6), new Coordinates(8, 8), RegionType.BASE);
		} catch (ThynworException e) {
			// Was silently swallowed; at least surface the failure so a
			// missing BASE region can be diagnosed.
			System.err.println("Failed to create BASE region: " + e);
		}
	}

	/**
	 * Perform one time step; doTurnActions() is called for each currently
	 * existing entity.  Managers act first, then drones, resources and
	 * threats.  Deaths and spawns are staged and applied only after all
	 * iteration completes, so the active lists are never modified while
	 * being traversed.
	 */
	public synchronized void timeStep() {
		for (Manager manager : managerList) {
			manager.doTurnActions();
		}
		for (Drone drone : droneList) {
			drone.doTurnActions();
		}
		for (Resource resource : resourceList) {
			resource.doTurnActions();
		}
		for (Threat threat : threatList) {
			threat.doTurnActions();
		}

		// Deferred list maintenance - done last to avoid breaking the loops above.
		cleanUpTheDead();
		spawnNewEntities();
		updateResourceList();
		updateThreatList();

		timeSteps++;
	}


	/**
	 * Promotes threats spawned during the current time step into the active
	 * threat list.  New threats are staged in {@code newThreatList} so the
	 * active list is never modified while timeStep() iterates it.
	 */
	private void updateThreatList() {
		// addAll replaces the manual element-by-element copy loop.
		threatList.addAll(newThreatList);
		newThreatList.clear();
	}

	/**
	 * Tops the world back up to {@code maintainFood} food items.  New food is
	 * placed on the map immediately but staged in {@code newResourcesList},
	 * becoming active only when updateResourceList() runs at the end of the
	 * time step.
	 */
	private void spawnNewEntities() {
		int deficit = maintainFood - resourceList.size();
		for (int i = 0; i < deficit; i++) {
			Food food = new Food(this, map.generateRandomFreeCoordinatesNotInRegion(RegionType.BASE));
			// On the map immediately...
			map.get(food.getCoordinates()).setContents(food);
			// ...but not active until the step completes.
			newResourcesList.add(food);
		}
	}

	/**
	 * Promotes resources spawned during the current time step (staged in
	 * {@code newResourcesList} by spawnNewEntities()) into the active
	 * resource list.
	 */
	private void updateResourceList() {
		// addAll replaces the manual element-by-element copy loop.
		resourceList.addAll(newResourcesList);
		newResourcesList.clear();
	}

	/**
	 * Removes every entity that died this time step, then empties the staging
	 * list.  Removal is deferred to here so timeStep()'s loops never iterate
	 * a list that is being modified.
	 */
	private void cleanUpTheDead() {
		for (Entity dead : deadEntitiesList) {
			removeEntity(dead);
		}
		deadEntitiesList.clear();
	}
	
	/**
	 * Sums the (special Manhattan) distance of every active resource from the
	 * center of the first BASE region - a measure of how spread out the food
	 * is.
	 *
	 * @return the total distance, or 0 when no resources exist
	 */
	public double calculateTotalFoodSpread() {
		// Early return also preserves the old behavior of never touching the
		// region list when there are no resources.
		if (resourceList.isEmpty()) return 0;

		// Hoisted out of the loop: the BASE center is invariant per call.
		Coordinates baseCenter = map.getRegionsOfType(RegionType.BASE).getFirst().getCenter();
		double dist = 0;
		for (Resource resource : resourceList) {
			dist += GridMap.calculateSpecialManhattanDistance(resource.getCoordinates(), baseCenter);
		}
		return dist;
	}


	/**
	 * Entry point: loads properties from the resources named on the command
	 * line, builds the environment and its net array, runs pre-training with
	 * currently hard-coded parameters, then resets the layout and opens the
	 * UI.
	 *
	 * @param args names of properties resources to load
	 * @throws IOException if a properties resource cannot be read
	 */
	public static void main(String[] args) throws IOException {

    	Properties props = new Properties();
        props.loadFromResources(args);
		Environment env = new Environment(props);

//		inputDimensions = new int[] {3,3,3,2};
//		outputDimensions = new int[] {3,3,2};
//		int[][] dims = new int[][] {
//			{3, 3, 4}
//			,{3,3,1}
//			,{3,3,1}
//		};
//		RandomGridNet net = RandomGridNet.getRandomGridNet(3, dims);
//		ActivatorNDBackPropagator bp = new ActivatorNDBackPropagator(net);
//		ActivatorNDHebbian bp = new ActivatorNDHebbian(net);
	

		NDActivatorArray sasasNetArray = ActivatableNetFactory.createGridNets(props);	

		env.setSasasNetArray(sasasNetArray);
//		
//		if (props.getBooleanProperty("Evaluation.usePreTraining"))	{
//			env.setUseOnlineReinforcement(true);
//			env.performTraining("configs/layout.xml", 150, 0, 0, 0);
//			env.setUseOnlineReinforcement(props.getBooleanProperty("Environment.useOnlineReinforcement"));
//		}
//		
//		env.setUseOnlineReinforcement(props.getBooleanProperty("Environment.useOnlineReinforcement"));
//		env.setLayoutName(props.getProperty("Environment.layoutName"));
		
		// NOTE(review): pre-training layout/steps/epochs are hard-coded here
		// rather than read from config - confirm whether that is intentional.
		env.performTraining("configs/layout.xml", 150, 8, 75, 0.2f);
		//env.setUseOnlineReinforcement(false);
		env.resetLayout();
		env.createWindow();
	}


	/**
	 * Runs the configured pre-training phase(s) against a fixed layout, then
	 * restores the layout settings from config.  Two mutually exclusive
	 * modes: basic pre-training (online reinforcement without intermittent
	 * training) simply runs the sim; training-bank mode alternates 50-step
	 * runs with batch training on the collected sequences.
	 *
	 * @param layoutName layout to train on; may be null or empty for random layouts
	 * @param timeStepsToTrain steps to run for basic pre-training
	 * @param trainingBankIterationsToPerform number of run/train/clear cycles
	 * @param maxNumTrainingEpochs epoch cap per training-bank run
	 * @param mseThreshold MSE at which training-bank training stops early
	 */
	public void performTraining(String layoutName
			, int timeStepsToTrain
			, int trainingBankIterationsToPerform
			, int maxNumTrainingEpochs
			, float mseThreshold) {
		if (this.isUseOnlineReinforcement() && !this.getUseIntermittentTrainingOnly()) {
			System.out.println("Performing basic pre-training.");
			this.useRandomLayout = false;
			this.setLayoutName(layoutName);
			this.resetLayout();
			this.runSim(timeStepsToTrain);
			// Restore the configured layout settings after training.
			this.setLayoutName(config.getProperty("Environment.layoutName"));
			this.useRandomLayout = config.getBooleanProperty("Environment.useRandomLayout");
		}
		
		if (this.getUseTrainingBank() && this.getUseIntermittentTrainingOnly()){
			System.out.println("Performing training bank training.");
			this.setLayoutName(layoutName);
			for (int tbIt = 0; tbIt < trainingBankIterationsToPerform; tbIt++) {
				this.resetLayout();
				this.runSim(50);
				TrainingBank trainingBank = this.getTrainingBank();
				trainingBank.trainToMseThreshold(mseThreshold, maxNumTrainingEpochs);
				trainingBank.clear();
				// FIX: tbIt was also incremented here (in addition to the
				// for-header), so only half the requested iterations ran.
			}
			this.setLayoutName(config.getProperty("Environment.layoutName"));
			this.useRandomLayout = config.getBooleanProperty("Environment.useRandomLayout");
		}
	}

	/**
	 * Removes an entity from the active lists.
	 * NOTE(review): only {@code resourceList} is checked here - a dead drone,
	 * manager or threat passed in via setEntityAsDead() is never removed from
	 * its own list.  Confirm whether that is intentional.
	 */
	public void removeEntity(Entity e) {
		resourceList.remove(e);
	}

	/**
	 * Marks an entity as dead: it is removed from the map immediately, but
	 * list removal is staged until the end of the current time step (handled
	 * by cleanUpTheDead()) so in-progress iteration is not disturbed.
	 */
	public void setEntityAsDead(Entity e) {
		map.removeEntity(e);
		deadEntitiesList.add(e);
	}
	
	/**
	 * Builds a list of the coordinates of every signal currently being
	 * emitted by a drone (drones with no active signal are skipped).
	 *
	 * @return The list of signal coordinates.
	 */
	public LinkedList<Coordinates> buildSignalCoordinatesList() {
		LinkedList<Coordinates> signals = new LinkedList<Coordinates>();
		for (Drone drone : droneList) {
			Coordinates signal = drone.getSignalCoordinates();
			if (signal != null) {
				signals.add(signal);
			}
		}
		return signals;
	}

	/**
	 * Builds a list of all drone signals lying within {@code maxDistance}
	 * (special Manhattan distance) of the given origin.
	 *
	 * @param origin The coordinates to measure from.
	 * @param maxDistance The maximum distance away from the origin coordinates to consider.
	 * @return The list of in-range signal coordinates.
	 */
	public LinkedList<Coordinates> buildSignalCoordinatesListInRange(Coordinates origin, double maxDistance) {
		LinkedList<Coordinates> signals = new LinkedList<Coordinates>();
		for (Drone drone : droneList) {
			Coordinates signal = drone.getSignalCoordinates();
			if (signal == null) {
				continue;
			}
			if (GridMap.calculateSpecialManhattanDistance(origin, signal) <= maxDistance) {
				signals.add(signal);
			}
		}
		return signals;
	}
	
	/**
	 * Appends one comma-separated record (newline-terminated) to the action
	 * log; a no-op when no log writer is attached.
	 */
	public void logAction(Object... args) {
		if (actionLog == null) {
			return;
		}
		try {
			for (Object arg : args) {
				actionLog.write(arg.toString());
				actionLog.write(",");
			}
			actionLog.write("\n");
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
	/**
	 * Closes the action log, printing (but otherwise suppressing) any close
	 * failure, and drops the reference so subsequent logAction() calls become
	 * no-ops.
	 */
	public void closeActionLog() {
		if (null == actionLog) {
			return;
		}
		try {
			actionLog.close();
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			actionLog = null;
		}
	}
	
	/**
	 * Releases resources held by this environment: closes the action log and
	 * disposes the display window when one exists.
	 *
	 * Fix: the null check was inverted - the old code called
	 * {@code window.dispose()} only when {@code window} was null, which both
	 * guaranteed an NPE on that path and never disposed a live window.
	 */
	public void finalize() {
		closeActionLog();
		if (null != window) window.dispose();
		window = null;
	}


	/**
	 * Renders the per-run statistics as a human-readable multi-line string.
	 */
	public String getStatsString() {
		StringBuilder stats = new StringBuilder();
		stats.append("TimeSteps:").append(getTimeSteps())
			.append("\nFoodCollected: ").append(foodCollected)
			.append("\nmoveFoodTowardGoal: ").append(moveFoodTowardGoal)
			.append("\nassistWithFood: ").append(assistWithFood)
			.append("\nattachedToFood: ").append(attachedToFood)
			.append("\nhandsFull: ").append(handsFull)
			.append("\nmovedTowardSignal: ").append(moveTowardSignal)
			.append("\nhitObstacle: ").append(hitObstacle)
			.append("\nDropped Food: ").append(droppedFood);
		return stats.toString();
	}
	


	/**
	 * Runnable wrapper around the real UI window.  Constructed on the caller
	 * thread; the actual window is built on the AWT event-dispatch thread
	 * when run() executes (scheduled via EventQueue.invokeLater in
	 * createWindow()).
	 */
	class DisplayWindow implements Runnable {
		private final Environment env;
		// Assigned on the EDT in run(); null until then and after dispose().
		private IThynworWindow window;
		
		public DisplayWindow(Environment env) {
			this.env = env;			
		}

		// NOTE(review): unlike update()/showMap(), dispose() does not
		// null-check window - calling it before run() has executed (or
		// twice) will NPE.  Confirm callers only dispose a built window.
		public void dispose() {
			window.dispose();		
			this.window = null;
		}

		// Repaints the window if it has been built.
		public void update() {
			if (null != window) {
				window.update();
			}
		}
		// Pushes the environment's current map to the window if built.
		public void showMap() {
			if (null != window) {
				window.setMap(map);
			}
		}
		
		// Runs on the EDT: picks the graphical or plain window per the
		// useGraphics config flag, then shows the current map.
		public void run() {
			try {
				window = useGraphics ? new ThynworGraphics2(env): new ThynworWindow(env);
				window.setMap(map);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}


	
	/**
	 * Stops the simulation and opens the net-selector dialog, on a fresh
	 * thread so the caller is not blocked.
	 */
	public void showNetSelectionDialog() {
		new Thread(new Runnable() {
			public void run() {
				stopSim();
				netSelectorDialog.display();
			}
		}).start();
	}

	/**
	 * Stops the simulation and opens the options dialog, on a fresh thread
	 * so the caller is not blocked.
	 */
	public void showOptionsDialog() {
		new Thread(new Runnable() {
			public void run() {
				stopSim();
				optionsDialog.display();
			}
		}).start();
	}

}
