package org.slusk.thynwor.feedback;

import java.util.LinkedList;
import java.util.List;

import org.slusk.thynwor.corestructures.Drone;
import org.slusk.thynwor.corestructures.Drone.MoveOrder;
import org.slusk.thynwor.corestructures.Entity;
import org.slusk.thynwor.corestructures.Environment;
import org.slusk.thynwor.corestructures.Food;
import org.slusk.thynwor.corestructures.GridMap;
import org.slusk.thynwor.corestructures.Region.RegionType;
import org.slusk.thynwor.util.Coordinates;

import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.onlinereinforcement.trainingbank.Feedback;
import com.anji.hyperneat.onlinereinforcement.trainingbank.Feedback.FeedbackType;
import com.anji.hyperneat.onlinereinforcement.trainingbank.TrainingSample;

/**
 * Feedback provider that scores a drone's proposed move against a set of
 * prioritized reward rules (food pickup/assist, bumping, signal following,
 * dropping, delivering, and carrying food toward/away from the base) and
 * emits the result as reinforcement, punishment, or neutral feedback.
 */
public class ReinforcementProvider extends AbstractFeedbackProvider {

	/**
	 * Creates a provider bound to the given environment, which supplies the
	 * reward-factor constants used to score moves.
	 *
	 * @param environment the simulation environment queried for reward factors
	 */
	public ReinforcementProvider(Environment environment) {
		super(environment);
	}

	/**
	 * Produces feedback for a proposed move.
	 *
	 * <p>{@code inputs} and {@code outputs} are accepted to satisfy the
	 * provider interface but are not used here: the reinforcement depends only
	 * on the move itself and the output coordinates that produced it.
	 */
	@Override
	public Feedback getFeedback(Drone bot, MoveOrder mo, NDFloatArray inputs, Coordinates outputCoords, NDFloatArray outputs) {
		return getReinforcement(bot, mo, outputCoords);
	}

	/**
	 * Scores the drone's proposed move. At most one rule fires, evaluated in
	 * priority order: pick up / assist with food, bump an obstacle or the map
	 * edge, move toward a signal, drop towed food, deliver food to a base,
	 * then move toward or away from the nearest base while towing.
	 *
	 * @param bot          the drone issuing the move
	 * @param mo           the proposed move order
	 * @param outputCoords the network output cell that selected this move
	 * @return feedback whose type reflects the sign of the reward factor
	 */
	protected Feedback getReinforcement(Drone bot, MoveOrder mo, Coordinates outputCoords) {
		boolean isTowingResource = bot.isTowingResource();
		Coordinates coords = bot.getCoordinates();
		double rewardFactor = 0;
		List<Coordinates> signals = environment.buildSignalCoordinatesList();

		GridMap map = map();
		// Out-of-bounds targets have no entity; the bump rule below catches them.
		Entity entity = map.isInBounds(mo.proposedCoordinates) ? map.get(mo.proposedCoordinates).getContents() : null;

		// Did it pick up food? (instanceof already implies non-null)
		if (entity instanceof Food && !isTowingResource) {
			rewardFactor = environment.getRFAttachedToFood();

			// Did it assist with food other agents are already interacting with?
			Food f = (Food) entity;
			if (!f.isBeingTowed() && !f.getInteractingAgents().isEmpty()) {
				rewardFactor = environment.getRFAssistedWithFood();
			}
		}

		// Did it bump something (occupied cell or the map edge)?
		else if (null != entity || !map.isInBounds(mo.proposedCoordinates)) {
			rewardFactor = environment.getRFBumped();
		}

		// Move toward signal
		else if (!isTowingResource && !signals.isEmpty()) {
			// If it moves toward any signal, that's good enough.
			for (Coordinates signal : signals) {
				if (GridMap.calculateSpecialManhattanDistance(mo.proposedCoordinates, signal) < GridMap.calculateSpecialManhattanDistance(coords, signal)) {
					rewardFactor = environment.getRFMovedTowardSignal();
					break;
				}
			}
		}

		// Did it drop food? (moved more than 2 away from the towed entity)
		else if (isTowingResource && GridMap.calculateSpecialManhattanDistance(mo.proposedCoordinates, bot.getInteractee().getCoordinates()) > 2) {
			rewardFactor = environment.getRFDroppedFood();
		}

		// Did it deliver food?
		else if (isTowingResource && map.isInRegion(mo.proposedCoordinates, RegionType.BASE)) {
			rewardFactor = environment.getRFDeliveredFood();
		}

		// Did it move toward or away from the base while carrying food?
		// Each base distance is computed once here instead of once per branch.
		else if (isTowingResource) {
			double proposedDistance = GridMap.calculateSpecialManhattanDistance(mo.proposedCoordinates,
					map.getClosestRegionOfType(mo.proposedCoordinates, RegionType.BASE).getCenter());
			double currentDistance = GridMap.calculateSpecialManhattanDistance(coords,
					map.getClosestRegionOfType(coords, RegionType.BASE).getCenter());
			if (proposedDistance < currentDistance) {
				rewardFactor = environment.getRFMovedFoodTowardGoal();
			} else if (proposedDistance > currentDistance) {
				rewardFactor = environment.getRFMovedAwayFromGoal();
			}
			// Equal distance: rewardFactor stays 0 (neutral), as before.
		}

		NDFloatArray feedback = processFeedbackForReinforcement(rewardFactor, outputCoords);

		return new Feedback(feedback, rewardFactor > 0 ? FeedbackType.REINFORCEMENT : rewardFactor < 0 ? FeedbackType.PUNISHMENT : FeedbackType.NEUTRAL);
	}

	/**
	 * Builds a 3x3x1 feedback array that is zero everywhere except at the
	 * output cell that selected the move, which receives the reward factor.
	 *
	 * @param rewardFactor the scalar reward (may be negative or zero)
	 * @param outputCoords the output cell to credit
	 * @return the populated feedback array
	 */
	protected NDFloatArray processFeedbackForReinforcement(double rewardFactor, Coordinates outputCoords) {
		NDFloatArray feedback = new NDFloatArray(3, 3, 1);
		feedback.clear();

		if (rewardFactor != 0) {
			feedback.set((float) rewardFactor, outputCoords.x, outputCoords.y, 0);
		}

		return feedback;
	}

	/**
	 * Rewrites the sample's feedback as full reinforcement (1.0) at its output
	 * cell and applies the given training weight.
	 */
	@Override
	public void convertTrainingSampleToReinforcement(ThynworTrainingSample sample, float weight) {
		// NOTE(review): assumes sample.feedback is always populated by the time
		// a sample is converted — confirm; a null feedback would NPE here.
		sample.feedback.value.set(1.0F, sample.outputCoords.x, sample.outputCoords.y, 0);
		sample.feedback.type = FeedbackType.REINFORCEMENT;
		sample.weight = weight;
	}

	/**
	 * Rewrites the sample's feedback as punishment (0) at its output cell.
	 */
	@Override
	public void convertTrainingSampleToPunishment(ThynworTrainingSample sample) {
		// NOTE(review): assumes sample.feedback is always populated by the time
		// a sample is converted — confirm; a null feedback would NPE here.
		sample.feedback.value.set(0, sample.outputCoords.x, sample.outputCoords.y, 0);
		sample.feedback.type = FeedbackType.PUNISHMENT;
	}

}
