package ee.ut.aa.neuraltic.model;

import java.util.List;

import org.apache.log4j.Logger;

import ee.ut.aa.neuraltic.neural.Network;

/**
 * A {@link Player} whose moves are chosen by a neural {@link Network}:
 * the current board is fed to the network and the legal move with the
 * highest network output is played.
 */
public class NeuralPlayer implements Player {

	private static final Logger log = Logger.getLogger( NeuralPlayer.class );

	/** Marker value this player writes onto the board. */
	private int value;
	/** Network used to score candidate moves; never reassigned. */
	private final Network network;

	/**
	 * Creates a player that uses the given network to pick moves.
	 *
	 * @param value marker value this player places on the board
	 * @param network network used to evaluate board positions
	 */
	public NeuralPlayer( int value, Network network ) {

		this.value = value;
		this.network = network;
	}

	/**
	 * Feeds the board state to the network and plays the legal move with
	 * the highest output activation, writing this player's value into
	 * that field.
	 *
	 * @param board the current board; mutated in place
	 * @return the same board instance with this player's move applied
	 * @throws IndexOutOfBoundsException if the board has no legal moves
	 */
	@Override
	public Board makeNextMove( Board board ) {

		log.debug( "Requesting next move for player " + value );

		int[] fields = board.getFields();
		double[] networkInput = new double[board.getSize()];

		// Widen the int field values to the double inputs the network expects.
		for( int i = 0; i < board.getSize(); i++ ) {
			networkInput[i] = fields[i];
		}

		network.getInput().feedInput( networkInput );
		network.getInput().initFeedForward();

		// No pre-allocation needed: the output array comes from the network.
		double[] networkOutput = network.getOutput().retreiveOutput();

		List<Integer> legalMoves = board.getLegalMoves();

		// Pick the legal move with the strongest network activation.
		int bestMove = legalMoves.get( 0 );

		for( int move : legalMoves )
			if( networkOutput[move] > networkOutput[bestMove] )
				bestMove = move;

		log.debug( "Next move is: " + bestMove );

		board.setValue( bestMove, value );

		return board;
	}

	@Override
	public int getValue() {

		return value;
	}

	@Override
	public void setValue( int value ) {
		this.value = value;
	}
}
