package ee.ut.aa.neuraltic.logic;

import java.util.List;

import org.apache.log4j.Logger;

import ee.ut.aa.neuraltic.genetic.Brain;
import ee.ut.aa.neuraltic.model.HumanPlayer;
import ee.ut.aa.neuraltic.model.NeuralPlayer;
import ee.ut.aa.neuraltic.model.Player;
import ee.ut.aa.neuraltic.model.RandomPlayer;
import ee.ut.aa.neuraltic.model.TicBoard;
import ee.ut.aa.neuraltic.neural.Network;

/**
 * Trains a population of neural networks to play tic-tac-toe with a genetic
 * algorithm: each generation, every network plays every other network in a
 * round-robin tournament and accumulates a fitness score from wins, draws
 * and losses; {@link Brain#iteration()} then evolves the population.
 */
public class GeneticTrainer {

	private static final Logger log = Logger.getLogger( GeneticTrainer.class );

	// Fitness deltas awarded per game outcome. A draw scores almost as much
	// as a win, so networks are not punished for cautious play.
	private static final int WIN_VALUE = 10;
	private static final int DRAW_VALUE = 7;
	private static final int LOSS_VALUE = -5;

	private final Brain brain;
	private final Competition comp;

	/**
	 * Creates a trainer backed by a freshly initialized random population.
	 */
	public GeneticTrainer() {

		brain = new Brain();
		brain.initPopulation();
		comp = new Competition();
	}

	/**
	 * Runs the genetic training loop for the given number of generations.
	 *
	 * @param nrOfIterations number of generations to evolve
	 */
	public void startTraining( int nrOfIterations ) {

		log.info( "Starting training." );

		for( int i = 0; i < nrOfIterations; i++ ) {

			log.info( "Training iteration=" + i );

			log.debug( "Starting brain iteration." );

			// Evolve the population (selection / crossover / mutation is
			// Brain's responsibility).
			brain.iteration();

			List<Network> pop = brain.getPopulation();

			// Presumably pop.get( 0 ) is the current best network — TODO confirm
			// the ordering contract of Brain.getPopulation().
			PopulationStats.logNetwork( pop.get( 0 ) );

			log.debug( "Starting training with population of size=" + pop.size() );

			// Round-robin: every network plays every network (including
			// itself), once as player 1 (X) and once as player -1 (O).
			int f = 0;
			for( Network netOne : pop ) {

				log.debug( "Training network=" + f++ );
				for( Network netTwo : pop ) {

					NeuralPlayer plOne = new NeuralPlayer( 1, netOne );
					NeuralPlayer plTwo = new NeuralPlayer( -1, netTwo );

					Player winner = comp.competition( plOne, plTwo, new TicBoard() );

					if( winner == plOne ) {
						netOne.setValue( netOne.getValue() + WIN_VALUE );
						netTwo.setValue( netTwo.getValue() + LOSS_VALUE );
					} else if( winner == plTwo ) {
						netOne.setValue( netOne.getValue() + LOSS_VALUE );
						netTwo.setValue( netTwo.getValue() + WIN_VALUE );
					} else {
						netOne.setValue( netOne.getValue() + DRAW_VALUE );
						netTwo.setValue( netTwo.getValue() + DRAW_VALUE );
					}
				}
			}

			PopulationStats.logStats( pop );

			// Benchmark the leading network against a random opponent.
			quickTest( new NeuralPlayer( 1, pop.get( 0 ) ) );
		}
	}

	/**
	 * Plays the given player against a random opponent for Brain.MAXPOP games
	 * and logs the win/draw/loss tally to the "networks" logger.
	 *
	 * @param plOne the neural player to evaluate (plays as 1 / X)
	 */
	private void quickTest( NeuralPlayer plOne ) {

		Logger netlog = Logger.getLogger( "networks" );

		// NOTE(review): this local shadows the comp field; kept as-is in case
		// Competition carries per-tournament state — confirm and drop if not.
		Competition comp = new Competition();

		RandomPlayer plTwo = new RandomPlayer( -1 );

		int wins = 0;
		int drws = 0;
		int loss = 0;

		for( int i = 0; i < Brain.MAXPOP; i++ ) {

			Player winner = comp.competition( plOne, plTwo, new TicBoard() );

			if( winner == plOne )
				wins++;
			else if( winner == plTwo )
				loss++;
			else
				drws++;
		}

		netlog.debug( "Network result against random player wins=" + wins + ";drws=" + drws + ";loss=" + loss );

		// For manual play-testing against a human:
		// comp.competition( plOne, new HumanPlayer(), new TicBoard() );
	}
}
