package controler.algorithms.ghsom;

import java.util.LinkedList;
import java.util.Random;
/**
 * @file NeuronLayer.java
 *
 * A single layer (map) of the growing hierarchical self-organising
 * neural network (GHSOM). Class {@code NeuronLayer}.
 */
public class NeuronLayer implements INeuronLayer {
	/** Mean quantization error (MQE) of the parent neuron; this layer trains until its own MQE drops below TAU_1 * superMQE. */
	double superMQE;
	/** Input vectors assigned to this layer. */
	LinkedList<double[]> dataItems;
	/** Position (x, y) of the parent neuron within its own layer. */
	int[] superPos;
	/** Parent neuron this layer refines (null for the top layer). */
	Neuron superNeuron;
	/** Current mean quantization error of this layer. */
	double MQE;
	/** Position (x, y) of the neuron with the largest MQE on this layer. */
	int[] MQENeuron;
	/** Expansion threshold factor (Globals.TAU_1). */
	double stupidity;
	double ini_learnrate;
	double learnrate;
	double min_learnrate;
	double ini_neighbourhood;
	double neighbourhood;
	double min_neighbourhood;
	double neighbourhood_falloff;
	/** Depth of this layer in the hierarchy (1 = first layer below the virtual layer 0). */
	int level;
	/** Length of the input/weight vectors (Globals.vectorlength). */
	int dataLength;
	/** Current horizontal size of the map (number of columns). */
	int x;
	/** Current vertical size of the map (number of rows). */
	int y;
	/** Time-stretch constant for the learn-rate decay. */
	double STRETCH_PARAM_LEARN;
	/** Time-stretch constant for the neighbourhood-radius decay. */
	double STRETCH_PARAM_NEIGHB;
	/** The grid of neurons, indexed [column][row]. */
	private Neuron[][] neuronMap;
	/** Number of training cycles performed so far. */
	int currentCycle;
	/** Source of randomness for pattern selection during training. */
	Random rnd;

	/**
	 * Creates a new layer of neurons in the hierarchical network.
	 *
	 * @param sn - parent (super) neuron, or null for the top layer
	 * @param dataItems - data set mapped onto the parent neuron
	 * @param superMQE - the parent's mean quantization error ("super MQE")
	 * @param level - depth of the new layer in the hierarchy
	 * @param initialSizeX - initial number of columns
	 * @param initialSizeY - initial number of rows
	 * @param posX - x position of the parent neuron
	 * @param posY - y position of the parent neuron
	 * @param ULweight - initial weights for the upper-left neuron (orientation mode)
	 * @param URweight - initial weights for the upper-right neuron (orientation mode)
	 * @param LLweight - initial weights for the lower-left neuron (orientation mode)
	 * @param LRweight - initial weights for the lower-right neuron (orientation mode)
	 */
	NeuronLayer(Neuron sn, LinkedList<double[]> dataItems, double superMQE,
			int level, int initialSizeX, int initialSizeY, int posX, int posY,
			double[] ULweight, double[] URweight, double[] LLweight,
			double[] LRweight) {
		rnd = new Random();
		this.dataItems = dataItems;
		dataLength = Globals.vectorlength;
		this.superMQE = superMQE;
		superNeuron = sn;
		superPos = new int[2];
		superPos[0] = posX;
		superPos[1] = posY;
		MQE = Double.MAX_VALUE;
		MQENeuron = new int[2];
		stupidity = Globals.TAU_1;
		ini_learnrate = Globals.INITIAL_LEARNRATE;
		learnrate = Globals.INITIAL_LEARNRATE;
		min_learnrate = Globals.MIN_LEARNRATE;
		ini_neighbourhood = Globals.INITIAL_NEIGHBOURHOOD;
		neighbourhood = Globals.INITIAL_NEIGHBOURHOOD;
		min_neighbourhood = Globals.MIN_NEIGHBOURHOOD;
		this.level = level;
		x = initialSizeX;
		y = initialSizeY;
		// Decay time constants scale with the number of cycles per expansion check.
		STRETCH_PARAM_LEARN = (double) (Globals.EXPAND_CYCLES * dataItems.size()) / 16.0;
		STRETCH_PARAM_NEIGHB = (double) (Globals.EXPAND_CYCLES * dataItems.size()) / (double) 6.67;
		currentCycle = 0;
		setNeuronMap(new Neuron[initialSizeX][]);
		for (int i = 0; i < initialSizeX; i++) {
			getNeuronMap()[i] = new Neuron[initialSizeY];
		}

		if ((Globals.ORIENTATION) && (sn != null) && (level > 1))
		{
			// Orientation mode: seed the four corners with the weights derived
			// from the parent layer so the child map keeps the parent's topology.
			// NOTE(review): assumes initialSizeX and initialSizeY are >= 2 here — confirm at call sites.
			getNeuronMap()[0][0] = new Neuron(ULweight,  level, superPos[0], superPos[1]);
			getNeuronMap()[1][0] = new Neuron(URweight,  level, superPos[0], superPos[1]);
			getNeuronMap()[0][1] = new Neuron(LLweight,  level, superPos[0], superPos[1]);
			getNeuronMap()[1][1] = new Neuron(LRweight,  level, superPos[0], superPos[1]);
		}
		else {
			// Otherwise initialise every neuron from scratch.
			for (int i = 0; i < initialSizeY; i++) {
				for (int j = 0; j < initialSizeX; j++) {
					getNeuronMap()[j][i] = new Neuron(dataLength, level, superPos[0], superPos[1]);
				}
			}
		}
	}

	/**
	 * Adapts the weights of every neuron towards the input vector, scaled by
	 * the Euclidean grid distance to the winner, the current learn rate and
	 * the current neighbourhood radius.
	 *
	 * @param winner - grid position (x, y) of the winning neuron
	 * @param di - the input vector's weights
	 */
	@Override
	public void adaptWeights(int[] winner, double[] di) {
		for (int j = 0; j < y; j++)
		{
			for (int i = 0; i < x; i++)
			{
				getNeuronMap()[i][j].adaptWeights(di,
						(Math.sqrt(((winner[0] - i) * (winner[0] - i))
								+ ((winner[1] - j) * (winner[1] - j)))),
						learnrate, neighbourhood);
			}
		}

	}

	/**
	 * Computes the initial network error on layer 0: a single neuron holding
	 * the mean vector of all data items, whose QE sum becomes the layer MQE.
	 */
	@Override
	public void calcLayer0MQE()
	{
			double[] meanVec;
			meanVec = Globals.meanVector(dataItems, dataLength);
			getNeuronMap()[0][0] = new Neuron(meanVec, level, superPos[0],superPos[1]);
			MQE = 0;
			for (int i = 0; i < dataItems.size(); i++)
			{
				MQE += Globals.calcQE(((dataItems.get(i))),	getNeuronMap()[0][0].weights);
			}
			getNeuronMap()[0][0].addMap(dataItems, MQE, 1, Globals.INITIAL_X_SIZE,Globals.INITIAL_Y_SIZE, 0, 0);
	}

	/**
	 * Computes the current mean quantization error of the layer as the mean
	 * of all per-neuron MQEs that are positive, and records the position of
	 * the neuron with the largest MQE in {@link #MQENeuron}.
	 */
	@Override
	public void calcMQE()
	{
		MQE = 0;
		int c = 0;
		double maxMQE = 0;
		testDataItems();
		for (int i = 0; i < y; i++) {
			for (int j = 0; j < x; j++)
			{
				getNeuronMap()[j][i].calcMQE();
				if (getNeuronMap()[j][i].MQE > 0)
				{
					MQE += getNeuronMap()[j][i].MQE;
					c += 1;
				}
				double currMQE = getNeuronMap()[j][i].MQE;
				if (currMQE > maxMQE) {
					maxMQE = currMQE;
					MQENeuron[0] = j;
					MQENeuron[1] = i;
				}
			}
		}
		// FIX: guard against c == 0 (no neuron with positive MQE). The original
		// 0/0 produced NaN, which makes the stop condition in train() permanently
		// false (NaN comparisons) and could loop forever. MQE = 0 terminates training.
		MQE = (c > 0) ? (MQE / (double) c) : 0.0;
	}


	/**
	 * Returns the first layer of the hierarchy (the map attached to the
	 * single layer-0 neuron).
	 *
	 * @return the first layer's map
	 */
	@Override
	public NeuronLayer getLayer1Map()
	{
		return getNeuronMap()[0][0].getMap();
	}

	/**
	 * Finds the grid neighbour (up/down/left/right) whose weight vector is
	 * most dissimilar to the given neuron's weights.
	 *
	 * @param n - grid position (x, y) of the current neuron
	 * @return position of the most dissimilar neighbour; (0, 0) if the neuron
	 *         has no neighbours (1x1 map)
	 */
	@Override
	public int[] getMaxDissNeighbour(int[] n)
	{
		int[] res = new int[2];
		double diss = 0;
		if (n[1] > 0) { // upper
			double currDiss = Globals.calcQE(getNeuronMap()[n[0]][n[1]].weights,
					getNeuronMap()[n[0]][n[1] - 1].weights);
			if (currDiss >= diss) {
				diss = currDiss;
				res[0] = n[0];
				res[1] = n[1] - 1;
			}
		}
		if (n[1] < y - 1) { // lower
			double currDiss = Globals.calcQE(getNeuronMap()[n[0]][n[1]].weights,
					getNeuronMap()[n[0]][n[1] + 1].weights);
			if (currDiss >= diss) {
				diss = currDiss;
				res[0] = n[0];
				res[1] = n[1] + 1;
			}
		}
		if (n[0] > 0) { // left
			double currDiss = Globals.calcQE(getNeuronMap()[n[0]][n[1]].weights,
					getNeuronMap()[n[0] - 1][n[1]].weights);
			if (currDiss >= diss) {
				diss = currDiss;
				res[0] = n[0] - 1;
				res[1] = n[1];
			}
		}
		if (n[0] < x - 1) { // right
			double currDiss = Globals.calcQE(getNeuronMap()[n[0]][n[1]].weights,
					getNeuronMap()[n[0] + 1][n[1]].weights);
			if (currDiss >= diss) {
				diss = currDiss;
				res[0] = n[0] + 1;
				res[1] = n[1];
			}
		}
		return res;
	}

	/**
	 * Computes the four corner weight vectors (UL, UR, LL, LR) for a child map
	 * of the neuron at (xPos, yPos) by extrapolating the neuron's weights
	 * towards its grid neighbours. The results are written into the four
	 * caller-provided output arrays.
	 *
	 * @param xPos - column of the neuron to expand
	 * @param yPos - row of the neuron to expand
	 * @param UL - output: upper-left corner weights
	 * @param UR - output: upper-right corner weights
	 * @param LL - output: lower-left corner weights
	 * @param LR - output: lower-right corner weights
	 */
	@Override
	public void getNewWeights(int xPos, int yPos, double[] UL, double[] UR,
			double[] LL, double[] LR)
	{
		double[] twgt = getNeuronMap()[xPos][yPos].weights;
		// Neighbour weight vectors; null when the neighbour lies outside the grid.
		double[] nUL = null;
		double[] nU = null;
		double[] nL = null;
		double[] nBL = null;
		double[] nB = null;
		double[] nBR = null;
		double[] nUR = null;
		double[] nR = null;
		double[] dUL = new double[Globals.vectorlength];
		double[] dUR = new double[Globals.vectorlength];
		double[] dLL = new double[Globals.vectorlength];
		double[] dLR = new double[Globals.vectorlength];

		if ((xPos > 0) && (yPos > 0)) {
			nUL = getNeuronMap()[xPos - 1][yPos - 1].weights;
		}
		if (yPos > 0) {
			nU = getNeuronMap()[xPos][yPos - 1].weights;
		}
		if (xPos > 0) {
			nL = getNeuronMap()[xPos - 1][yPos].weights;
		}
		if ((xPos > 0) && (yPos < y - 1)) {
			nBL = getNeuronMap()[xPos - 1][yPos + 1].weights;
		}
		if (yPos < y - 1) {
			nB = getNeuronMap()[xPos][yPos + 1].weights;
		}
		if ((xPos < x - 1) && (yPos < y - 1)) {
			nBR = getNeuronMap()[xPos + 1][yPos + 1].weights;
		}
		if ((xPos < x - 1) && (yPos > 0)) {
			nUR = getNeuronMap()[xPos + 1][yPos - 1].weights;
		}
		if (xPos < x - 1) {
			nR = getNeuronMap()[xPos + 1][yPos].weights;
		}

		if ((xPos == 0) && (yPos == 0)) { // UpperL
			dUL = twgt;
			dUR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nR, twgt)));
			dLL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nB, twgt)));
			dLR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nBR, twgt),
							Globals.vectorDiff(nB, twgt)));
		} else if ((xPos == x - 1) && (yPos == 0)) { // UpperR
			dUL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nL, twgt)));
			dUR = twgt;
			dLL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nBL, twgt),
							Globals.vectorDiff(nB, twgt)));
			dLR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nB, twgt)));
		} else if ((xPos == 0) && (yPos == y - 1)) { // LowerL
			dUL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nU, twgt)));
			dUR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nU, twgt), Globals.vectorDiff(nUR, twgt),
							Globals.vectorDiff(nR, twgt)));
			dLL = twgt;
			dLR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nR, twgt)));
		} else if ((xPos == x - 1) && (yPos == y - 1)) { // LowerR
			dUL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nUL, twgt), Globals.vectorDiff(nU, twgt),
							Globals.vectorDiff(nL, twgt)));
			dUR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nU, twgt)));
			dLL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nL, twgt)));
			dLR = twgt;
		} else if ((yPos == 0) && (xPos > 0) && (xPos < x - 1)) { // UpperB
			dUL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nL, twgt)));
			dUR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nR, twgt)));
			dLL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nBL, twgt),
							Globals.vectorDiff(nB, twgt)));
			dLR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nBR, twgt),
							Globals.vectorDiff(nB, twgt)));
		} else if ((xPos == x - 1) && (yPos > 0) && (yPos < y - 1)) { // RightB
			dUL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nUL, twgt),
							Globals.vectorDiff(nU, twgt)));
			dUR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nU, twgt)));
			dLL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nBL, twgt),
							Globals.vectorDiff(nB, twgt)));
			// FIX: was vectorDiffMean(nL, twgt). On the right edge the lower-right
			// corner extrapolates along the edge towards the neighbour below,
			// mirroring LeftB (dLL uses nB) and UpperR (dLR uses nB).
			dLR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nB, twgt)));
		} else if ((yPos == y - 1) && (xPos > 0) && (xPos < x - 1)) { // LowerB
			dUL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nUL, twgt),
							Globals.vectorDiff(nU, twgt)));
			dUR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nUR, twgt),
							Globals.vectorDiff(nU, twgt)));
			dLL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nL, twgt)));
			dLR = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nR, twgt)));
		} else if ((xPos == 0) && (yPos > 0) && (yPos < y - 1)) { // LeftB
			dUL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nU, twgt)));
			dUR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nUR, twgt),
							Globals.vectorDiff(nU, twgt)));
			dLL = Globals.vectorAdd(twgt, (Globals.vectorDiffMean(nB, twgt)));
			dLR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nBR, twgt),
							Globals.vectorDiff(nB, twgt)));
		} else if ((xPos > 0) && (xPos < x - 1) && (yPos > 0) && (yPos < y - 1)) { // M
			dUL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nUL, twgt),
							Globals.vectorDiff(nU, twgt)));
			// FIX: was a copy of dLR (nR/nBR/nB). By symmetry with dUL (and with
			// every edge branch) the upper-right corner must extrapolate towards
			// the upper-right neighbours: nR, nUR, nU.
			dUR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nUR, twgt),
							Globals.vectorDiff(nU, twgt)));
			dLL = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nL, twgt), Globals.vectorDiff(nBL, twgt),
							Globals.vectorDiff(nB, twgt)));
			dLR = Globals.vectorAdd(
					twgt,
					Globals.vectorAdd3Mean(Globals.vectorDiff(nR, twgt), Globals.vectorDiff(nBR, twgt),
							Globals.vectorDiff(nB, twgt)));
		}

		// Copy the results into the caller-provided output arrays.
		for (int k = 0; k < Globals.vectorlength; k++)
		{
			UL[k] = dUL[k];
			UR[k] = dUR[k];
			LL[k] = dLL[k];
			LR[k] = dLR[k];
		}
	}

	/**
	 * Inserts a new column of neurons into the map at the given position.
	 * Each new neuron is initialised with the mean of its left and right
	 * neighbours' weights.
	 *
	 * @param pos - column index at which to insert; must be >= 1 (the new
	 *            neuron averages columns pos-1 and pos)
	 */
	@Override
	public void insertColumn(int pos) {
		x++;
		Neuron[][] newMap;
		newMap = new Neuron[x][];

		for (int i = 0; i < x; i++) {
			newMap[i] = new Neuron[y];
		}

		for (int i = 0; i < y; i++) {
			for (int j = 0; j < x; j++) {
				if (j < pos) {
					newMap[j][i] = getNeuronMap()[j][i];
				} else if (j == pos) {
					// insert new neuron with mean weights left/right
					newMap[j][i] = new Neuron(Globals.meanWeights(
							getNeuronMap()[j - 1][i].weights,
							getNeuronMap()[j][i].weights), level, superPos[0],
							superPos[1]);
				} else if (j > pos) {
					newMap[j][i] = getNeuronMap()[j - 1][i];
				}
			}
		}
		setNeuronMap(newMap);
	}

	/**
	 * Inserts a new row of neurons into the map at the given position.
	 * Each new neuron is initialised with the mean of its upper and lower
	 * neighbours' weights.
	 *
	 * @param pos - row index at which to insert; must be >= 1 (the new
	 *            neuron averages rows pos-1 and pos)
	 */
	@Override
	public void insertRow(int pos) {
		y++;
		Neuron[][] newMap;
		newMap = new Neuron[x][];

		for (int i = 0; i < x; i++) {
			newMap[i] = new Neuron[y];
		}

		for (int i = 0; i < y; i++) {
			for (int j = 0; j < x; j++) {
				if (i < pos) {
					newMap[j][i] = getNeuronMap()[j][i];
				} else if (i == pos) {
					// insert new neuron with mean weights upper/lower
					newMap[j][i] = new Neuron(Globals.meanWeights(
							getNeuronMap()[j][i - 1].weights,
							getNeuronMap()[j][i].weights), level, superPos[0],
							superPos[1]);
				} else if (i > pos) {
					newMap[j][i] = getNeuronMap()[j][i - 1];
				}
			}
		}
		setNeuronMap(newMap);
	}

	/**
	 * Maps every data item onto its best-matching (winner) neuron and
	 * registers the item with that neuron.
	 */
	@Override
	public void testDataItems() {
		int[] winner = new int[2];
		for (int d = 0; d < dataItems.size(); d++)
		{
			double winnerDist = Double.MAX_VALUE;
			for (int i = 0; i < y; i++) {
				for (int j = 0; j < x; j++) {
					double currDist = getNeuronMap()[j][i].calcDist(dataItems
							.get(d));
					if (currDist < winnerDist) {
						winnerDist = currDist;
						winner[0] = j;
						winner[1] = i;
					}
				}
			}
			getNeuronMap()[winner[0]][winner[1]].addRepresentingDataItem(dataItems.get(d));
		}
	}

	/**
	 * Trains the layer: repeatedly picks a random pattern, adapts the winner
	 * and its neighbourhood, and every EXPAND_CYCLES * |data| cycles checks
	 * the MQE; the map grows (row/column insertion) until the MQE drops below
	 * TAU_1 * superMQE or MAX_CYCLES is reached. Afterwards neurons whose MQE
	 * exceeds TAU_2 * layer-1 superMQE are expanded into child maps.
	 */
	@Override
	public void train()
	{
		  stupidity = Globals.TAU_1;
		  ini_learnrate = Globals.INITIAL_LEARNRATE;
		  learnrate = Globals.INITIAL_LEARNRATE;
		  min_learnrate = Globals.MIN_LEARNRATE;
		  min_neighbourhood = Globals.MIN_NEIGHBOURHOOD;
		  currentCycle = 0;

		  // Initial neighbourhood radius derived from the larger map dimension.
		  ini_neighbourhood = Math.sqrt(((double)((x>y)?x:y))/(2.0*Math.sqrt(-1.0*Math.log(Globals.NR))));
		  neighbourhood = ini_neighbourhood;
		  boolean run = true;

		  while(run)
		  {
		    currentCycle += 1;
		    // get next pattern
		    double[] currentDataItem = dataItems.get(rnd.nextInt(dataItems.size()));

		    // calculate activity and get winner
		    double winnerDist = Double.MAX_VALUE;
		    int[] winner = new int[2];

		    for (int i=0; i<y; i++)
		    {
		    	for (int j=0; j<x; j++)
		    	{
		    		double currDist = getNeuronMap()[j][i].calcDist(currentDataItem);
		    		if (currDist < winnerDist)
		    		{
		    			winnerDist = currDist;
		    			winner[0] = j;
		    			winner[1] = i;
		    		}
		    	}
		    }
		    // adapt weigths of winner and neighbours
		    adaptWeights(winner, currentDataItem);

		    // decrease learnrate
		    learnrate = (ini_learnrate * Math.exp(-1.0*((double)((currentCycle) % (Globals.EXPAND_CYCLES * dataItems.size())))/STRETCH_PARAM_LEARN))+(double)0.0001;
		    // reduce neighbourhood
		    neighbourhood = (ini_neighbourhood *
		    		Math.exp((double)-1.0*
		    				((double)((currentCycle) % ((double)Globals.EXPAND_CYCLES * dataItems.size())))
		    				/STRETCH_PARAM_NEIGHB))+(double)0.55;

		    // Every EXPAND_CYCLES * |data| cycles: re-map the data, recompute the
		    // MQE and decide whether to stop or to grow the map.
		    if (((currentCycle) % (Globals.EXPAND_CYCLES * dataItems.size())) == 0)
		    {
		    		for (int i=0; i<y; i++)
		    		{
		    			for (int j=0; j<x; j++)
		    			{
		    				getNeuronMap()[j][i].clearRepresentingDataItems();
		    			}
		    		}
		    		calcMQE();

			if ((MQE <= (stupidity*superMQE)) || ((Globals.MAX_CYCLES > 0) &&
					(currentCycle >= (Globals.MAX_CYCLES * dataItems.size()))))
			{
				run = false;
		    }
			else
			{
				// insert new row or column between the worst neuron and its
				// most dissimilar neighbour
				int[] dissNeighbour = getMaxDissNeighbour(MQENeuron);

				if (MQENeuron[0] > dissNeighbour[0])
				{ // left
					insertColumn(MQENeuron[0]);
				}
				else if (MQENeuron[0] < dissNeighbour[0])
				{ // right
					insertColumn(dissNeighbour[0]);
				}
				else if (MQENeuron[1] > dissNeighbour[1])
				{ // upper
					insertRow(MQENeuron[1]);
				}
				else if (MQENeuron[1] < dissNeighbour[1])
				{ // lower
					insertRow(dissNeighbour[1]);
				}
				// set learnrate to initial value
				learnrate = ini_learnrate;
				// set neighbourhood (map size changed)
				ini_neighbourhood = Math.sqrt(((double)((x>y)?x:y))/((double)2.0*Math.sqrt((double)-1.0*Math.log(Globals.NR))));
				neighbourhood = ini_neighbourhood;
			}
		  }

		  }

		  // TAU_2 is threshold for expansion
		  boolean r = true;
		  boolean expo = true;

		  // Below the first layer, only expand if some neuron represents fewer
		  // multi-data items than the parent neuron does.
		  if (level > 1)
		  {
		    int ii = 0;
		    while ((ii<y) && r)
		    {
		      int jj = 0;
		      while ((jj<x) && r)
		      {
		    	  if(superNeuron.representsMultiDataItems()==getNeuronMap()[jj][ii].representsMultiDataItems())
		    	  {
		    		  expo = false;
		    		  r = false;
		    	  }
		    	  else if (superNeuron.representsMultiDataItems()>getNeuronMap()[jj][ii].representsMultiDataItems())
		    	  {
		    		  expo = true;
		    		  r = false;
		    	  }
		    	  jj++;
		      }
		      ii++;
		    }
		  }

		  if (expo)
		  {
		    for (int i=0; i<y; i++) {
		      for (int j=0; j<x; j++)
		      {
		    	  if ((Globals.getFirstLayerMap().superMQE*Globals.TAU_2) < getNeuronMap()[j][i].MQE)
		    	  {
		    		  if(Globals.ORIENTATION)
		    		  {
		    			  // Orientation mode: seed the child map's corners from this
		    			  // neuron's neighbourhood so it inherits the topology.
		    			  double[] UL = new double [Globals.vectorlength];
		    			  double[] UR = new double [Globals.vectorlength];
		    			  double[] LL = new double [Globals.vectorlength];
		    			  double[] LR = new double [Globals.vectorlength];

		    			  getNewWeights(j,i,UL,UR,LL,LR);

		    			  getNeuronMap()[j][i].addMap(getNeuronMap()[j][i],getNeuronMap()[j][i].MQE,level+1,j,i,UL,UR,LL,LR);
		    		  }
		    		  else
		    		  {
		    			  getNeuronMap()[j][i].addMap(getNeuronMap()[j][i],getNeuronMap()[j][i].MQE,level+1,Globals.INITIAL_X_SIZE,Globals.INITIAL_Y_SIZE,j,i);
		    		  }
		    		  Globals.addLayer(level+1,getNeuronMap()[j][i].getMap());
		    	  }
		      }
		    }
		  }
	}

	/**
	 * Tests the network with an input pattern (prediction): finds the neuron
	 * whose historical part is closest to the pattern.
	 *
	 * @param pattern - input pattern
	 * @return array of {winner distance, winner's map level, winner x, winner y}
	 */
	public double[] test(double[] pattern)
	{
	    double winnerDist = Double.MAX_VALUE;
	    int[] winner = new int[2];

	    for (int i=0; i<y; i++)
	    {
	    	for (int j=0; j<x; j++)
	    	{
	    		double currDis = alterCalc(getNeuronMap()[j][i].weights,pattern);
	    		if (currDis < winnerDist)
	    		{
	    			winnerDist = currDis;
	    			winner[0] = j;
	    			winner[1] = i;
	    		}
	    	}
	    }
	    double[] result = new double[4];
	    result[0] = winnerDist;
	    result[1] = getNeuronMap()[winner[0]][winner[1]].maplevel;
	    result[2] = winner[0];
	    result[3] = winner[1];
	    return result;
	}

	/**
	 * Computes the similarity of the historical parts of two vectors as the
	 * squared Euclidean distance over the first Globals.H components.
	 *
	 * @param v1 - vector 1
	 * @param v2 - vector 2
	 * @return squared distance between the two vectors' historical parts
	 */
	public double alterCalc(double[] v1, double[] v2)
	{
		  double res = 0;
		  for (int i=0;i<Globals.H;i++)
		  {
			  res += ((v1[i]-v2[i]) * (v1[i]-v2[i]));
		  }
		  return res;
	}

	/**
	 * Returns the current neuron map.
	 *
	 * @return the current neuron map, indexed [column][row]
	 */
	public Neuron[][] getNeuronMap() {
		return neuronMap;
	}

	/**
	 * Sets a new neuron map.
	 *
	 * @param neuronMap - the new neuron map
	 */
	public void setNeuronMap(Neuron[][] neuronMap) {
		this.neuronMap = neuronMap;
	}
}
