package redesN;

import java.util.List;

import org.neuroph.core.Connection;
import org.neuroph.core.Layer;
import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.Neuron;
import org.neuroph.core.Weight;

import org.neuroph.core.transfer.Sigmoid;

import org.neuroph.nnet.*;

public class redeMLP
{
	/** Index into the bias array where the hidden-layer biases start (indices 0-3 are never read). */
	private static final int HIDDEN_BIAS_OFFSET = 4;

	/** Index into the bias array where the output-layer biases start. */
	private static final int OUTPUT_BIAS_OFFSET = 8;

	/** File the configured network is serialized to. */
	private static final String NETWORK_FILE = "AimoresMLP.nnet";

	/**
	 * Builds a 4-4-10 multilayer perceptron, loads a fixed set of
	 * pre-trained weights and biases into it, installs sigmoid transfer
	 * functions on the hidden and output layers, and saves the network
	 * to {@value #NETWORK_FILE}.
	 */
	public static void criarede()
	{
		// Pre-trained bias values. Indices 4-7 belong to the four hidden
		// neurons, indices 8-17 to the ten output neurons; indices 0-3
		// are never read (presumably leftovers - TODO confirm).
		double[] bias = { 0.26473284, 0.22886133, 0.14611349, -0.06146123,
				1.72220170, 0.17908052, 1.42008960, 0.41444504, -0.48077717,
				-0.33544648, -0.16982262, -0.01660623, 0.13553423, 0.35274300,
				0.66665721, 1.05714178, 1.44783568, 1.53511238 };

		// Input -> hidden weights: pesosEscondida[h][i] is the weight of
		// the connection from input neuron i to hidden neuron h.
		double[][] pesosEscondida = {
				{ -7.9522948, -0.6166771, -0.5030040, -5.0357342 },
				{ -7.4003620, -0.4708695, -0.3811646, -4.6215096 },
				{ -9.3123455, 8.1022892, -2.4317765, 0.5080084 },
				{ -10.248049, 5.833312, 3.179726, -6.011730 } };

		// Hidden -> output weights: pesosSaida[o][h] is the weight of the
		// connection from hidden neuron h to output neuron o.
		double[][] pesosSaida = {
				{ -1.3478824, -1.0627261, 0.2346033, -2.5867436 },
				{ -1.19181430, -1.63708901, -0.04165911, -2.35845184 },
				{ -1.343373, -1.686899, -0.388488, -1.915620 },
				{ -1.6157781, -1.2952805, -0.6989595, -1.5438006 },
				{ -1.4634124, -1.3294152, -0.9468921, -1.5849172 },
				{ -1.136757, -1.287687, -1.277868, -1.823783 },
				{ -0.8167267, -0.8794646, -1.7925934, -2.0847373 },
				{ -0.4374857, -0.7360787, -2.4638116, -2.2077394 },
				{ -0.2863807, -0.5892866, -3.1648684, -2.0464172 },
				{ -0.1310653, -0.5850934, -3.2445083, -2.3374841 } };

		// Network topology: 4 inputs, 4 hidden neurons, 10 outputs.
		NeuralNetwork neuralNetwork = new MultiLayerPerceptron(4, 4, 10);

		configurarCamadaEscondida(neuralNetwork, pesosEscondida, bias);
		configurarPesosSaida(neuralNetwork, pesosSaida);
		configurarBiasSaida(neuralNetwork, bias);
		configurarAtivacaoSaida(neuralNetwork);

		// TODO: is it necessary to rebuild the network every time, or
		// should it be created once and then loaded from the file?
		neuralNetwork.save(NETWORK_FILE);
	}

	/**
	 * Sets the sigmoid transfer function and the input-connection weights
	 * (including the bias weight) of each hidden neuron.
	 *
	 * @param rede  network whose layer 1 is the hidden layer
	 * @param pesos input->hidden weight matrix, one row per hidden neuron
	 * @param bias  bias array; hidden biases start at {@link #HIDDEN_BIAS_OFFSET}
	 */
	private static void configurarCamadaEscondida(NeuralNetwork rede,
			double[][] pesos, double[] bias)
	{
		List<Neuron> neurons = rede.getLayerAt(1).getNeurons();
		// The layer's last neuron is the bias neuron - skip it.
		for (int n = 0; n < neurons.size() - 1; n++)
		{
			neurons.get(n).setTransferFunction(new Sigmoid());
			List<Connection> conexoes = neurons.get(n).getInputConnections();
			for (int c = 0; c < conexoes.size(); c++)
			{
				if (c == conexoes.size() - 1)
				{
					// The last input connection comes from the previous
					// layer's bias neuron.
					conexoes.get(c).setWeight(
							new Weight(bias[n + HIDDEN_BIAS_OFFSET]));
				}
				else
				{
					conexoes.get(c).setWeight(new Weight(pesos[n][c]));
				}
			}
		}
	}

	/**
	 * Sets the hidden -> output weights by walking the outgoing
	 * connections of each (non-bias) hidden neuron.
	 *
	 * @param rede  network whose layer 1 is the hidden layer
	 * @param pesos hidden->output weight matrix, one row per output neuron
	 */
	private static void configurarPesosSaida(NeuralNetwork rede,
			double[][] pesos)
	{
		List<Neuron> neurons = rede.getLayerAt(1).getNeurons();
		for (int n = 0; n < neurons.size() - 1; n++)
		{
			List<Connection> conexoes = neurons.get(n).getOutConnections();
			// Outgoing connection c of hidden neuron n feeds output
			// neuron c, hence the transposed lookup pesos[c][n].
			for (int c = 0; c < conexoes.size(); c++)
			{
				conexoes.get(c).setWeight(new Weight(pesos[c][n]));
			}
		}
	}

	/**
	 * Sets the output-layer bias weights via the outgoing connections of
	 * the hidden layer's bias neuron (the last neuron of that layer).
	 *
	 * @param rede network whose layer 1 is the hidden layer
	 * @param bias bias array; output biases start at {@link #OUTPUT_BIAS_OFFSET}
	 */
	private static void configurarBiasSaida(NeuralNetwork rede, double[] bias)
	{
		List<Neuron> neurons = rede.getLayerAt(1).getNeurons();
		List<Connection> conexoes =
				neurons.get(neurons.size() - 1).getOutConnections();
		for (int c = 0; c < conexoes.size(); c++)
		{
			conexoes.get(c).setWeight(
					new Weight(bias[c + OUTPUT_BIAS_OFFSET]));
		}
	}

	/**
	 * Sets the sigmoid transfer function on every output-layer neuron.
	 *
	 * @param rede network whose layer 2 is the output layer
	 */
	private static void configurarAtivacaoSaida(NeuralNetwork rede)
	{
		for (Neuron neuron : rede.getLayerAt(2).getNeurons())
		{
			neuron.setTransferFunction(new Sigmoid());
		}
	}
}
