package pl.edu.agh.student.nn.main;

//import pl.edu.agh.student.nn.chart.DividerLine;

import pl.edu.agh.student.nn.core.*;
import pl.edu.agh.student.nn.core.kohonen.KohonenLayer;
import pl.edu.agh.student.nn.loader.NetworkLoader;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Klasa do wykonywania obliczen na sieci neuronowej. Umozliwia:
 * - podanie pliku z opisem sieci;
 * - podawanie roznych wartosci na wejscie wczytanej sieci.
 *
 * <p>Interactive console driver: reads single-letter commands from stdin and
 * dispatches them against a {@code NeuralNetwork} loaded via {@code NetworkLoader}.
 */
public class Application {

    /** Base directory where network description files are looked up. */
    public static String BASE_DIR = "src/main/resources/";
    /** Default network description file name (kept for external callers). */
    public static String INPUT_FILE = "input.txt";

    /**
     * Entry point: runs the interactive command loop until "q" or end of input.
     *
     * @param args unused
     * @throws Exception on unrecoverable I/O or network-loader errors
     */
    public static void main(String[] args) throws Exception {
        NeuralNetwork nn = null;
        String fileName = "";
        // try-with-resources guarantees the reader is closed even if a command throws
        try (BufferedReader input = new BufferedReader(new InputStreamReader(System.in))) {
            while (true) {
                System.out.print("Podaj:\n"
                        + "* quit\n"
                        + "* p <nazwa_pliku>\n"
                        + "* t (wynik dla zbiorow uczacych)\n"
                        + "* l <liczba iteracji> - learn\n"
                        + "* b <liczba iteracji> - backpropagation\n"
                        + "* d <liczba wag w wierszu> - display in MxX form\n"
                        + "* f <iteracje> <learning rate step> <momentum step> - find params\n"
                        + "* s <"
                        + (nn == null ? "-" : nn.getLayers().get(0).getNeurons().size())
                        + " liczby oddzielone spacja>\n"
                        + "> ");

                String in = input.readLine();
                // readLine() returns null at end of stream (e.g. piped input) —
                // previously this caused an NPE on in.trim(); treat it as quit.
                if (in == null) {
                    break;
                }
                String cmd = in.trim();
                if (cmd.startsWith("q")) {
                    break;
                }
                String[] inString = cmd.split("\\s+");
                if (cmd.startsWith("p")) {
                    fileName = BASE_DIR + inString[1];
                    nn = NetworkLoader.loadFromFile(fileName);
                    nn.display();
                    nn.calculate();

                    System.out.println("Output:");
                    nn.showOutput();
                } else if (nn == null) {
                    // Every remaining command dereferences nn; previously this was an NPE
                    // when the user issued s/l/b/t/d/f before loading a network.
                    System.out.println("Najpierw wczytaj siec poleceniem: p <nazwa_pliku>");
                } else if (cmd.startsWith("s")) {
                    // Feed a manual input vector and show the network's response.
                    double[] inDouble = new double[inString.length - 1];
                    for (int k = 1; k < inString.length; k++) {
                        inDouble[k - 1] = Double.parseDouble(inString[k]);
                    }

                    nn.setInput(inDouble);
                    nn.calculate();
                    System.out.println("Output:");
                    nn.showOutput();
                } else if (cmd.startsWith("l")) {
                    // Kohonen-style learning for the requested number of iterations.
                    // Parse the count once instead of re-parsing in the loop condition.
                    int iterations = Integer.parseInt(inString[1]);
                    for (int i = 0; i < iterations; i++) {
                        nn.learn();
                    }
                    System.out.printf("Wagi po nauczeniu (%d):\n", nn.getCurrentIteration());
                    if (nn.getLayers().get(nn.getLayers().size() - 1).getType() == LayerType.KOH) {
                        ((KohonenLayer) nn.getLayers().get(nn.getLayers().size() - 1)).display3x3();
                    } else {
                        nn.showOutput();
                        // assumes the second-to-last layer is a KohonenLayer when the last is not
                        ((KohonenLayer) nn.getLayers().get(nn.getLayers().size() - 2)).display3x3();
                    }
                } else if (cmd.startsWith("b")) {
                    int iterations = Integer.parseInt(inString[1]);
                    // Optional RMS display step is loop-invariant — parse it once,
                    // not on every backpropagation iteration as before.
                    int displayRmsStep = inString.length > 2 ? Integer.parseInt(inString[2]) : 0;
                    for (int i = 0; i < iterations; i++) {
                        nn.backPropagation(displayRmsStep);
                    }
                    System.out.printf("Wagi po nauczeniu (%d):\n", nn.getCurrentIteration());
                    nn.display();

                } else if (cmd.startsWith("t")) {
                    // Evaluate the network on every training set and print each output.
                    System.out.printf("Wagi po nauczeniu (%d):\n", nn.getCurrentIteration());
                    for (TrainingSet trainingSet : nn.getLerningData()) {
                        nn.setInput(trainingSet.getElements());
                        nn.calculate();
                        nn.showOutput();
                        System.out.println("^^^^^^^^^^^^");
                    }
                } else if (cmd.startsWith("d")) {
                    System.out.printf("Wagi polaczen (%d):\n", nn.getCurrentIteration());
                    nn.displayMxX(Integer.parseInt(inString[1]));
                } else if (cmd.startsWith("f")) {
                    // Grid search over (learningRate, momentum) in [0, 1]:
                    // reload the network fresh for each combination, train, then
                    // rank combinations by mean RMS error over the training data.
                    int iterations = Integer.parseInt(inString[1]);
                    double learningRateStep = Double.parseDouble(inString[2]);
                    double momentumStep = Double.parseDouble(inString[3]);
                    double learningRate = 0.0;
                    List<BpResult> bpResultList = new ArrayList<BpResult>();
                    while (learningRate <= 1.0) {
                        double momentum = 0.0;
                        while (momentum <= 1.0) {
                            // Fresh network so earlier combinations don't bias this run.
                            nn = NetworkLoader.loadFromFile(fileName);
                            // Layer 0 is the input layer — only hidden/output layers learn.
                            for (int layerIndex = 1; layerIndex < nn.getLayers().size(); layerIndex++) {
                                Layer layerToChange = nn.getLayers().get(layerIndex);
                                layerToChange.getDoubleParameters().put(
                                        LayerParameter.ALPHA,
                                        Arrays.asList(learningRate, 0.0)
                                );
                                layerToChange.getDoubleParameters().put(
                                        LayerParameter.MOMENTUM,
                                        Arrays.asList(momentum, 0.0)
                                );
                            }
                            for (int i = 0; i < iterations; i++) {
                                nn.backPropagation(0);
                            }
                            double rmsError = 0.0;
                            for (TrainingSet trainingSet : nn.getLerningData()) {
                                nn.setInput(trainingSet.getElements());
                                rmsError += nn.calculateRmsError(trainingSet);
                            }
                            rmsError /= nn.getLerningData().size();
                            String message = String.format("RMS[momentum=%.4f][learningRate=%.4f] = %.6f",
                                    momentum, learningRate, rmsError);
                            System.out.println(message);

                            BpResult bpResult = new BpResult();
                            bpResult.description = message;
                            bpResult.rmsError = rmsError;
                            bpResultList.add(bpResult);
                            momentum += momentumStep;
                        }
                        learningRate += learningRateStep;
                    }
                    // BpResult orders ascending by rmsError, so best parameters print first.
                    Collections.sort(bpResultList);
                    System.out.println("----\nSorted:");
                    for (BpResult bpResult : bpResultList) {
                        System.out.println(bpResult.description);
                    }
                }
                // unrecognized commands fall through and the menu is shown again
            }
        }
    }
}

/**
 * Pairs a grid-search run's printable description with its mean RMS error.
 * Natural ordering is ascending by error, so after sorting the best
 * (lowest-error) parameter combinations come first.
 */
class BpResult implements Comparable<BpResult> {
    /** Human-readable summary of the momentum/learning-rate combination. */
    public String description;
    /** Mean RMS error over the training sets for that combination. */
    public double rmsError;

    /**
     * Orders results by ascending RMS error.
     * {@link Double#compare} is used instead of subtraction to stay correct
     * for NaN and to avoid sign/overflow pitfalls.
     */
    @Override
    public int compareTo(BpResult o) {
        return Double.compare(rmsError, o.rmsError);
    }
}
