package com.cloudea.learn;

import com.cloudea.learn.component.Component;
import com.cloudea.learn.layer.Layer;
import com.cloudea.learn.metric.Metric;
import com.cloudea.learn.optimizer.Optimizer;
import com.cloudea.learn.optimizer.OptimizerBuilder;

import java.io.InputStream;
import java.io.OutputStream;
import java.util.*;
import java.util.concurrent.*;

public class Model {
    private final Component component;
    private final Random random;
    /** Per-parameter optimizer state, keyed by the parameter tensor object itself. */
    private final Map<Tensor, Optimizer> optimizers;

    public Model(Component component){
        this.component = component;
        // Random() already self-seeds from a high-entropy source; the old
        // new Random(new Date().getTime()) round-trip through java.util.Date was redundant.
        this.random = new Random();
        this.optimizers = new HashMap<>();
    }

    /**
     * Runs one training step on a mini-batch: forward pass, loss computation,
     * reverse-mode gradient propagation over the recorded computation graph,
     * and an optimizer update of every trainable parameter tensor.
     *
     * @param X                batch of input samples (one Tensor[] per sample)
     * @param Y                batch of expected outputs, parallel to {@code X}
     * @param lossFunction     metric used as the training loss
     * @param optimizerBuilder factory producing one optimizer instance per parameter
     * @throws RuntimeException if the computation graph is inconsistent
     */
    public void train(List<Tensor[]> X, List<Tensor> Y, Metric lossFunction, OptimizerBuilder optimizerBuilder){
        // Forward pass. Kept sequential: the original created a thread pool here
        // that was never used and never shut down (a thread leak per call); the
        // half-written parallel submission code was removed with it.
        List<Tensor> outputs = new ArrayList<>(X.size());
        for (Tensor[] inputs : X){
            outputs.add(component.forward(inputs));
        }

        // Loss over the whole batch.
        Tensor loss = lossFunction.measure(Y, outputs);
        System.out.println(String.format("loss: %s", loss.get(0)));

        // Backward pass: seed the loss node with a gradient of ones, then walk the
        // graph with Kahn-style reference counting — a node is processed only after
        // every consumer has delivered its gradient.
        Layer.Node root = (Layer.Node) loss.getAttribute("node");
        Tensor rootGrad = new Tensor(loss.getShape());
        rootGrad.fill(1);
        root.grids.add(rootGrad);
        // ArrayDeque instead of PriorityQueue: PriorityQueue would require
        // Layer.Node to be Comparable (ClassCastException otherwise), and no
        // priority ordering is needed once enqueueing is deferred (see below).
        Queue<Layer.Node> queue = new ArrayDeque<>();
        queue.add(root);
        while (!queue.isEmpty()){
            Layer.Node node = queue.poll();
            // Invariant: all outgoing references consumed, at least one gradient received.
            if (!node.next.isEmpty() || node.grids.isEmpty()){
                throw new RuntimeException("计算图出错");
            }
            // Total gradient flowing into this node from all of its consumers.
            Tensor gradSum = sumGradients(node.grids);
            if (node.layer == null){
                // Leaf node holding a trainable parameter: apply the optimizer step.
                applyGradient(node.output, gradSum, optimizerBuilder);
            } else {
                // Interior node: push the gradient back through the layer to its inputs.
                Tensor[] gradX = node.layer.backward(node.context, gradSum, node.inputs);
                for (int i = 0; i < node.prev.size(); i++){
                    Layer.Node prev = node.prev.get(i);
                    if (prev != null && !prev.next.isEmpty()){
                        // Hand the upstream node its share of the gradient and
                        // consume one of its outgoing references.
                        prev.grids.add(gradX[i]);
                        prev.next.remove(0);
                        // Enqueue only once ALL consumers have reported, so the
                        // poll-time invariant above holds by construction. (The
                        // original enqueued eagerly, which could poll a node
                        // before its remaining gradients arrived.)
                        if (prev.next.isEmpty()){
                            queue.add(prev);
                        }
                    }
                }
            }
        }
    }

    /**
     * Element-wise sum of all gradient tensors accumulated on one node.
     * All tensors are assumed to share the shape of the first one.
     */
    private static Tensor sumGradients(List<Tensor> grads){
        Tensor total = new Tensor(grads.get(0).getShape());
        for (int i = 0; i < total.getSize(); i++){
            double sum = 0;
            for (Tensor grad : grads){
                sum += grad.getByLocation(i);
            }
            total.setByLocation(i, sum);
        }
        return total;
    }

    /** Applies one optimizer step to a parameter, lazily creating its optimizer. */
    private void applyGradient(Tensor parameter, Tensor gradient, OptimizerBuilder optimizerBuilder){
        // NOTE(review): assumes Tensor uses identity equals/hashCode so each
        // parameter object maps to its own optimizer — confirm Tensor does not
        // override equals by value.
        Optimizer optimizer = optimizers.computeIfAbsent(parameter, p -> optimizerBuilder.build());
        optimizer.optimize(parameter, gradient);
    }

    /**
     * Evaluates the model on a batch against a list of metrics.
     * <p>Not implemented yet — currently returns {@code null}. (The non-standard
     * capitalized method name is kept for compatibility with existing callers.)
     *
     * @param X       batch of input samples
     * @param Y       batch of expected outputs
     * @param metrics metrics to evaluate
     * @return one value per metric, in the same order; {@code null} until implemented
     */
    public List<Double> Test(List<Tensor[]> X, List<Tensor> Y, List<Metric> metrics){
        //TODO: implement evaluation (forward pass over X, then measure each metric against Y)
        return null;
    }

    /**
     * Runs a forward pass for a single sample.
     *
     * @param inputs input tensors for one sample
     * @return the model's output tensor
     */
    public Tensor predict(Tensor[] inputs){
        return component.forward(inputs);
    }

    /**
     * Serializes the model to the given stream. Not implemented yet.
     *
     * @param outputStream destination stream
     */
    public void save(OutputStream outputStream){
        //TODO: implement model serialization
    }

    /**
     * Loads model state from the given stream. Not implemented yet.
     *
     * @param inputStream source stream
     */
    public void load(InputStream inputStream){
        //TODO: implement model deserialization
    }
}
