/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package main;

/**
 * Core neural-network class. Skeleton implemented by the architect; the
 * remaining pieces are filled in collaboratively.
 *
 * @author ZSQ
 */

import java.util.ArrayList;
import java.util.List;

/**
 * Core feed-forward neural network composed of fully connected layers.
 *
 * <p>Depends on the project types {@code Layer}, {@code Neuron},
 * {@code ActivationFunction} and {@code LossFunction}. Not thread-safe.
 */
public class NeuralNetwork {
    /** Trainable layers; layers[0] consumes the raw input vector. */
    private final Layer[] layers;
    /** Loss used by {@link #calculateLoss(double[][], double[][])}. */
    private final LossFunction lossFunction;

    /**
     * Builds a network with the given topology.
     *
     * @param layerSizes   neuron counts per layer, including the input layer;
     *                     must contain at least two entries
     * @param activation   either a single {@code ActivationFunction} shared by
     *                     all layers, or an {@code ActivationFunction[]} with
     *                     one entry per constructed layer
     * @param lossFunction loss function used for evaluation
     * @throws IllegalArgumentException if {@code layerSizes} is null or shorter
     *         than 2, if the activation argument is of an unsupported type, or
     *         if the activation array length does not match the layer count
     */
    public NeuralNetwork(int[] layerSizes, Object activation, LossFunction lossFunction) {
        // FIX: previously a too-short layerSizes array either threw an opaque
        // NegativeArraySizeException or silently produced an empty network.
        if (layerSizes == null || layerSizes.length < 2) {
            throw new IllegalArgumentException("layerSizes 至少需要包含两个元素（输入层和输出层）");
        }
        this.lossFunction = lossFunction;
        layers = new Layer[layerSizes.length - 1];

        if (activation instanceof ActivationFunction) {
            // One activation function shared by every layer.
            ActivationFunction actFunc = (ActivationFunction) activation;
            for (int i = 0; i < layers.length; i++) {
                layers[i] = new Layer(layerSizes[i + 1], layerSizes[i], actFunc);
            }
        } else if (activation instanceof ActivationFunction[]) {
            // Per-layer activation functions; one entry per constructed layer.
            ActivationFunction[] actFuncs = (ActivationFunction[]) activation;
            if (actFuncs.length != layers.length) {
                throw new IllegalArgumentException("激活函数数量必须等于网络层数");
            }
            for (int i = 0; i < layers.length; i++) {
                layers[i] = new Layer(layerSizes[i + 1], layerSizes[i], actFuncs[i]);
            }
        } else {
            throw new IllegalArgumentException("激活函数参数类型不正确");
        }
    }

    /**
     * Forward pass: feeds {@code inputs} through every layer in order.
     *
     * @param inputs activation values for the input layer
     * @return the output layer's activations
     */
    public double[] predict(double[] inputs) {
        double[] currentOutput = inputs;
        for (Layer layer : layers) {
            currentOutput = layer.calculateOutputs(currentOutput);
        }
        return currentOutput;
    }

    /**
     * Computes the mean loss over a dataset.
     *
     * @param trainingData one input vector per row
     * @param targets      expected output per row, parallel to trainingData
     * @return the average per-sample loss
     */
    public double calculateLoss(double[][] trainingData, double[][] targets) {
        double totalLoss = 0.0;
        for (int i = 0; i < trainingData.length; i++) {
            double[] prediction = predict(trainingData[i]);
            totalLoss += lossFunction.calculate(prediction, targets[i]);
        }
        return totalLoss / trainingData.length;
    }

    /** @return the internal layer array (not a copy; mutations affect the network) */
    public Layer[] getLayers() { return layers; }

    /** @return the configured loss function */
    public LossFunction getLossFunction() { return lossFunction; }

    /**
     * Renders every weight and bias as a human-readable string, one neuron
     * per line, values formatted with six decimal places.
     *
     * @return multi-line description of all trainable parameters
     */
    public String getWeightsString() {
        StringBuilder sb = new StringBuilder();
        sb.append("神经网络权重参数:\n");

        for (int i = 0; i < layers.length; i++) {
            sb.append("第").append(i + 1).append("层:\n");
            Neuron[] neurons = layers[i].getNeurons();

            for (int j = 0; j < neurons.length; j++) {
                sb.append("  神经元").append(j + 1).append(": 权重[");
                double[] weights = neurons[j].getWeights();
                for (int k = 0; k < weights.length; k++) {
                    sb.append(String.format("%.6f", weights[k]));
                    if (k < weights.length - 1) sb.append(", ");
                }
                sb.append("], 偏置: ").append(String.format("%.6f", neurons[j].getBias())).append("\n");
            }
        }

        return sb.toString();
    }

    /**
     * Flattens all weights and biases into one array (layer by layer, neuron
     * by neuron; each neuron's weights followed by its bias). Useful for
     * snapshotting and comparing parameter states.
     *
     * @return all trainable parameters in a primitive array
     */
    public double[] getFlattenedWeights() {
        // Count first so the primitive array can be filled directly,
        // avoiding the Double boxing of the previous List-based version.
        int total = 0;
        for (Layer layer : layers) {
            for (Neuron neuron : layer.getNeurons()) {
                total += neuron.getWeights().length + 1; // +1 for the bias
            }
        }

        double[] result = new double[total];
        int idx = 0;
        for (Layer layer : layers) {
            for (Neuron neuron : layer.getNeurons()) {
                for (double weight : neuron.getWeights()) {
                    result[idx++] = weight;
                }
                result[idx++] = neuron.getBias();
            }
        }
        return result;
    }

    /**
     * Prints the network topology to stdout.
     *
     * <p>FIX: the topology string was hard-coded as "(2-4-4-1)" even though the
     * constructor accepts arbitrary layer sizes; it is now derived from the
     * actual layers (input size taken from the first layer's weight count).
     */
    public void printNetworkInfo() {
        StringBuilder structure = new StringBuilder();
        structure.append(layers[0].getNeurons()[0].getWeights().length);
        for (Layer layer : layers) {
            structure.append('-').append(layer.getNeuronCount());
        }
        System.out.println("神经网络结构 (" + structure + "):");
        for (int i = 0; i < layers.length; i++) {
            System.out.printf("第%d层: %d个神经元\n", i + 1, layers[i].getNeuronCount());
        }
    }
}