/*
 Convolutional neural network main class implementation.
 Implements the full CNN training and inference pipeline,
 supporting multi-layer network architectures and parameter management.
*/
#include "ConvNeuralNetwork.h"
#include "LayerManager.h"
#include "FullConnectedLayer.h"
#include "ConvLayer.h"
#include "PoolingLayer.h"
#include "ActivationFunction.h"
#include "LossFunction.h"
#include <fstream>
#include <iostream>
#include <sstream>
#include <stdexcept>
// #include <Python.h>

using namespace std;

// Builds the network topology described by the given JSON config file (see init()).
ConvNeuralNetwork::ConvNeuralNetwork(const string& configFile) {
    init(configFile);
}
// Layers are held as shared_ptrs registered with the LayerManager singleton,
// so there is nothing to release here.
ConvNeuralNetwork::~ConvNeuralNetwork() {
}
void ConvNeuralNetwork::init(const string& configFile) {
    json jsonConfig;
    readJson(configFile, jsonConfig);
    Tensor<double, 3> inputFeatureMap(jsonConfig["model"]["input_shape"][0], jsonConfig["model"]["input_shape"][1], jsonConfig["model"]["input_shape"][2]);
    inputFeatureMap.setRandom();

    auto lossFunctionName = jsonConfig["traning"]["loss_function"];
    
    // Create layers, cnn structure：INPUT -> [[CONV]*N -> POOLING?]*M -> [FC]*K -> OUTPUT
    int layerIndex = 0;
    for (const auto& layer : jsonConfig["model"]["layers"]) {
        string type = layer["type"];
        if (layerIndex == 0) {
            if (type == "CONV") {
                auto convLayer = make_shared<ConvLayer>(
                    layerIndex,
                    LayerType::CONV,
                    inputFeatureMap.dimension(0),
                    inputFeatureMap.dimension(1),
                    inputFeatureMap.dimension(2),
                    layer["num_filters"],
                    layer["kernel_size"][0],
                    layer["kernel_size"][1],
                    layer["strides"][0],
                    layer["strides"][1],
                    layer["padding"],
                    layer["activation"],
                    lossFunctionName
                );
                convLayer->setInputFeatureMap(inputFeatureMap);
                LayerManager::getInstance().addLayer(convLayer);
            }
            else {
                throw invalid_argument("Invalid layer type of layer, " + to_string(layerIndex) + ", type: " + type);
            }
        }
        else {
            int prevLayerOutputWidth = 0;
            int prevLayerOutputHeight = 0;
            int prevLayerOutputChannels = 0;
            int prevLayerOutputSize = 0;
            const shared_ptr<Layer>& prevLayer = LayerManager::getInstance().getLayerByIndex(layerIndex - 1);
            if (prevLayer->getLayerType() == LayerType::CONV) {
                prevLayerOutputWidth = dynamic_pointer_cast<ConvLayer>(prevLayer)->getOutputWidth();
                prevLayerOutputHeight = dynamic_pointer_cast<ConvLayer>(prevLayer)->getOutputHeight();
                prevLayerOutputChannels = dynamic_pointer_cast<ConvLayer>(prevLayer)->getOutputChannels();
                prevLayerOutputSize = prevLayerOutputWidth * prevLayerOutputHeight * prevLayerOutputChannels;
            }
            else if (prevLayer->getLayerType() == LayerType::POOLING) {
                prevLayerOutputWidth = dynamic_pointer_cast<PoolingLayer>(prevLayer)->getOutputWidth();
                prevLayerOutputHeight = dynamic_pointer_cast<PoolingLayer>(prevLayer)->getOutputHeight();
                prevLayerOutputChannels = dynamic_pointer_cast<PoolingLayer>(prevLayer)->getOutputChannels();
                prevLayerOutputSize = prevLayerOutputWidth * prevLayerOutputHeight * prevLayerOutputChannels;
            }
            else if (prevLayer->getLayerType() == LayerType::FC) {
                prevLayerOutputSize = dynamic_pointer_cast<FullConnectedLayer>(prevLayer)->getOutputSize();
            }
            else {
                throw invalid_argument("Invalid layer type of layer, " + to_string(layerIndex) + ", type: " + type);
            }
            if (type == "CONV") {
                auto convLayer = make_shared<ConvLayer>(
                    layerIndex,
                    LayerType::CONV,
                    prevLayerOutputWidth,
                    prevLayerOutputHeight,
                    prevLayerOutputChannels,
                    layer["num_filters"],
                    layer["kernel_size"][0],
                    layer["kernel_size"][1],
                    layer["strides"][0],
                    layer["strides"][1],
                    layer["padding"],
                    layer["activation"],
                    lossFunctionName
                );
                LayerManager::getInstance().addLayer(convLayer);
            }
            if (type == "POOLING") {
                PoolingType poolingType = PoolingType::MAX_POOLING;
                if (layer["pooling_type"] == "MAX_POOLING") {
                    poolingType = PoolingType::MAX_POOLING;
                } else if (layer["pooling_type"] == "MIN_POOLING") {
                    poolingType = PoolingType::MIN_POOLING;
                } else if (layer["pooling_type"] == "AVG_POOLING") {
                    poolingType = PoolingType::AVG_POOLING;
                } else {
                    throw invalid_argument("Invalid pooling type of layer, " + to_string(layerIndex) + ", pooling_type: " + to_string(layer["pooling_type"]));
                }
                auto poolingLayer = make_shared<PoolingLayer>(
                    layerIndex,
                    LayerType::POOLING,
                    prevLayerOutputWidth,
                    prevLayerOutputHeight,
                    prevLayerOutputChannels,
                    poolingType,
                    layer["pooling_size"][0],
                    layer["pooling_size"][1],
                    0
                );
                LayerManager::getInstance().addLayer(poolingLayer);
            }
            if (type == "FC") {
                auto fullConnectedLayer = make_shared<FullConnectedLayer>(
                    layerIndex,
                    LayerType::FC,
                    prevLayerOutputSize,
                    layer["units"]
                );
                LayerManager::getInstance().addLayer(fullConnectedLayer);
            }
        }
        ++layerIndex;
    }
    // Connect layers
    LayerManager::getInstance().connectLayers();
}
void ConvNeuralNetwork::train(const Tensor<double, 3>& input, const Tensor<double, 1>& target, const int batchSize, const int epochs, const double learningRate, const double momentum) {
    if (input.getSize() != target.getSize()) {
        throw invalid_argument("Input and target must have the same size");
    }
    for (int i = 0; i < epochs; ++i) {
        for (int j = 0; j < input.dimension(0); ++j) {
            forward(input.getData()[j]);
            backward(target.getData()[j]);
            if (j % batchSize == 0) {
                update(learningRate, momentum);
            }
        }
    }
}
// Runs a single forward pass and copies the final FC layer's activations into output.
// Throws invalid_argument if the last layer is not fully connected.
void ConvNeuralNetwork::predict(const Tensor<double, 3>& input, Tensor<double, 1>& output) {
    forward(input);
    const int lastIndex = LayerManager::getInstance().getNumLayers() - 1;
    auto outputLayer = LayerManager::getInstance().getLayerByIndex(lastIndex);
    if (outputLayer->getLayerType() != LayerType::FC) {
        throw invalid_argument("Prev layer is not output layer");
    }
    output = dynamic_pointer_cast<FullConnectedLayer>(outputLayer)->getOutputFeatureMap();
}
// Propagates `input` through every layer in order. Each layer consumes the
// previous layer's output feature map (flattened via getOutputFeatureMapForFc
// when feeding an FC layer). Throws on empty networks, null layers, and
// unsupported layer-type adjacencies.
void ConvNeuralNetwork::forward(const Tensor<double, 3>& input) {
    int numLayers = LayerManager::getInstance().getNumLayers();
    if (numLayers == 0) {
        throw runtime_error("No layers in the network");
    }
    for (int i = 0; i < numLayers; ++i) {
        auto layer = LayerManager::getInstance().getLayerByIndex(i);
        // BUG FIX: the null check must come before any dereference — the original
        // called layer->getPrevLayer() first.
        if (layer == nullptr) {
            throw invalid_argument("Layer " + to_string(i) + " is null");
        }
        if (i == 0) {
            // The first layer receives the raw input and must be convolutional.
            auto firstConv = dynamic_pointer_cast<ConvLayer>(layer);
            if (firstConv == nullptr) {
                throw invalid_argument("Invalid layer type of layer, 0: first layer must be CONV");
            }
            firstConv->forward(input);
            continue;
        }
        auto prevLayer = layer->getPrevLayer();
        if (prevLayer == nullptr) {
            throw invalid_argument("Prev layer of layer " + to_string(i) + " is null");
        }
        if (layer->getLayerType() == LayerType::CONV) {
            if (prevLayer->getLayerType() == LayerType::CONV) {
                dynamic_pointer_cast<ConvLayer>(layer)->forward(dynamic_pointer_cast<ConvLayer>(prevLayer)->getOutputFeatureMap());
            } else if (prevLayer->getLayerType() == LayerType::POOLING) {
                dynamic_pointer_cast<ConvLayer>(layer)->forward(dynamic_pointer_cast<PoolingLayer>(prevLayer)->getOutputFeatureMap());
            } else {
                throw invalid_argument("Invalid layer type of the prev layer of layer, " + to_string(i) + ", type: " + to_string(prevLayer->getLayerType()));
            }
        }
        else if (layer->getLayerType() == LayerType::POOLING) {
            // Pooling only ever follows a convolution in this architecture.
            if (prevLayer->getLayerType() == LayerType::CONV) {
                dynamic_pointer_cast<PoolingLayer>(layer)->forward(dynamic_pointer_cast<ConvLayer>(prevLayer)->getOutputFeatureMap());
            } else {
                throw invalid_argument("Invalid layer type of the prev layer of layer, " + to_string(i) + ", type: " + to_string(prevLayer->getLayerType()));
            }
        }
        else if (layer->getLayerType() == LayerType::FC) {
            // FC input is the flattened feature map of the spatial predecessor.
            if (prevLayer->getLayerType() == LayerType::CONV) {
                dynamic_pointer_cast<FullConnectedLayer>(layer)->forward(dynamic_pointer_cast<ConvLayer>(prevLayer)->getOutputFeatureMapForFc());
            } else if (prevLayer->getLayerType() == LayerType::POOLING) {
                dynamic_pointer_cast<FullConnectedLayer>(layer)->forward(dynamic_pointer_cast<PoolingLayer>(prevLayer)->getOutputFeatureMapForFc());
            } else {
                throw invalid_argument("Invalid layer type of the prev layer of layer, " + to_string(i) + ", type: " + to_string(prevLayer->getLayerType()));
            }
        }
        else {
            throw invalid_argument("Invalid layer type of layer, " + to_string(i) + ", type: " + to_string(layer->getLayerType()));
        }
    }
}
void ConvNeuralNetwork::backward(const Tensor<double, 1>& target) {
    int numLayers = LayerManager::getInstance().getNumLayers();
    if (numLayers == 0) {
        throw runtime_error("No layers in the network");
    }
    for (int i = numLayers - 1; i >= 0; --i) {
        auto layer = LayerManager::getInstance().getLayerByIndex(i);
        if (layer == nullptr) {
            throw invalid_argument("Layer " + to_string(i) + " is null");
        }
        auto nextLayer = layer->getNextLayer();
        if (layer->getLayerType() == LayerType::CONV) {
            if (nextLayer->getLayerType() == LayerType::POOLING) {
                dynamic_pointer_cast<ConvLayer>(layer)->backward(dynamic_pointer_cast<PoolingLayer>(nextLayer)->getOutputsGradient());
            }
            else if (nextLayer->getLayerType() == LayerType::FC) {
                Tensor<double, 1> nextFcLayerOutputsGradient = dynamic_pointer_cast<FullConnectedLayer>(layer)->getOutputsGradient();
                Tensor<double, 3> nextFcLayerOutputsGradient3D = nextFcLayerOutputsGradient.reshape(dynamic_pointer_cast<PoolingLayer>(layer)->getOutputWidth(), dynamic_pointer_cast<PoolingLayer>(layer)->getOutputHeight(), dynamic_pointer_cast<PoolingLayer>(layer)->getOutputChannels());
                dynamic_pointer_cast<ConvLayer>(layer)->backward(nextFcLayerOutputsGradient3D);
            } else {
                throw invalid_argument("Invalid layer type of the next layer of layer, " + to_string(i) + ", type: " + to_string(nextLayer->getLayerType()));
            }
        }
        else if (layer->getLayerType() == LayerType::POOLING) {
            if (nextLayer->getLayerType() == LayerType::FC) {
                Tensor<double, 1> nextFcLayerOutputsGradient = dynamic_pointer_cast<FullConnectedLayer>(layer)->getOutputsGradient();
                Tensor<double, 3> nextFcLayerOutputsGradient3D = nextFcLayerOutputsGradient.reshape(dynamic_pointer_cast<PoolingLayer>(layer)->getOutputWidth(), dynamic_pointer_cast<PoolingLayer>(layer)->getOutputHeight(), dynamic_pointer_cast<PoolingLayer>(layer)->getOutputChannels());

                dynamic_pointer_cast<PoolingLayer>(layer)->backward(nextFcLayerOutputsGradient3D);
            } 
            else if (nextLayer->getLayerType() == LayerType::CONV) {
                dynamic_pointer_cast<PoolingLayer>(layer)->backward(dynamic_pointer_cast<ConvLayer>(nextLayer)->getOutputsGradient());
            } else {
                throw invalid_argument("Invalid layer type of the next layer of layer, " + to_string(i) + ", type: " + to_string(nextLayer->getLayerType()));
            }
        }
        else if (layer->getLayerType() == LayerType::FC) {
            if (i == numLayers - 1) {
                dynamic_pointer_cast<FullConnectedLayer>(layer)->backward(target);
            } else {
                dynamic_pointer_cast<FullConnectedLayer>(layer)->backward(dynamic_pointer_cast<FullConnectedLayer>(nextLayer)->getOutputsGradient());
            }
        }
        else {
            throw invalid_argument("Invalid layer type of layer, " + to_string(i) + ", type: " + to_string(layer->getLayerType()));
        }
    }
}
// Applies the accumulated gradients to every trainable layer's weights and
// biases, walking from the output layer back toward the input layer.
// Pooling layers carry no trainable parameters and are skipped.
void ConvNeuralNetwork::update(double learningRate, double momentum) {
    const int numLayers = LayerManager::getInstance().getNumLayers();
    if (numLayers == 0) {
        throw runtime_error("No layers in the network");
    }
    for (int idx = numLayers - 1; idx >= 0; --idx) {
        const auto currentLayer = LayerManager::getInstance().getLayerByIndex(idx);
        switch (currentLayer->getLayerType()) {
            case LayerType::CONV: {
                const auto conv = dynamic_pointer_cast<ConvLayer>(currentLayer);
                conv->updateWeights(learningRate, momentum);
                conv->updateBiases(learningRate, momentum);
                break;
            }
            case LayerType::POOLING:
                // Nothing to update — pooling has no weights or biases.
                break;
            case LayerType::FC: {
                const auto fc = dynamic_pointer_cast<FullConnectedLayer>(currentLayer);
                fc->updateWeights(learningRate, momentum);
                fc->updateBiases(learningRate, momentum);
                break;
            }
            default:
                throw invalid_argument("Invalid layer type of layer, " + to_string(idx) + ", type: " + to_string(currentLayer->getLayerType()));
        }
    }
}
// Prints every layer's state in network order, dispatching on layer type.
// Throws runtime_error for an empty network and invalid_argument for an
// unrecognized layer type.
void ConvNeuralNetwork::print() {
    const int numLayers = LayerManager::getInstance().getNumLayers();
    if (numLayers == 0) {
        throw runtime_error("No layers in the network");
    }
    for (int idx = 0; idx < numLayers; ++idx) {
        const auto currentLayer = LayerManager::getInstance().getLayerByIndex(idx);
        switch (currentLayer->getLayerType()) {
            case LayerType::CONV:
                dynamic_pointer_cast<ConvLayer>(currentLayer)->print();
                break;
            case LayerType::POOLING:
                dynamic_pointer_cast<PoolingLayer>(currentLayer)->print();
                break;
            case LayerType::FC:
                dynamic_pointer_cast<FullConnectedLayer>(currentLayer)->print();
                break;
            default:
                throw invalid_argument("Invalid layer type of layer, " + to_string(idx) + ", type: " + to_string(currentLayer->getLayerType()));
        }
    }
}
// Prints the state of the single layer at `layerIndex`, dispatching on its type.
// Throws invalid_argument for an unrecognized layer type.
void ConvNeuralNetwork::printLayer(int layerIndex) {
    const auto targetLayer = LayerManager::getInstance().getLayerByIndex(layerIndex);
    switch (targetLayer->getLayerType()) {
        case LayerType::CONV:
            dynamic_pointer_cast<ConvLayer>(targetLayer)->print();
            break;
        case LayerType::POOLING:
            dynamic_pointer_cast<PoolingLayer>(targetLayer)->print();
            break;
        case LayerType::FC:
            dynamic_pointer_cast<FullConnectedLayer>(targetLayer)->print();
            break;
        default:
            throw invalid_argument("Invalid layer type of layer, " + to_string(layerIndex) + ", type: " + to_string(targetLayer->getLayerType()));
    }
}