#include "DumpLog.h"
#include "NetLayer.h"
#include "BaseFunByOneVar.h"
#include "NodeVarForArray.h"
#include "NodeVarForMatrix.h"
#include "NodeVarForValue.h"
#include "NodeVarForVector.h"

namespace Net
{

// Forwards the function type and display name straight to BaseFun;
// this subclass adds no state of its own.
BaseFunByOneVar::BaseFunByOneVar(FunType_E type, const std::string &name)
    : BaseFun(type, name)
{
}

// Out-of-line defaulted destructor: identical behavior to the previous empty
// body, but states the intent explicitly and keeps the definition anchored
// in this translation unit.
BaseFunByOneVar::~BaseFunByOneVar() = default;

/**
 * @brief Forward pass: applies this function element-wise to the layer's
 *        single input, writing the result into the layer's output node.
 *
 * NOTE(review): per the original contract, layer is not null-checked and
 * vecInputLayer is assumed to hold exactly one entry; input and output are
 * assumed to match exactly in type and size and to be non-empty.
 */
void BaseFunByOneVar::fun(std::shared_ptr<NetLayer> &layer)
{
    runLayerFun(layer->vecInputLayer.front(), layer->outLayer);
}

/**
 * @brief Derivative pass: propagates the derivative element-wise from the
 *        layer's single input node to the layer's output node.
 *
 * NOTE(review): per the original contract, layer is not null-checked and
 * vecInputLayer is assumed to hold exactly one entry; input and output are
 * assumed to match exactly in type and size and to be non-empty.
 */
void BaseFunByOneVar::diff(std::shared_ptr<NetLayer> &layer)
{
    runLayerDiff(layer->vecInputLayer.front(), layer->outLayer);
}

void BaseFunByOneVar::runLayerFun(std::shared_ptr<BaseNodeVar> &src, std::shared_ptr<BaseNodeVar> &dst)
{
    switch (src->getType())
    {
        case NodeVarType_E::Matrix_E:
        {
            Matrix_t &srcData = static_cast<NodeVarForMatrix*>(src.get())->value;
            Matrix_t &dstData = static_cast<NodeVarForMatrix*>(dst.get())->value;
            const std::size_t num = static_cast<std::size_t>(srcData.size());
            auto x = srcData.data();
            auto y = dstData.data();
            for (std::size_t i = 0; i != num; ++i, ++x, ++y)
            {
                *y = runFun(*x);
            }

            return;
        }
            
        case NodeVarType_E::Vector_E:
        {
            Vector_t &srcData = static_cast<NodeVarForVector*>(src.get())->value;
            Vector_t &dstData = static_cast<NodeVarForVector*>(dst.get())->value;
            const std::size_t num = static_cast<std::size_t>(srcData.size());
            auto x = srcData.data();
            auto y = dstData.data();
            for (std::size_t i = 0; i != num; ++i, ++x, ++y)
            {
                *y = runFun(*x);
            }

            return;
        }

        case NodeVarType_E::Array_E:
        {
            auto &srcData = static_cast<NodeVarForArray*>(src.get())->vecData;
            auto &dstData = static_cast<NodeVarForArray*>(dst.get())->vecData;
            for (std::size_t i = srcData.size(); i;)
            {
                --i;
                runLayerFun(srcData[i], dstData[i]);
            }

            return;
        }

        case NodeVarType_E::Value_E:
        {
            auto x = static_cast<NodeVarForValue*>(src.get());
            auto y = static_cast<NodeVarForValue*>(dst.get());
            y->value = runFun(x->value);
            return;
        }

        default:
        {
            LogError("不应该出现这种情况");
            return;
        }
    }
}

/**
 * @brief Derivative propagation (element-wise chain rule):
 *        dst.diffValue = runDiff(src.value) * src.diffValue,
 *        recursing through Array nodes.
 *
 * src and dst must have identical type and size (callers' contract; not
 * re-checked here).
 */
void BaseFunByOneVar::runLayerDiff(std::shared_ptr<BaseNodeVar> &src, std::shared_ptr<BaseNodeVar> &dst)
{
    switch (src->getType())
    {
        case NodeVarType_E::Matrix_E:
        {
            auto srcPtr           = static_cast<NodeVarForMatrix*>(src.get());
            Matrix_t &dstDiffData = static_cast<NodeVarForMatrix*>(dst.get())->diffValue;
            const std::size_t num = static_cast<std::size_t>(srcPtr->value.size());
            auto x = srcPtr->value.data();
            auto d = srcPtr->diffValue.data();
            auto y = dstDiffData.data();
            for (std::size_t i = 0; i != num; ++i, ++x, ++d, ++y)
            {
                *y = runDiff(*x) * (*d);
            }

            return;
        }

        case NodeVarType_E::Vector_E:
        {
            auto srcPtr           = static_cast<NodeVarForVector*>(src.get());
            Vector_t &dstDiffData = static_cast<NodeVarForVector*>(dst.get())->diffValue;
            const std::size_t num = static_cast<std::size_t>(srcPtr->value.size());
            auto x = srcPtr->value.data();
            auto d = srcPtr->diffValue.data();
            auto y = dstDiffData.data();
            for (std::size_t i = 0; i != num; ++i, ++x, ++d, ++y)
            {
                *y = runDiff(*x) * (*d);
            }

            return;
        }

        case NodeVarType_E::Array_E:
        {
            auto &srcData = static_cast<NodeVarForArray*>(src.get())->vecData;
            auto &dstData = static_cast<NodeVarForArray*>(dst.get())->vecData;
            for (std::size_t i = srcData.size(); i;)
            {
                --i;
                runLayerDiff(srcData[i], dstData[i]);
            }

            return;
        }

        case NodeVarType_E::Value_E:
        {
            auto x = static_cast<NodeVarForValue*>(src.get());
            auto y = static_cast<NodeVarForValue*>(dst.get());
            // BUG FIX: previously wrote y->value here. The Matrix_E and
            // Vector_E branches both write the destination's diffValue, so
            // the scalar branch must do the same or the derivative is lost
            // (and the forward value is clobbered).
            y->diffValue = runDiff(x->value) * x->diffValue;
            return;
        }

        default:
        {
            LogError("不应该出现这种情况");
            return;
        }
    }
}

}