/*
RNN测试套件
包含基本功能测试、序列处理测试和反向传播测试
验证循环神经网络的正确性和性能
*/
#include "../include/RecurrentLayer.h"
#include <chrono>
#include <cmath>
#include <iomanip>
#include <iostream>
#include <vector>

using namespace std;

// 测试RNN基本功能
void testBasicFunctionality() {
    cout << "=== 测试RNN基本功能 ===" << endl;
    
    // 创建RNN层：输入大小3，隐藏层大小4，输出大小2
    RecurrentLayer rnn(1, 3, 4, 2);
    
    // 测试前向传播
    Tensor<double, 1> input(3);
    input(0) = 0.1;
    input(1) = 0.2;
    input(2) = 0.3;
    
    Tensor<double, 1> output = rnn.forward(input);
    
    cout << "输入: [" << input(0) << ", " << input(1) << ", " << input(2) << "]" << endl;
    cout << "输出: [" << output(0) << ", " << output(1) << "]" << endl;
    
    // 测试隐藏状态
    const Tensor<double, 1>& hiddenState = rnn.getHiddenState();
    cout << "隐藏状态: [";
    for (int i = 0; i < 4; i++) {
        cout << hiddenState(i);
        if (i < 3) cout << ", ";
    }
    cout << "]" << endl;
    
    cout << "基本功能测试通过!" << endl << endl;
}

// 测试序列处理
void testSequenceProcessing() {
    cout << "=== 测试序列处理 ===" << endl;
    
    RecurrentLayer rnn(1, 2, 3, 1);
    
    // 创建一个简单序列
    vector<Tensor<double, 1>> sequence = {
        Tensor<double, 1>({0.1, 0.2}),
        Tensor<double, 1>({0.3, 0.4}),
        Tensor<double, 1>({0.5, 0.6})
    };
    
    cout << "处理序列:" << endl;
    for (size_t t = 0; t < sequence.size(); t++) {
        Tensor<double, 1> output = rnn.forward(sequence[t]);
        const Tensor<double, 1>& hiddenState = rnn.getHiddenState();
        
        cout << "时间步 " << t << ": " << endl;
        cout << "  输入: [" << sequence[t](0) << ", " << sequence[t](1) << "]" << endl;
        cout << "  输出: [" << output(0) << "]" << endl;
        cout << "  隐藏状态: [";
        for (int i = 0; i < 3; i++) {
            cout << hiddenState(i);
            if (i < 2) cout << ", ";
        }
        cout << "]" << endl;
    }
    
    cout << "序列处理测试通过!" << endl << endl;
}

// 测试反向传播
void testBackwardPropagation() {
    cout << "=== 测试反向传播 ===" << endl;
    
    RecurrentLayer rnn(1, 1, 2, 1);
    
    // 前向传播
    Tensor<double, 1> input(1);
    input(0) = 0.5;
    Tensor<double, 1> output = rnn.forward(input);
    
    cout << "前向传播结果:" << endl;
    cout << "输入: [" << input(0) << "]" << endl;
    cout << "输出: [" << output(0) << "]" << endl;
    
    // 反向传播
    Tensor<double, 1> target(1);
    target(0) = 0.8;  // 目标值
    
    cout << "目标: [" << target(0) << "]" << endl;
    cout << "初始误差: " << abs(output(0) - target(0)) << endl;
    
    // 进行几次训练迭代
    int epochs = 10;
    for (int epoch = 0; epoch < epochs; epoch++) {
        // 重置状态（可选，取决于任务）
        if (epoch % 3 == 0) {
            rnn.resetState();
        }
        
        // 前向传播
        output = rnn.forward(input);
        
        // 反向传播
        rnn.backward(target, 0.1);
        
        if (epoch % 2 == 0) {
            cout << "Epoch " << epoch << ": 输出=" << output(0) 
                 << ", 误差=" << abs(output(0) - target(0)) << endl;
        }
    }
    
    cout << "最终误差: " << abs(output(0) - target(0)) << endl;
    cout << "反向传播测试通过!" << endl << endl;
}

// 测试状态重置
void testStateReset() {
    cout << "=== 测试状态重置 ===" << endl;
    
    RecurrentLayer rnn(1, 1, 2, 1);
    
    // 处理几个时间步
    Tensor<double, 1> input(1);
    input(0) = 0.5;
    
    cout << "处理序列（不重置状态）:" << endl;
    for (int t = 0; t < 3; t++) {
        Tensor<double, 1> output = rnn.forward(input);
        cout << "时间步 " << t << ": 输出=" << output(0) << endl;
    }
    
    // 重置状态
    rnn.resetState();
    cout << "状态已重置" << endl;
    
    // 再次处理序列
    cout << "处理序列（重置状态后）:" << endl;
    for (int t = 0; t < 3; t++) {
        Tensor<double, 1> output = rnn.forward(input);
        cout << "时间步 " << t << ": 输出=" << output(0) << endl;
    }
    
    cout << "状态重置测试通过!" << endl << endl;
}

// Checks that all weight/bias accessors are reachable, reports their
// element counts, and prints a small sample of Wxh values.
void testParameterAccess() {
    cout << "=== 测试参数获取 ===" << endl;
    
    RecurrentLayer rnn(1, 2, 3, 1);
    
    // Borrow const references to every trainable parameter tensor.
    const Tensor<double, 2>& Wxh = rnn.getInputWeights();
    const Tensor<double, 2>& Whh = rnn.getHiddenWeights();
    const Tensor<double, 2>& Why = rnn.getOutputWeights();
    const Tensor<double, 1>& bh = rnn.getHiddenBiases();
    const Tensor<double, 1>& by = rnn.getOutputBiases();
    
    cout << "权重矩阵维度:" << endl;
    cout << "Wxh: " << Wxh.getSize() << " 个元素" << endl;
    cout << "Whh: " << Whh.getSize() << " 个元素" << endl;
    cout << "Why: " << Why.getSize() << " 个元素" << endl;
    cout << "偏置向量维度:" << endl;
    cout << "bh: " << bh.getSize() << " 个元素" << endl;
    cout << "by: " << by.getSize() << " 个元素" << endl;
    
    // Print a 2x2 sample of Wxh. The original bounds were written as
    // min(2, 3) and min(2, 2), which both constant-fold to 2 — spell
    // the intent out with named constants instead.
    const int sampleRows = 2;
    const int sampleCols = 2;
    cout << "Wxh矩阵示例值:" << endl;
    for (int i = 0; i < sampleRows; i++) {
        for (int j = 0; j < sampleCols; j++) {
            cout << "Wxh(" << i << "," << j << ") = " << Wxh(i, j) << endl;
        }
    }
    
    cout << "参数获取测试通过!" << endl << endl;
}

// Rough timing of the forward pass on a larger layer (input 10,
// hidden 20, output 5). Not a rigorous benchmark: single run, no
// warm-up, wall-clock only.
void testPerformance() {
    cout << "=== 性能测试 ===" << endl;
    
    RecurrentLayer rnn(1, 10, 20, 5);
    
    // Fixed input vector: 0.0, 0.1, ..., 0.9.
    Tensor<double, 1> input(10);
    for (int i = 0; i < 10; i++) {
        input(i) = 0.1 * i;
    }
    
    const int iterations = 100;
    // steady_clock is guaranteed monotonic, unlike high_resolution_clock
    // (which may alias system_clock and jump with wall-clock adjustments).
    auto start = chrono::steady_clock::now();
    
    for (int i = 0; i < iterations; i++) {
        rnn.forward(input);
    }
    
    auto end = chrono::steady_clock::now();
    auto duration = chrono::duration_cast<chrono::microseconds>(end - start);
    
    cout << "处理 " << iterations << " 次前向传播耗时: " 
         << duration.count() << " 微秒" << endl;
    cout << "平均每次前向传播耗时: " 
         << duration.count() / double(iterations) << " 微秒" << endl;
    
    cout << "性能测试完成!" << endl << endl;
}

// Test-suite entry point: runs every test case in order. Any exception
// thrown by a test aborts the suite with a non-zero exit code.
int main() {
    cout << "开始RNN测试..." << endl << endl;
    
    try {
        // Ordered table of test cases.
        void (*tests[])() = {
            testBasicFunctionality,
            testSequenceProcessing,
            testBackwardPropagation,
            testStateReset,
            testParameterAccess,
            testPerformance
        };
        for (auto runTest : tests) {
            runTest();
        }
        
        cout << "所有测试通过! RNN实现正常工作。" << endl;
        return 0;
    } catch (const exception& e) {
        cerr << "测试失败: " << e.what() << endl;
        return 1;
    }
}