import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

import numpy as np
from ml_lib.core import Tensor
from ml_lib.nn.module import Module
from ml_lib.nn.layers import Linear, ReLU
from ml_lib.nn.loss import CrossEntropyLoss, softmax
from ml_lib.optim import SGD

def test_cross_entropy():
    """Verify CrossEntropyLoss gradients against a manual NumPy reference.

    Runs a small logits batch forward/backward through the library's loss,
    then recomputes the expected gradient ``(softmax - one_hot) / batch``
    with plain NumPy and checks that the two agree element-wise.

    Returns:
        bool-like: True when every gradient entry matches within 1e-5.
    """
    # Two samples, three classes; sample 0 -> class 0, sample 1 -> class 2.
    logits = Tensor(np.array([[2.0, 1.0, 0.1], [0.1, 1.0, 3.0]]), requires_grad=True)
    targets = Tensor(np.array([0, 2]))

    # Forward pass through the library's loss implementation.
    criterion = CrossEntropyLoss()
    loss = criterion(logits, targets)

    print(f"Loss: {loss.data}")

    # Backward pass should populate logits.grad.
    loss.backward()

    # Reference softmax, using max-subtraction for numerical stability.
    shifted = logits.data - np.max(logits.data, axis=1, keepdims=True)
    exps = np.exp(shifted)
    probs = exps / exps.sum(axis=1, keepdims=True)

    # One-hot encode the targets via fancy indexing instead of a Python loop.
    n_samples, n_classes = logits.data.shape
    one_hot = np.zeros((n_samples, n_classes))
    one_hot[np.arange(n_samples), targets.data.astype(int)] = 1

    # Expected gradient of the mean cross-entropy w.r.t. the logits.
    expected_grad = (probs - one_hot) / n_samples

    print("Actual gradients:")
    print(logits.grad)
    print("\nExpected gradients:")
    print(expected_grad)

    # Element-wise absolute-difference tolerance check.
    is_close = np.all(np.abs(logits.grad - expected_grad) < 1e-5)
    print(f"\n梯度实现是否正确: {is_close}")

    return is_close

def test_gradients_with_model():
    """End-to-end gradient check on a tiny MLP: forward, backward, SGD step.

    Builds a 2-3-2 network, takes one optimizer step on a fixed batch, and
    reports whether the loss decreased. Intermediate tensors, requires_grad
    flags, and parameter gradients are printed for manual inspection.

    Returns:
        bool-like: True when the post-update loss is below the initial loss.
    """
    class SimpleModel(Module):
        # Minimal two-layer perceptron: Linear -> ReLU -> Linear.
        def __init__(self):
            super().__init__()
            self.fc1 = Linear(2, 3)
            self.relu = ReLU()
            self.fc2 = Linear(3, 2)

        def forward(self, inp):
            return self.fc2(self.relu(self.fc1(inp)))

    model = SimpleModel()
    optimizer = SGD(model.parameters(), lr=0.1)

    # Fixed two-sample batch with integer class targets.
    x = Tensor(np.array([[0.1, 0.2], [0.3, 0.4]]), requires_grad=True)
    y = Tensor(np.array([0, 1]), requires_grad=True)

    print("\n初始参数:")
    for name, param in model.named_parameters():
        print(f"{name}: {param.data}")

    # Forward pass and loss on the initial weights.
    outputs = model(x)
    criterion = CrossEntropyLoss()
    loss = criterion(outputs, y)

    print(f"\n初始损失: {loss.data}")

    # Clear any stale gradients, then backpropagate.
    optimizer.zero_grad()
    loss.backward()

    print("\n输出梯度检查:")
    print(f"outputs.requires_grad: {outputs.requires_grad}")
    print(f"outputs.grad: {outputs.grad if hasattr(outputs, 'grad') else None}")

    # Re-run each layer separately to inspect requires_grad propagation.
    print("\n检查网络中各层的输出:")
    x_fc1 = model.fc1(x)
    x_relu = model.relu(x_fc1)
    x_fc2 = model.fc2(x_relu)

    print(f"x_fc1.requires_grad: {x_fc1.requires_grad}")
    print(f"x_relu.requires_grad: {x_relu.requires_grad}")
    print(f"x_fc2.requires_grad: {x_fc2.requires_grad}")

    # Dump each parameter's gradient state before the update.
    print("\n参数梯度:")
    for name, param in model.named_parameters():
        print(f"{name} data type: {type(param.data)}")
        print(f"{name} requires_grad: {param.data.requires_grad if hasattr(param.data, 'requires_grad') else 'N/A'}")
        print(f"{name} grad: {param.grad}")
        if hasattr(param.data, 'grad'):
            print(f"{name} data.grad: {param.data.grad}")
        else:
            print(f"{name} data.grad: None")

    # Apply one SGD step.
    optimizer.step()

    print("\n更新后的参数:")
    for name, param in model.named_parameters():
        print(f"{name}: {param.data}")

    # Second forward pass with the updated weights.
    new_outputs = model(x)
    new_loss = criterion(new_outputs, y)

    print(f"\n更新后的损失: {new_loss.data}")

    return new_loss.data < loss.data

if __name__ == "__main__":
    print("测试交叉熵损失实现...")
    test_cross_entropy()
    
    print("\n测试完整模型梯度更新...")
    if test_gradients_with_model():
        print("测试通过！梯度更新正确，损失减小。")
    else:
        print("测试失败！梯度更新后损失没有减小。")