import numpy as np
import pytest
from layer import Layer, Sigmoid, ReLU, Affine, SoftmaxWithLoss, softmax, cross_entropy_error

def test_sigmoid_forward():
    """Sigmoid.forward should match 1 / (1 + exp(-x)) elementwise."""
    inputs = np.array([-1.0, 0.0, 1.0])
    reference = 1.0 / (1.0 + np.exp(-inputs))
    layer = Sigmoid()
    np.testing.assert_array_almost_equal(layer.forward(inputs), reference)

def test_sigmoid_backward():
    """Sigmoid.backward should return dout * y * (1 - y) from the cached output."""
    layer = Sigmoid()
    inputs = np.array([-1.0, 0.0, 1.0])
    y = layer.forward(inputs)
    upstream = np.ones(3)
    grad = layer.backward(upstream)
    np.testing.assert_array_almost_equal(grad, upstream * y * (1.0 - y))

def test_relu_forward():
    """ReLU.forward should zero out non-positive entries and pass positives through."""
    layer = ReLU()
    result = layer.forward(np.array([-2.0, 0.0, 2.0]))
    np.testing.assert_array_almost_equal(result, np.array([0.0, 0.0, 2.0]))

def test_relu_backward():
    """ReLU.backward should block the gradient where the forward input was <= 0."""
    layer = ReLU()
    layer.forward(np.array([-2.0, 0.0, 2.0]))
    grad = layer.backward(np.ones(3))
    np.testing.assert_array_almost_equal(grad, np.array([0.0, 0.0, 1.0]))

def test_affine_forward():
    """Affine.forward should compute x @ W.T + b for a single sample."""
    weights = np.array([[1, 2, 3], [4, 5, 6]])  # shape (2, 3)
    bias = np.array([7, 8])                     # shape (2,)
    sample = np.array([[1, 2, 3]])              # shape (1, 3)

    layer = Affine(weights, bias)
    result = layer.forward(sample)
    # [1*1 + 2*2 + 3*3, 1*4 + 2*5 + 3*6] + [7, 8] = [14 + 7, 32 + 8]
    np.testing.assert_array_almost_equal(result, np.array([[21, 40]]))

def test_affine_backward():
    """Affine.backward should propagate dout @ W back to the input side."""
    weights = np.array([[1, 2, 3], [4, 5, 6]])  # shape (2, 3)
    bias = np.array([7, 8])                     # shape (2,)
    sample = np.array([[1, 2, 3]])              # shape (1, 3)

    layer = Affine(weights, bias)
    layer.forward(sample)

    grad = layer.backward(np.array([[1, 1]]))
    # dout @ W = [1+4, 2+5, 3+6] = [5, 7, 9]
    np.testing.assert_array_almost_equal(grad, np.array([[5, 7, 9]]))

def test_softmax():
    """softmax output should be a row-wise probability distribution."""
    logits = np.array([[1, 2, 3], [4, 5, 6]])
    probs = softmax(logits)

    # Shape is preserved; every row sums to one; all entries lie in [0, 1].
    assert probs.shape == logits.shape
    np.testing.assert_array_almost_equal(np.sum(probs, axis=1), np.array([1, 1]))
    assert np.all(probs >= 0)
    assert np.all(probs <= 1)

def test_cross_entropy_error():
    """cross_entropy_error with an index label should equal -log(p[label])."""
    probs = np.array([[0.1, 0.8, 0.1]])
    labels = np.array([1])  # second class is the correct one
    loss = cross_entropy_error(probs, labels)
    np.testing.assert_almost_equal(loss, -np.log(0.8))

def test_softmax_with_loss():
    """SoftmaxWithLoss forward/backward shape checks with an index label."""
    layer = SoftmaxWithLoss()
    logits = np.array([[1, 2, 3]])
    labels = np.array([1])  # second class is the correct one

    # NOTE(review): these checks assume forward returns the softmax
    # probabilities (not a scalar loss) — verify against layer.py.
    probs = layer.forward(logits, labels)
    assert probs.shape == logits.shape
    np.testing.assert_array_almost_equal(np.sum(probs, axis=1), np.array([1]))

    # Gradient w.r.t. the logits must match their shape.
    grad = layer.backward()
    assert grad.shape == logits.shape

def test_softmax_with_loss_one_hot():
    """SoftmaxWithLoss forward/backward shape checks with a one-hot target."""
    layer = SoftmaxWithLoss()
    logits = np.array([[1, 2, 3]])
    targets = np.array([[0, 1, 0]])  # one-hot: second class is the correct one

    # NOTE(review): these checks assume forward returns the softmax
    # probabilities (not a scalar loss) — verify against layer.py.
    probs = layer.forward(logits, targets)
    assert probs.shape == logits.shape
    np.testing.assert_array_almost_equal(np.sum(probs, axis=1), np.array([1]))

    # Gradient w.r.t. the logits must match their shape.
    grad = layer.backward()
    assert grad.shape == logits.shape

if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`,
    # not only through the pytest CLI.
    pytest.main([__file__]) 