import numpy as np



def affine_forward(x, w, b):
    """Compute the forward pass for an affine (fully-connected) layer.

    Inputs:
    - x: array of shape (N, d_1, ..., d_k); flattened per-sample to (N, D)
    - w: weights of shape (D, M)
    - b: biases of shape (M,)

    Returns a tuple of:
    - out: output of shape (N, M)
    - cache: (x, w, b) for the backward pass (x kept in its ORIGINAL shape,
      matching affine_backward, which reshapes dx back to x.shape)
    """
    # Flatten each sample to a row vector; for 2-D x this is a no-op view.
    # Without this, multi-dimensional inputs (e.g. conv feature maps) fail,
    # even though affine_backward already handles them.
    out = x.reshape(x.shape[0], -1).dot(w) + b
    cache = (x, w, b)
    return out, cache


def affine_backward(dout, cache):
    """Compute the backward pass for an affine (fully-connected) layer.

    Inputs:
    - dout: upstream derivative, shape (N, M)
    - cache: tuple (x, w, b) saved by affine_forward

    Returns a tuple of:
    - dx: gradient w.r.t. x, same shape as x
    - dw: gradient w.r.t. w, same shape as w
    - db: gradient w.r.t. b, shape (M,)
    """
    x, w, b = cache
    num_samples = x.shape[0]

    # Flatten each sample so the weight gradient is a plain matrix product.
    x_flat = x.reshape(num_samples, -1)
    dw = x_flat.T.dot(dout).reshape(w.shape)

    # Bias gradient: sum upstream gradients over the batch dimension.
    db = dout.sum(axis=0)

    # Input gradient, restored to the original (possibly multi-dim) shape.
    dx = dout.dot(w.T).reshape(x.shape)

    return dx, dw, db


def relu_forward(x):
    """Compute the forward pass for a ReLU activation.

    Input:
    - x: array of any shape

    Returns a tuple of:
    - out: elementwise max(0, x), same shape as x
    - cache: x, kept for the backward pass
    """
    # Zero out negative entries; positives pass through unchanged.
    out = np.maximum(x, 0)
    cache = x
    return out, cache


def relu_backward(dout, cache):
    """Compute the backward pass for a ReLU activation.

    Inputs:
    - dout: upstream derivative, any shape
    - cache: the input x saved by relu_forward, same shape as dout

    Returns:
    - dx: gradient w.r.t. x — dout where x > 0, zero elsewhere
    """
    x = cache
    # The ReLU gate is open only where the forward input was positive.
    dx = np.where(x > 0, dout, 0)
    return dx

# def dropout_forward(x, dropout_param):
#     p, mode = dropout_param["p"], dropout_param["mode"]
#     if "seed" in dropout_param:
#         np.random.seed(dropout_param["seed"])

#     mask = None
#     out = None

#     if mode == "train":
#         pass
#     elif mode == "test":
#         pass

#     cache = (dropout_param, mask)
#     out = out.astype(x.dtype, copy=False)

#     return out, cache


# def dropout_backward(dout, cache):
#     dropout_param, mask = cache
#     mode = dropout_param["mode"]

#     dx = None
#     if mode == "train":
#         pass
#     elif mode == "test":
#         dx = dout
#     return dx


# def conv_forward_naive(x, w, b, conv_param):
#     out = None
#     pass
#     cache = (x, w, b, conv_param)
#     return out, cache


# def conv_backward_naive(dout, cache):
#     dx, dw, db = None, None, None
#     pass
#     return dx, dw, db

def softmax_loss(x, y):
    """Compute softmax cross-entropy loss and gradient.

    Inputs:
    - x: logits, shape (N, C), where x[i, j] is the score for class j
      on sample i
    - y: integer labels, shape (N,), with 0 <= y[i] < C

    Returns a tuple of:
    - loss: scalar mean cross-entropy loss over the batch
    - dx: gradient of the loss w.r.t. x, shape (N, C)
    """
    # Shift logits by the per-row max for numerical stability:
    # softmax is invariant to this, but it prevents exp() overflow.
    shifted = x - np.max(x, axis=1, keepdims=True)
    log_probs = shifted - np.log(np.sum(np.exp(shifted), axis=1, keepdims=True))
    probs = np.exp(log_probs)

    N = x.shape[0]
    # Cross-entropy: mean negative log-probability of the true class.
    loss = -np.mean(log_probs[np.arange(N), y])

    # Gradient: (probs - one_hot(y)) / N.
    dx = probs.copy()
    dx[np.arange(N), y] -= 1
    dx /= N

    return loss, dx
