#!/usr/bin/env python2.7
#-*- coding:utf-8 -*-
#Author='He Rensheng'
#Email='hrs323@126.com'

import numpy as np

def affine_forward(x, w, b):
    """Forward pass for a fully-connected (affine) layer: out = x_flat @ w + b.

    Inputs:
        x: input data of shape (N, d1, d2, ..., dk); flattened per-example
           to (N, D) where D = d1 * d2 * ... * dk
        w: weight matrix of shape (D, M)
        b: bias vector of shape (M,)
    Returns:
        out: layer output of shape (N, M)
        cache: tuple (x, w, b) saved for the backward pass
    """
    num_examples = x.shape[0]
    # Collapse all trailing dims into one feature dim; original x goes
    # into the cache so the backward pass can restore its shape.
    x_flat = x.reshape(num_examples, -1)
    out = x_flat.dot(w) + b
    return out, (x, w, b)

def affine_backward(dout, cache):
    """Backward pass for a fully-connected (affine) layer.

    Inputs:
        dout: upstream gradient, of shape (N, M)
        cache: tuple (x, w, b) stored by affine_forward
    Returns:
        dx: gradient w.r.t. x, same shape as x
        dw: gradient w.r.t. w, same shape as w
        db: gradient w.r.t. b, same shape as b
    """
    x, w, _ = cache
    x_flat = x.reshape(x.shape[0], -1)

    # Bias receives the gradient summed over the batch dimension.
    db = np.sum(dout, axis=0)
    # dw = x_flat^T @ dout, matching w's (D, M) shape.
    dw = x_flat.T.dot(dout)
    # dx in flattened form is dout @ w^T; restore x's original shape.
    dx = dout.dot(w.T).reshape(x.shape)

    return dx, dw, db

def relu_fowward(x):
    """Forward pass for a ReLU activation: out = max(x, 0) elementwise.

    NOTE: the name "fowward" is a historical typo; kept for backward
    compatibility, with a correctly spelled alias `relu_forward` below.

    Input:
        x: A numpy array containing input data, of shape (N, D)
    Return:
        out: output, of shape (N, D)
        cache: x, kept so the backward pass can recompute the mask
    """
    mask = x > 0
    out = x * mask
    # Cache the input itself (not the mask) — relu_backward rebuilds
    # the mask from x; docstring previously claimed "mask" incorrectly.
    cache = x
    return out, cache

# Backward-compatible alias with the correct spelling.
relu_forward = relu_fowward

def relu_backward(dout, cache):
    """Backward pass for a ReLU activation.

    Inputs:
        dout: upstream gradient, of shape (N, M)
        cache: the original input x, of shape (N, M)
    Return:
        dx: gradient w.r.t. x, of shape (N, M) — dout where x > 0, else 0
    """
    x = cache
    # ReLU passes gradient through only where the input was positive.
    return dout * (x > 0)