'''
Created on Aug 20, 2012

@author: Justin
'''

from numpy import *
from scipy import optimize
from copy import deepcopy

class Network(object):
  """Feed-forward neural network with sigmoid activations.

  dims lists the layer sizes (input, hidden..., output); Theta[l] holds
  the weight matrix mapping layer l (plus a bias unit) to layer l+1.
  Parameters travel to/from scipy.optimize as one flat vector via the
  module-level roll/unroll helpers.
  """

  def __init__(self, dims, e=0.12):
    """Create an untrained network with randomly weighted edges.

    dims -- list of layer sizes, e.g. [400, 25, 10]
    e    -- width of the uniform initialization interval [-e/2, e/2)
    """
    # Cache of the last parameter vector evaluated, so costFun and
    # gradFun can share a single forward/backward pass (compositeCost).
    self.saved = None
    self.cost = None
    self.grad = None

    self.dims = dims
    L = len(dims) - 1
    self.Theta = [None]*L
    for i in range(L):
      # dims[i] + 1 accounts for the bias unit feeding layer i+1.
      weights = random.random((dims[i+1], dims[i] + 1))
      weights = e*(weights - 0.5)
      self.Theta[i] = weights

  def feed(self, x):
    """Forward-propagate one feature column vector; return the output
    layer's activations.

    x -- column vector of shape (dims[0], 1)
    """
    L = len(self.dims)
    A = [None]*L
    # BUG FIX: the original aliased A to Z, stored the input at A[1]
    # (leaving A[0] = None for the first layer), and returned A[L],
    # which is out of range. Activations live at indices 0..L-1, as in
    # compositeCost.
    A[0] = x
    for l in range(L-1):
      # Prepend the bias unit, then apply this layer's weights.
      z = self.Theta[l].dot( vstack((array(1), A[l])) )
      A[l+1] = sigmoid(z)
    return A[L-1]

  def train(self, X, Y, lam=0):
    """Fit the weights to (X, Y) with conjugate gradient (at most 10
    iterations) and return the final cost.

    X   -- (m, dims[0]) matrix of training examples, one per row
    Y   -- (m, dims[-1]) matrix of target outputs
    lam -- regularization strength
    """
    costFun, gradFun = self.getFuns(X, Y, lam)
    theta = self.getParams()
    theta = optimize.fmin_cg(costFun, theta, fprime = gradFun, maxiter = 10)
    self.setParams(theta)
    return costFun(theta)

  def getFuns(self, X, Y, lam=0):
    """Return (cost, gradient) closures over the training set, suitable
    for scipy.optimize."""
    return ( lambda theta : self.costFun(theta, X, Y, lam), lambda theta : self.gradFun(theta, X, Y, lam) )

  def compositeCostFunction(self, X, Y, lam=0):
    """Return a closure computing only the cost at a parameter vector."""
    return lambda theta: self.costFun(theta, X, Y, lam)

  def costFun(self, theta, X, Y, lam):
    """Regularized cost at theta; caches (cost, grad) so a following
    gradFun call at the same theta is free."""
    if not array_equal(theta, self.saved):
      self.saved = theta
      # BUG FIX: the original assigned to self.gradFun here, clobbering
      # the gradFun METHOD with an array; the gradient belongs in the
      # self.grad cache field.
      self.cost, self.grad = self.compositeCost(theta, X, Y, lam)
    return self.cost

  def gradFun(self, theta, X, Y, lam):
    """Gradient at theta; caches (cost, grad) so a following costFun
    call at the same theta is free."""
    if not array_equal(theta, self.saved):
      self.saved = theta
      # BUG FIX: the original assigned to self.costFun here, clobbering
      # the costFun METHOD with a scalar; the cost belongs in the
      # self.cost cache field.
      self.cost, self.grad = self.compositeCost(theta, X, Y, lam)
    return self.grad

  def compositeCost(self, theta, X, Y, lam):
    """One forward + backward pass: return (cost, flattened gradient).

    theta -- flat parameter vector (see roll/unroll)
    X, Y  -- training inputs/targets, one example per row
    lam   -- regularization strength (bias columns are not regularized)
    """
    m = X.shape[0]
    L = len(self.dims)

    Theta = unroll(theta, self.dims)
    # Every Grad[l] is rebound wholesale below, so no need to deep-copy
    # Theta and NaN-fill it as the original did.
    Grad = [None]*(L-1)
    Z = [None]*L
    A = [None]*L
    A[0] = X.transpose()

    # Forward pass: activations per layer, with a row of bias units
    # prepended before each weight multiplication.
    for l in range(L-1):
      Z[l+1] = Theta[l].dot( vstack((ones((1, m)), A[l])) )
      A[l+1] = sigmoid(Z[l+1])

    # Cross-entropy cost summed over all examples and output units.
    J = -sum(sum( Y*log(A[L-1]).transpose() + (1-Y)*log(1 - A[L-1]).transpose() ))

    # Regularization term, skipping each layer's bias column.
    reg = 0
    for l in range(L-1):
      reg = reg + sum(sum( Theta[l][:,1:]**2 ))

    J = J + 1.0/2*lam*reg
    J = J/m

    # Backward pass: output-layer error, then back-propagated deltas
    # scaled by the sigmoid derivative A*(1-A).
    Delta = [None]*L
    Delta[L-1] = A[L-1] - Y.transpose()

    for l in range(L-2, 0, -1):
      Delta[l] = Theta[l][:,1:].transpose().dot( Delta[l+1] ) * A[l]*(1-A[l])

    for l in range(L-2, -1, -1):
      Grad[l] = Delta[l+1].dot( vstack((ones((1, m)), A[l])).transpose() )
      # Regularize all weights except the bias column.
      Grad[l][:,1:] = Grad[l][:,1:] + lam * Theta[l][:,1:]

    grad = roll(Grad)
    grad = grad/m

    return (J, grad)

  def checkGrad(self, theta, costFun):
    """Numerically estimate the gradient of costFun at theta with
    forward differences (step 0.001), for debugging compositeCost.

    costFun must return a (cost, grad) tuple -- e.g. a closure over
    compositeCost; only the cost component is used.
    """
    h = 0.001

    grad = array([nan]*len(theta))
    thetaCost = costFun(theta)[0]

    for i in range(len(theta)):
      vec = theta.copy()
      vec[i] = vec[i] + h
      grad[i] = (costFun(vec)[0] - thetaCost)/h

    return grad

  def setParams(self, theta):
    """Replace the weight matrices with those packed in the flat vector
    theta."""
    self.Theta = unroll(theta, self.dims)

  def getParams(self):
    """Return the current weights flattened into a single vector."""
    return roll(self.Theta)


def roll(Theta):
  """Flatten a list of weight matrices into one 1-D parameter vector.

  Each matrix is read in column-major (Fortran) order and the pieces
  are concatenated in list order; inverse of unroll.
  """
  flats = [float64(W.reshape(W.size, order='F')) for W in Theta]
  if not flats:
    return array([])
  return concatenate(flats)


def unroll(theta, dims):
  """Split a flat parameter vector back into per-layer weight matrices.

  Layer l's matrix has dims[l+1] rows and dims[l] + 1 columns (the +1
  is the bias column) and is filled column-major; inverse of roll.
  """
  Theta = []
  offset = 0
  for rows, below in zip(dims[1:], dims):
    cols = below + 1
    count = rows * cols
    Theta.append(theta[offset:offset + count].reshape(rows, cols, order='F'))
    offset += count

  return Theta


def sigmoid(z):
  """Elementwise logistic function 1 / (1 + e^-z); accepts scalars or
  arrays."""
  denom = 1 + exp(-z)
  return 1 / denom
