import numpy,datasets,modules,sys
import os

# Experiment parameters
_parent = os.path.join(os.getcwd(), os.pardir)      # directory above the cwd
path = os.path.normpath(os.path.join(_parent, 'res'))  # location of the dataset resources
mb   = 25              # number of examples per minibatch
lr   = 0.001           # step size for stochastic gradient descent

# Load the MNIST training and test splits
Xtrain, Ttrain, Xtest, Ttest = datasets.mnist(path)

# Standardize the inputs: statistics are estimated on the training set
# only and then applied unchanged to the test set.

# 1. remove the training mean
mu = Xtrain.mean()
Xtrain -= mu
Xtest -= mu

# 2. divide by the standard deviation of the (centered) training data
sd = Xtrain.std()
Xtrain /= sd
Xtest /= sd


# Build the neural network selected on the command line
if len(sys.argv) <= 1:
	sys.exit('Error: please specify a neural network architecture')

arch = sys.argv[1]
if arch == 'mlp':
	# Multilayer perceptron: two hidden layers (200 and 50 units)
	# TODO: change "Positive" nonlinearity to "Sigmoid" nonlinearity once implemented
	nn = modules.Sequential([
		modules.Linear(784,200), modules.Positive(),
		modules.Linear(200,50), modules.Positive(),
		modules.Linear(50,10,zero=True),
	])
elif arch == 'lin':
	# Single linear layer (logistic-regression-style baseline)
	nn = modules.Linear(784,10,zero=True)
else:
	sys.exit('Error: neural network architecture not recognized')

# Construct an optimization criterion: the negative log-likelihood loss,
# whose gradient w.r.t. the network output drives backpropagation below
nll = modules.NegLogLik()

# Train the neural network for 10000 iterations of minibatch SGD
for it in range(1,10001):

	# Draw a random minibatch (sampling indices with replacement)
	r = numpy.random.randint(0,len(Xtrain),[mb])
	x = Xtrain[r]
	t = Ttrain[r]

	# Do a stochastic gradient descent step:
	# forward pass, loss gradient, backward pass, parameter update
	y  = nn.forward(x)
	dy = nll.grad(y,t)
	nn.backward(dy)
	nn.update(lr)

	# Evaluate test accuracy at regular intervals
	if it % 1000 == 0:
		Ytest = nn.forward(Xtest)
		# Predicted class = index of the highest output score. Using
		# argmax (instead of comparing each row against its maximum)
		# yields exactly one prediction per example, so tied maxima can
		# no longer overcount accuracy.
		# NOTE(review): assumes Ttest is one-hot encoded — the same
		# assumption the original (Ytest*Ttest).sum(axis=1) relied on.
		acc = (Ytest.argmax(axis=1) == Ttest.argmax(axis=1)).mean()
		print('%8d %.3f'%(it,acc))

