# -*- coding: utf-8 -*-
"""
Created on Thu May 17 11:15:54 2012

@author: Daniel
"""
import numpy as np
import nonlinopt as nl
import pylab as pylab
"-------------------------Testing 1D Optimizer--------------------------------"
print 'testing fMin1d'
def obj1d(x, params):
    """Simple 1-D test objective: a parabola x**2 shifted down by ``params``.

    Minimum is at x = 0 with value ``-params``.
    """
    value = x ** 2 - params
    return value
# Instantiate the optimizer under test (project-local ``nonlinopt`` module).
nlOpt=nl.nonlinopt()
lb=-1.0  # lower bound of the 1-D search interval
ub=1.0  # upper bound of the 1-D search interval
x0=0.5  # initial guess
tol=10**-10.0  # convergence tolerance (1e-10)
# Golden-section search on obj1d over [lb, ub]; the trailing 5.0 is presumably
# forwarded to obj1d as ``params`` — TODO confirm against nonlinopt's signature.
f1,f2=nlOpt.goldenSelection(obj1d,lb,ub,x0,tol,5.0)
print f1
print f2
# Section banner (no-op string expression used as a visual separator).
"---------------------Testing Gradient Functions------------------------------"
print 'testing gradient'
def objRosen(x, params):
    """Rosenbrock "banana" function of x[0], x[1].

    Global minimum is 0 at (1, 1). ``params`` is unused but kept so the
    signature matches the optimizer's expected objective interface.
    """
    linear_term = 1.0 - x[0]
    curvature_term = x[1] - x[0] ** 2.0
    return 100.0 * curvature_term ** 2 + linear_term ** 2.0
def gradRosen(x, params):
    """Analytic gradient of the Rosenbrock function (see objRosen).

    Returns a length-2 numpy array [df/dx0, df/dx1]. ``params`` is unused
    but kept so the signature matches the optimizer's gradient interface.
    """
    inner = -x[0] ** 2.0 + x[1]  # recurring factor (x1 - x0^2)
    g = np.zeros([2])
    g[0] = -400.0 * x[0] * inner - 2.0 * (-x[0] + 1.0)
    g[1] = 200.0 * inner
    return g
# Test point and box bounds for the 2-D gradient / normalization checks.
x=np.array([2.0,3.0])
lb=np.array([-3.0,-3.0])
ub=np.array([3.0,3.0])
dx=0.00001  # finite-difference step size
# Numerical gradient of the Rosenbrock function at x (compare with gradRosen).
grad=nlOpt.gradient(objRosen,x,dx)
print grad
# Section banner (no-op string expression used as a visual separator).
"------------------Testing Normalizing Functions------------------------------"
print 'testing normalize'
# Map x from [lb, ub] into the optimizer's internal normalized space...
xNorm=nlOpt._normalize(x,lb,ub)
print xNorm
print 'testing denormalize'
# ...and back; xDenorm should round-trip to the original x.
xDenorm=nlOpt._denormalize(xNorm,lb,ub)
print xDenorm
"---------------Testing Steepest Descent Functions----------------------------"
print 'testing Steepest Descent'
xOpt,fOpt,X,F=nlOpt.steepestDescent(objRosen,x,dx,tol)
print 'Optimizing the Rosenbrock Function -FWD Differencing'
print '{0:12} = {1:.8f},{1:.8f}'.format('xOpt',xOpt[0],xOpt[1])
print '{0:12} = {1:.8f}'.format('fOpt',fOpt)
print '{0:12} = {1:d}'.format('Iterations',X.shape[0])
xOpt,fOpt,X,F=nlOpt.steepestDescent(objRosen,x,dx,tol,gradRosen)
print 'Optimizing the Rosenbrock Function -Supplied Gradient'
print '{0:12} = {1:.8f},{1:.8f}'.format('xOpt',xOpt[0],xOpt[1])
print '{0:12} = {1:.8f}'.format('fOpt',fOpt)
print '{0:12} = {1:d}'.format('Iterations',X.shape[0])
Y=np.arange(0.0,float(X.shape[0]))
pylab.plot(Y[1:],F[1:],'-')
pylab.show()