# -*- coding: utf-8 -*-
"""
Created on Thu May 17 09:35:45 2012

@author: Daniel
Optimization Library
"""
import numpy as np
class evalGrad:
    """
    Gradient-evaluation strategy wrapper.

    In 'fd' mode, gradFcn is a finite-differencing routine that receives
    (objFcn, x, dx, params); in any other mode, gradFcn is a user-supplied
    analytic gradient that receives (x, params).
    """
    def __init__(self, objFcn, gradFcn, dx, params, mode):
        # Capture everything needed to evaluate the gradient later.
        self.objFcn = objFcn
        self.gradFcn = gradFcn
        self.dx = dx
        self.params = params
        self.mode = mode

    def getGradient(self, x):
        """Return the gradient at x using the configured strategy."""
        if self.mode != 'fd':
            # Analytic gradient supplied directly by the user.
            return self.gradFcn(x, self.params)
        # Finite-difference path: delegate to the differencing routine.
        return self.gradFcn(self.objFcn, x, self.dx, self.params)
class _fcnHolder:
    def __init__(self,fcn,x,s,dx,tol,params):
        self.function=fcn
        self.x=x
        self.s=s
        self.dx=dx
        self.tol=tol
        self.params=params
class nonlinopt:
    """
    Gradient-based nonlinear optimization routines: forward-difference
    gradients, 1-D golden-section search, and steepest-descent minimization.
    """
    def _normalize(self, x, lb, ub):
        # Map x from [lb, ub] onto [0, 1] componentwise.
        return (x - lb) / (ub - lb)

    def _denormalize(self, xNorm, lb, ub):
        # Inverse of _normalize: map [0, 1] back onto [lb, ub].
        return xNorm * (ub - lb) + lb

    def gradient(self, fcn, x, dx, params=0.0):
        """
        Forward-difference gradient of fcn at x.

        fcn receives (x, params) and returns a scalar; dx is the
        finite-difference step. Returns a numpy array of size x.size.
        """
        grad = np.zeros([x.size])
        f0 = fcn(x, params)  # baseline evaluation, reused for every component
        for i in range(x.size):
            xt = x.copy()
            xt[i] = xt[i] + dx
            grad[i] = (fcn(xt, params) - f0) / dx
        return grad

    def goldenSelection(self, objFcn, x0, lb, ub, tol, params=0.0):
        """
        Golden-section search 1-D minimization. Minimizes objFcn, which
        receives an x value and params and returns the function value.

        Note the bracket convention used by callers: lb and x0 are the
        bracket endpoints and ub is the interior point. Returns the x value
        that minimizes objFcn and the function value at that x.
        """
        cx = x0
        ax = lb
        bx = ub
        C = (3 - 5 ** .5) / 2  # golden-section fraction ~0.382
        R = 1 - C              # complementary fraction ~0.618
        x0 = ax
        x3 = cx
        # Place the first interior probe in the larger of the two segments.
        if (abs(cx - bx)) > (abs(bx - ax)):
            x1 = bx
            x2 = bx + C * (cx - bx)
        else:
            x2 = bx
            x1 = bx - C * (bx - ax)
        f1 = objFcn(x1, params)
        f2 = objFcn(x2, params)
        # Shrink the bracket until its width is small relative to |x1|+|x2|.
        while abs(x3 - x0) > tol * (abs(x1) + abs(x2)):
            if f2 < f1:
                # Minimum lies in (x1, x3): slide the bracket right.
                x0 = x1
                x1 = x2
                x2 = R * x1 + C * x3
                f1 = f2
                f2 = objFcn(x2, params)
            else:
                # Minimum lies in (x0, x2): slide the bracket left.
                x3 = x2
                x2 = x1
                x1 = R * x2 + C * x0
                f2 = f1
                f1 = objFcn(x1, params)
        if f1 > f2:
            xmin = x2
            fmin = f2
        else:
            xmin = x1
            fmin = f1
        return xmin, fmin

    def steepestDescent(self, objFcn, x0, dx, tol, gradFcn=[], maxStepSize=10,
                        iMax=2000, params=0.0):
        """
        Steepest-descent minimization (unconstrained).

        objFcn receives (x, params) where x is a numpy design-variable vector
        and returns a scalar. gradFcn, if supplied, receives (x, params) and
        returns the gradient as a numpy array; otherwise a forward-difference
        gradient with step dx is used. Each step length is chosen by a
        golden-section line search over [-maxStepSize, maxStepSize].
        Iterates until |F[i]-F[i-1]| <= tol or iMax iterations.

        Returns (xOpt, fOpt, X, F): the optimum point, its function value,
        and the full design-variable / function-value histories.
        """
        # Choose the gradient evaluator: finite differences unless a
        # callable analytic gradient was supplied (default [] means "none").
        if isinstance(gradFcn, list):
            evalG = lambda x: self.gradient(objFcn, x, dx, params)
        else:
            evalG = lambda x: gradFcn(x, params)
        X = np.zeros([iMax, x0.size])
        S = np.zeros([iMax, x0.size])
        A = np.zeros([iMax])
        F = np.zeros([iMax])
        i = 0
        X[0, :] = x0
        F[0] = objFcn(x0, params)
        err = float('inf')
        while i < iMax - 1 and err > tol:
            i = i + 1
            S[i, :] = -evalG(X[i - 1, :])  # steepest-descent direction
            # 1-D line search along S[i]; the closure fixes the current
            # point and direction so goldenSelection sees a scalar problem.
            base = X[i - 1, :].copy()
            sDir = S[i, :].copy()
            lineFcn = lambda a, p: objFcn(base + a * sDir, params)
            A[i], _ = self.goldenSelection(lineFcn, -maxStepSize, maxStepSize,
                                           0.0, tol, params)
            X[i, :] = X[i - 1, :] + A[i] * S[i, :]
            F[i] = objFcn(X[i, :], params)
            err = abs(F[i] - F[i - 1])
        # Keep rows 0..i inclusive. (The previous slice 0:i dropped the
        # final iterate, so the returned optimum was one step stale.)
        X = X[:i + 1, :]
        S = S[:i + 1, :]
        A = A[:i + 1]
        F = F[:i + 1]
        return X[-1, :], F[-1], X, F
def runExamples():
    """
    Exercise the nonlinopt routines on simple example problems and plot the
    steepest-descent convergence history. Intended to run as a script.
    """
    import nonlinopt as nl
    # -------------------------Testing 1D Optimizer---------------------------
    print('testing fMin1d')
    def obj1d(x, params):
        return x ** 2 - params
    nlOpt = nl.nonlinopt()
    lb = -1.0
    ub = 1.0
    x0 = 0.5
    tol = 10 ** -10.0
    f1, f2 = nlOpt.goldenSelection(obj1d, lb, ub, x0, tol, 5.0)
    print(f1)
    print(f2)
    # ---------------------Testing Gradient Functions-------------------------
    print('testing gradient')
    def objRosen(x, params):
        # Rosenbrock function: minimum f=0 at x=[1, 1].
        return 100.0 * (x[1] - x[0] ** 2.0) ** 2 + (1.0 - x[0]) ** 2.0
    def gradRosen(x, params):
        # Analytic Rosenbrock gradient.
        grad = np.zeros([2])
        grad[0] = -400.0 * x[0] * (-x[0] ** 2.0 + x[1]) - 2.0 * (-x[0] + 1.0)
        grad[1] = 200.0 * (-x[0] ** 2.0 + x[1])
        return grad
    x = np.array([2.0, 3.0])
    lb = np.array([-3.0, -3.0])
    ub = np.array([3.0, 3.0])
    dx = 0.00001
    grad = nlOpt.gradient(objRosen, x, dx)
    print(grad)
    # ------------------Testing Normalizing Functions-------------------------
    print('testing normalize')
    xNorm = nlOpt._normalize(x, lb, ub)
    print(xNorm)
    print('testing denormalize')
    xDenorm = nlOpt._denormalize(xNorm, lb, ub)
    print(xDenorm)
    # ---------------Testing Steepest Descent Functions-----------------------
    print('testing Steepest Descent')
    xOpt, fOpt, X, F = nlOpt.steepestDescent(objRosen, x, dx, tol)
    print('Optimizing the Rosenbrock Function -FWD Differencing')
    # NOTE: was '{1:.8f},{1:.8f}', which printed xOpt[0] twice and ignored xOpt[1]
    print('{0:12} = {1:.8f},{2:.8f}'.format('xOpt', xOpt[0], xOpt[1]))
    print('{0:12} = {1:.8f}'.format('fOpt', fOpt))
    print('{0:12} = {1:d}'.format('Iterations', X.shape[0]))
    xOpt, fOpt, X, F = nlOpt.steepestDescent(objRosen, x, dx, tol, gradRosen)
    print('Optimizing the Rosenbrock Function -Supplied Gradient')
    print('{0:12} = {1:.8f},{2:.8f}'.format('xOpt', xOpt[0], xOpt[1]))
    print('{0:12} = {1:.8f}'.format('fOpt', fOpt))
    print('{0:12} = {1:d}'.format('Iterations', X.shape[0]))
    # Plot the objective history (skip the seed point at index 0).
    Y = np.arange(0.0, float(X.shape[0]))
    import pylab as pylab
    pylab.plot(Y[1:], F[1:], '-')
    pylab.show()
# Run the example problems only when executed as a script, not on import.
if __name__=="__main__":
    runExamples()