# -*- coding: utf-8 -*-

from scipy.optimize import minimize, rosen, rosen_der
def objFcn(x):
    """Objective for the constrained examples: squared distance from (1, 2.5).

    This is the objective of Example 16.4 in Nocedal & Wright,
    'Numerical Optimization' (the problem referenced as [R69] in the
    SciPy docs).
    """
    return (x[0] - 1)**2 + (x[1] - 2.5)**2


# --- Unconstrained, derivative-free minimization: Nelder-Mead simplex ---
print('Unconstrained Non-Gradient Based Example: Nelder-Mead')
x0 = [1.3, 0.7, 0.8, 1.9, 1.2]
res = minimize(rosen, x0, method='Nelder-Mead')
print(res)

# --- Constrained, gradient-based minimization: SLSQP ---
print('\nConstrained Gradient Based Example: SLSQP')

# Three linear inequality constraints; SciPy's convention is fun(x) >= 0
# at a feasible point.
cons = ({'type': 'ineq', 'fun': lambda x:  x[0] - 2 * x[1] + 2},
        {'type': 'ineq', 'fun': lambda x: -x[0] - 2 * x[1] + 6},
        {'type': 'ineq', 'fun': lambda x: -x[0] + 2 * x[1] + 2})

# Both variables must be non-negative.
bnds = ((0, None), (0, None))

args = ()
opts = {'maxiter': 100, 'disp': True}
res = minimize(objFcn, (2, 0), args, method='SLSQP', bounds=bnds,
               constraints=cons, options=opts)
print(res)

# --- Same problem with COBYLA: derivative-free, constraint-capable ---
# COBYLA does not take `bounds` (older SciPy warns and ignores them), so
# the non-negativity bounds are supplied as extra inequality constraints.
print('\nConstrained Non-Gradient Based Example: COBYLA')
cons_cobyla = cons + ({'type': 'ineq', 'fun': lambda x: x[0]},
                      {'type': 'ineq', 'fun': lambda x: x[1]})
res = minimize(objFcn, (2, 0), args, method='COBYLA',
               constraints=cons_cobyla, options=opts)
print(res)