function history = solveNLP ()
% SOLVENLP  Solve the transcribed optimal-control NLP with the configured solver.
%
% Returns:
%   history - the full NLP solution vector sol.x (states/controls/params stacked).
%
% Reads/writes globals:
%   prob - problem dimensions (nstate, nctrl, nparam)
%   nlp  - NLP description: bounds (var_bnds, con_bnds), sizes (nvar, ncon),
%          initial guess, function handles (nlp.func), solver settings
%          (method, tol, maxiter, print_level, hessian), collocation grid
%          (collocT, nnode), and solution index sets (uGidx, uLidx)
%   sol  - populated here with the solution and metadata
%
% Other globals used elsewhere in the project (bus, gen, branch, ...) are
% listed in the original header but not referenced directly in this function.
global prob nlp sol

% Build the NLP callback handles (objective/gradient, constraints/Jacobian
% structure, Jacobian update, Hessian evaluator).
objGrad();             % objective, gradient
consJacBase();         % constraints, Jacobian structure (calc JacBase)
jacobUpdate();         % get Jacobian update function handle
hessianEval();

% Optional derivative checks (left disabled):
% check('jacobian');
% check('hessian');

% Initialize solution metadata (grid and dimensions).
sol.time = nlp.collocT; sol.nnode = nlp.nnode;
sol.nstate = prob.nstate; sol.nctrl = prob.nctrl; sol.nparam = prob.nparam;

%% Solve NLP
% Variable and constraint bounds.
nlp.options.lb = nlp.var_bnds(:,1);
nlp.options.ub = nlp.var_bnds(:,2);
nlp.options.cl = nlp.con_bnds(:,1);
nlp.options.cu = nlp.con_bnds(:,2);
% Initialize the dual point (bound multipliers and constraint multipliers).
nlp.options.zl     = 1/10*ones(nlp.nvar, 1);
nlp.options.zu     = 1/10*ones(nlp.nvar, 1);
nlp.options.lambda = -ones(nlp.ncon, 1);

if strcmp(nlp.method, 'IPOPT')                         % set IPOPT options
    nlp.options.ipopt.print_level = nlp.print_level;   % print level
    if strcmp(nlp.hessian, 'BFGS')
        % Use IPOPT's L-BFGS Hessian approximation instead of the exact Hessian.
        nlp.options.ipopt.hessian_approximation = 'limited-memory';
    end
    nlp.options.ipopt.mu_strategy = 'adaptive';
    nlp.options.ipopt.tol         = nlp.tol;
    nlp.options.ipopt.max_iter    = nlp.maxiter;
    %nlp.options.ipopt.full    = 0;
    nlp.options.ipopt.fixed_variable_treatment = 'make_parameter';%'relax_bounds';
    % Run IPOPT.
    %[sol.x, sol.info] = ipopt(nlp.guess, nlp.func, nlp.options);
    x = rsipopt(nlp.guess, nlp.func, nlp.options);
    sol.x = x;

elseif strcmp(nlp.method, 'KNITRO')
    % Assemble a TOMLAB problem structure from the NLP callbacks and bounds.
    TomProb = conAssign(nlp.func.objective, nlp.func.gradient, ...
        [], [], ...
        nlp.options.lb, nlp.options.ub, 'TestCase', nlp.guess, ...
        [], 0, ...
         [], [], [], ...
        nlp.func.constraints, nlp.func.jacobian, [], ...
        [], nlp.options.cl, nlp.options.cu);

    TomProb.LargeScale = 1;
    TomProb.KNITRO.options.MAXIT = nlp.maxiter; % Setting maximum number of iterations
    TomProb.KNITRO.options.BAR_FEASIBLE = 1; % Select feasible KNITRO
    TomProb.KNITRO.options.ALG = 2;

    madinitglobals;
    % BUGFIX: these AD flags were previously assigned to an unused struct
    % 'Prob', so they never reached the solver; they must be set on the
    % 'TomProb' struct that is actually passed to tomRun.
    TomProb.ADObj = 0;  % Gradient supplied analytically (no automatic differentiation)
    TomProb.ADCons = 0; % Jacobian supplied analytically (no automatic differentiation)
    Result = tomRun('knitro', TomProb, 2, 0);
    sol.x = Result.x_k;
end

% Extract generator/load control trajectories from the stacked solution vector.
sol.uG = sol.x(nlp.uGidx); sol.uL = sol.x(nlp.uLidx);
history = sol.x;

end