import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import minimize


# Utility Functions

# Plot Iteration History
# This function plots the iteration history of the optimization process.
# It takes a list of tuples (iteration number, objective function value).
def plot_iteration_history(iteration_history):
    """Plot objective function value versus iteration number.

    Parameters
    ----------
    iteration_history : list of (int, float)
        Pairs of (iteration number, objective function value), as
        recorded by the optimizer callback.
    """
    # zip(*pairs) transposes the list of pairs into two parallel tuples;
    # guard against an empty history, where the unpack would fail.
    # (The original comprehension variable shadowed the builtin `iter`.)
    if iteration_history:
        x_values, y_values = zip(*iteration_history)
    else:
        x_values, y_values = [], []

    plt.plot(x_values, y_values, 'o-', markersize=3)
    plt.xlabel('Iteration Number')
    plt.ylabel('Objective Function Value')
    plt.title('Iteration History of Optimization Algorithms')
    plt.grid(True)
    plt.show()


# Callback Function
# This function is used to record the iteration history.
# It increments the iteration count and appends the current iteration number
# and objective function value to the iteration_history list.
def callback(x):
    """Record one optimization step.

    Increments the module-level iteration counter and appends the pair
    (iteration number, objective value at x) to the module-level
    ``iteration_history`` list.
    """
    global iter_count
    iter_count = iter_count + 1
    entry = (iter_count, rosen(x))
    iteration_history.append(entry)


# Objective Function Setup

# Parameterized Rosenbrock Function
# This function takes additional parameters a and b.
def rosen_with_args(x, a, b):
    """Parameterized Rosenbrock function.

    Computes sum(a*(x[i+1] - x[i]**2)**2 + (1 - x[i])**2) + b over the
    array ``x``, where ``a`` scales the valley term and ``b`` is a
    constant offset.
    """
    head = x[:-1]
    tail = x[1:]
    valley_term = a * (tail - head ** 2.0) ** 2.0
    distance_term = (1 - head) ** 2.0
    return np.sum(valley_term + distance_term) + b


# Parameters for the Rosenbrock function
parr1 = 3  # scale factor `a` of the valley term in rosen_with_args
parr2 = 5  # constant offset `b` added to the objective value


# De-parameterized Rosenbrock Function
# This function uses the predefined parameters.
def rosen(x):
    """Rosenbrock objective with the module-level parameters baked in.

    Delegates to ``rosen_with_args`` using ``parr1`` and ``parr2``.
    """
    return rosen_with_args(x, a=parr1, b=parr2)


# Initial Guess
# Starting point shared by every optimizer run below (5-dimensional).
x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

# Examples of Using Various Optimization Algorithms

# Nelder-Mead Algorithm Example
iteration_history = []  # List to record iteration history
iter_count = 0  # Counter for iterations

# Perform optimization using Nelder-Mead method.
# Nelder-Mead is derivative-free; 'xatol' is the absolute tolerance on x
# for convergence, and 'disp' prints a convergence summary.
ret = minimize(rosen, x0, method='nelder-mead',
               options={'xatol': 1e-8, 'disp': True}, callback=callback)
print("Nelder-Mead solution:", ret.x)
print("Objective function value at solution:", rosen(ret.x))

# Plot the iteration history
plot_iteration_history(iteration_history)

# Broyden-Fletcher-Goldfarb-Shannon (BFGS) Algorithm Example
from numdifftools import Gradient


# Function to numerically compute the gradient of the Rosenbrock function
def rosen_der_numerical(x):
    """Numerically approximate the gradient of ``rosen`` at ``x``.

    Builds a numdifftools ``Gradient`` operator for ``rosen`` and
    evaluates it at the given point.
    """
    grad_fn = Gradient(rosen)
    return grad_fn(x)


iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using BFGS method.
# BFGS is a quasi-Newton method; 'jac' supplies the (numerically
# approximated) gradient so scipy does not fall back to finite differences.
ret = minimize(rosen, x0, method='BFGS', jac=rosen_der_numerical,
               options={'disp': True}, callback=callback)
print("BFGS solution:", ret.x)
print("Objective function value at solution:", rosen(ret.x))

# Plot the iteration history
plot_iteration_history(iteration_history)

# Newton-Conjugate-Gradient Algorithm Example
from numdifftools import Hessian


# Function to numerically compute the Hessian of the Rosenbrock function
def rosen_hess_numerical(x):
    """Numerically approximate the Hessian of ``rosen`` at ``x``.

    Builds a numdifftools ``Hessian`` operator for ``rosen`` and
    evaluates it at the given point.
    """
    hess_fn = Hessian(rosen)
    return hess_fn(x)


iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using Newton-CG method.
# Newton-CG requires the gradient and can use the Hessian; 'xtol' is the
# average relative error in x acceptable for convergence.
res = minimize(rosen, x0, method='Newton-CG',
               jac=rosen_der_numerical, hess=rosen_hess_numerical,
               options={'xtol': 1e-8, 'disp': True}, callback=callback)
print("Newton-CG solution:", res.x)
print("Objective function value at solution:", rosen(res.x))

# Plot the iteration history
plot_iteration_history(iteration_history)

# Trust-Region Newton-Conjugate-Gradient Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using trust-ncg method.
# trust-ncg solves a trust-region subproblem with CG; both jac and hess
# are required, and 'gtol' is the gradient-norm termination tolerance.
res = minimize(rosen, x0, method='trust-ncg',
               jac=rosen_der_numerical, hess=rosen_hess_numerical,
               options={'gtol': 1e-8, 'disp': True}, callback=callback)
print("Trust-NCG solution:", res.x)
print("Objective function value at solution:", rosen(res.x))

# Plot the iteration history
plot_iteration_history(iteration_history)

# Trust-Region Krylov Subspace Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using trust-krylov method.
# trust-krylov solves the trust-region subproblem via a Krylov-subspace
# (Lanczos) method; jac and hess are required, 'gtol' bounds the gradient norm.
res = minimize(rosen, x0, method='trust-krylov',
               jac=rosen_der_numerical, hess=rosen_hess_numerical,
               options={'gtol': 1e-8, 'disp': True}, callback=callback)
print("Trust-Krylov solution:", res.x)
print("Objective function value at solution:", rosen(res.x))

# Plot the iteration history
plot_iteration_history(iteration_history)

# Trust-Region Exact Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using trust-exact method.
# trust-exact solves the trust-region subproblem nearly exactly (dense
# factorizations); jac and hess are required, 'gtol' bounds the gradient norm.
res = minimize(rosen, x0, method='trust-exact',
               jac=rosen_der_numerical, hess=rosen_hess_numerical,
               options={'gtol': 1e-8, 'disp': True}, callback=callback)
print("Trust-Exact solution:", res.x)
print("Objective function value at solution:", rosen(res.x))

# Plot the iteration history
plot_iteration_history(iteration_history)
