import matplotlib.pyplot as plt
import numpy as np
from numdifftools import Gradient
from numdifftools import Hessian
from scipy.optimize import basinhopping


# Numerical gradient of the Rosenbrock objective (via numdifftools)
def rosen_der_numerical(x):
    """Return the numerically-differentiated gradient of ``rosen`` at ``x``."""
    grad_fn = Gradient(rosen)
    return grad_fn(x)


# Numerical Hessian of the Rosenbrock objective (via numdifftools)
def rosen_hess_numerical(x):
    """Return the numerically-differentiated Hessian of ``rosen`` at ``x``."""
    hess_fn = Hessian(rosen)
    return hess_fn(x)


# Plot Iteration History
def plot_iteration_history(iteration_history):
    """Line-plot the objective value recorded at each basin-hopping iteration.

    iteration_history: sequence of (iteration_number, objective_value) pairs.
    """
    if iteration_history:
        iterations, values = zip(*iteration_history)
    else:
        iterations, values = (), ()

    plt.plot(iterations, values, 'o-', markersize=3)
    plt.xlabel('Iteration Number')
    plt.ylabel('Objective Function Value')
    plt.title('Iteration History of Basin-Hopping Algorithm')
    plt.grid(True)
    plt.show()


# Callback Function
def callback(x, f, accepted, verbose=False):
    """Record one basin-hopping step into the module-level history.

    Increments the global ``iter_count`` and appends ``(iter_count, f)`` to the
    global ``iteration_history``; optionally prints a progress line.
    """
    global iter_count
    iter_count = iter_count + 1
    iteration_history.append((iter_count, f))

    if not verbose:
        return
    print(f"Iteration {iter_count}: at minimum {f:.4f} accepted {int(accepted)}")


# Objective Function Setup
def rosen_with_args(x, a, b):
    """Shifted Rosenbrock function: sum of a*(x[i+1]-x[i]^2)^2 + (1-x[i])^2, plus b.

    ``x`` is a 1-D array-like; ``a`` scales the curvature term and ``b`` is a
    constant offset added to the total.
    """
    head = x[:-1]
    tail = x[1:]
    return sum(a * (tail - head ** 2.0) ** 2.0 + (1 - head) ** 2.0) + b


# Rosenbrock parameters baked into `rosen` below:
# parr1 -> `a` (curvature weight), parr2 -> `b` (constant offset on the result)
parr1 = 3
parr2 = 5


def rosen(x):
    """Objective used by every optimizer run: ``rosen_with_args`` with the
    module-level parameters ``parr1`` (a) and ``parr2`` (b) applied."""
    return rosen_with_args(x, parr1, parr2)


# Initial Guess
x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

# Global iteration counter (incremented by `callback` on every hop)
iter_count = 0

# List of (iteration_number, objective_value) tuples collected by `callback`
iteration_history = []

# Basin-hopping to find the global minimum

# Perform optimization using Nelder-Mead method (derivative-free local minimizer).
# `callback` already defaults to verbose=False, so it can be passed directly
# instead of wrapping it in a lambda.
result = basinhopping(rosen, x0, minimizer_kwargs={"method": 'nelder-mead'},
                      niter=50, callback=callback)

# Output the result (label fixed: this run uses Nelder-Mead, not Trust-NCG)
print("Nelder-Mead solution:", result.x)
print("Objective function value at solution:", result.fun)

# Plot the iteration history
plot_iteration_history(iteration_history)

# Broyden-Fletcher-Goldfarb-Shanno (BFGS) Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using BFGS method with a numerical gradient.
# `callback` already defaults to verbose=False, so no lambda wrapper is needed.
result = basinhopping(rosen, x0, minimizer_kwargs={"method": 'BFGS', 'jac': rosen_der_numerical},
                      niter=50, callback=callback)

# Output the result (label fixed: this run uses BFGS, not Trust-NCG)
print("BFGS solution:", result.x)
print("Objective function value at solution:", result.fun)

# Plot the iteration history
plot_iteration_history(iteration_history)

# Reset the shared iteration bookkeeping before the next run
iteration_history = []
iter_count = 0

# Perform optimization using Newton-CG method (numerical gradient and Hessian)
newton_cg_kwargs = {
    "method": 'Newton-CG',
    'jac': rosen_der_numerical,
    'hess': rosen_hess_numerical,
}
result = basinhopping(rosen, x0, minimizer_kwargs=newton_cg_kwargs, niter=50,
                      callback=lambda x, f, accepted: callback(x, f, accepted, verbose=False))
print("Newton-CG solution:", result.x)
print("Objective function value at solution:", result.fun)

# Plot the iteration history
plot_iteration_history(iteration_history)

# Trust-Region Newton-Conjugate-Gradient Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using trust-ncg method (numerical gradient and Hessian)
trust_ncg_kwargs = {
    "method": 'trust-ncg',
    'jac': rosen_der_numerical,
    'hess': rosen_hess_numerical,
}
result = basinhopping(rosen, x0, minimizer_kwargs=trust_ncg_kwargs, niter=50,
                      callback=lambda x, f, accepted: callback(x, f, accepted, verbose=False))
print("Trust-NCG solution:", result.x)
print("Objective function value at solution:", result.fun)

# Plot the iteration history
plot_iteration_history(iteration_history)

# Trust-Region Krylov Subspace Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using trust-krylov method (numerical gradient and Hessian)
trust_krylov_kwargs = {
    "method": 'trust-krylov',
    'jac': rosen_der_numerical,
    'hess': rosen_hess_numerical,
}
result = basinhopping(rosen, x0, minimizer_kwargs=trust_krylov_kwargs, niter=50,
                      callback=lambda x, f, accepted: callback(x, f, accepted, verbose=False))
print("Trust-Krylov solution:", result.x)
print("Objective function value at solution:", result.fun)

# Plot the iteration history
plot_iteration_history(iteration_history)

# Trust-Region Exact Algorithm Example
iteration_history = []  # Reset iteration history
iter_count = 0  # Reset iteration counter

# Perform optimization using trust-exact method (numerical gradient and Hessian)
trust_exact_kwargs = {
    "method": 'trust-exact',
    'jac': rosen_der_numerical,
    'hess': rosen_hess_numerical,
}
result = basinhopping(rosen, x0, minimizer_kwargs=trust_exact_kwargs, niter=50,
                      callback=lambda x, f, accepted: callback(x, f, accepted, verbose=False))
print("Trust-Exact solution:", result.x)
print("Objective function value at solution:", result.fun)

# Plot the iteration history
plot_iteration_history(iteration_history)
