import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import Bounds, minimize, LinearConstraint, NonlinearConstraint


# Parameterized Rosenbrock function -- a classic optimization benchmark
# whose narrow curved valley makes it a good stress test for solvers.
def rosen_with_args(x, a, b):
    """
    Evaluate the Rosenbrock function with tunable parameters.

    Parameters:
    x (numpy.ndarray): Array of variables.
    a (float): Weight applied to the squared-valley term.
    b (float): Constant offset added to the final value.

    Returns:
    float: Function value at x.
    """
    leading = x[:-1]
    trailing = x[1:]
    valley_term = a * (trailing - leading ** 2.0) ** 2.0
    distance_term = (1 - leading) ** 2.0
    return sum(valley_term + distance_term) + b


# Parameters fed to rosen_with_args by the rosen() wrapper below:
# parr1 is the valley weight `a`, parr2 the constant offset `b`.
parr1 = 3
parr2 = 5


# Convenience wrapper that fixes the Rosenbrock parameters so the
# objective matches the single-argument f(x) signature scipy expects.
def rosen(x):
    """
    Evaluate the Rosenbrock function with the module-level parameters.

    Parameters:
    x (numpy.ndarray): Array of variables.

    Returns:
    float: Function value at x with a=parr1 and b=parr2.
    """
    fixed_params = (parr1, parr2)
    return rosen_with_args(x, *fixed_params)


# Initial guess for the optimization.
# NOTE(review): x0[0] = 1.3 lies outside the box bounds below (0 <= x[0] <= 1);
# trust-constr appears to proceed anyway, but confirm this is intentional.
x0 = np.array([1.3, 0.7])

# Box bounds per variable: 0 <= x[0] <= 1.0 and -0.5 <= x[1] <= 2.0
bnds = Bounds([0, -0.5], [1.0, 2.0])

# Linear constraints lb <= A @ x <= ub with A = [[1, 2], [2, 1]]:
#   -inf <= x[0] + 2*x[1] <= 1   (i.e. x[0] + 2*x[1] <= 1)
#      1 <= 2*x[0] + x[1] <= 1   (i.e. 2*x[0] + x[1] == 1, an equality)
lin_constr = LinearConstraint([[1, 2], [2, 1]], [-np.inf, 1], [1, 1])


# Nonlinear constraint expressions; both share the x[0]**2 term.
def nonlin_constr(x):
    """
    Evaluate the two nonlinear constraint expressions.

    Parameters:
    x (numpy.ndarray): Array of variables.

    Returns:
    list: Constraint values [x[0]**2 + x[1], x[0]**2 - x[1]].
    """
    squared = x[0] ** 2
    return [squared + x[1], squared - x[1]]


# Bounds for the nonlinear constraints: each constraint value must be <= 1.0,
# with no lower bound. The Bounds object serves only as a container for the
# lb/ub arrays handed to NonlinearConstraint.
nonlin_bnds = Bounds([-np.inf, -np.inf], [1.0, 1.0])
nonlinear_constraint = NonlinearConstraint(nonlin_constr, nonlin_bnds.lb, nonlin_bnds.ub)

# Objective values recorded by the callback, one entry per optimizer iteration.
history = []


# Optimizer callback: records the objective value at each iteration so the
# convergence curve can be plotted after the run.
def callback(*args):
    """
    Record the objective value for the optimizer's current iterate.

    Parameters:
    *args: Positional arguments supplied by the optimizer; the first is the
           current parameter vector (trust-constr also passes a state object).
    """
    current_x, *_ = args
    history.append(rosen(current_x))


# Run the constrained minimization with trust-constr, which supports box
# bounds plus linear and nonlinear constraints simultaneously.
res = minimize(rosen, x0, method='trust-constr', bounds=bnds,
               constraints=[lin_constr, nonlinear_constraint],
               options={'verbose': True, 'gtol': 1e-6, 'xtol': 1e-6, 'maxiter': 1000},
               callback=callback)

# Report the final iterate and its objective value.
print("Optimal solution:", res.x)
print("Optimal value:", res.fun)

# Check the linear constraints at the solution: A @ x must lie in [lb, ub].
lin_constr_val = np.dot(lin_constr.A, res.x)
for i, (lb, val, ub) in enumerate(zip(lin_constr.lb, lin_constr_val, lin_constr.ub)):
    print(f"Linear constraint {i + 1}: {lb} <= {val} <= {ub} -> {lb <= val <= ub}")

# Check the nonlinear constraint values at the solution against their bounds.
nonlin_constr_val = nonlin_constr(res.x)
for i, (lb, val, ub) in enumerate(zip(nonlin_bnds.lb, nonlin_constr_val, nonlin_bnds.ub)):
    print(f"Nonlinear constraint {i + 1}: {lb} <= {val} <= {ub} -> {lb <= val <= ub}")

# Plot the recorded objective values to visualize convergence.
plt.plot(history)
plt.xlabel('Iteration')
plt.ylabel('Function Value')
plt.title('Optimization Process of Rosenbrock Function')
plt.grid(True)
plt.show()
