import numpy as np
import optimizer_common


def optimize(f, g, h, x0, max_iter, eps, ls):
    """
    Damped Newton optimizer.

    Repeatedly solves H(x) d = -g(x) for the Newton direction d and
    delegates step-length selection to ``optimizer_common.step``.

    :param f: objective function
    :param g: gradient of f
    :param h: hessian of f
    :param x0: starting point
    :param max_iter: max iterations
    :param eps: precision — threshold on the gradient norm ||g(x)||
    :param ls: line search (passed through to optimizer_common.step)
    :return: x, Newton iterations, line search iterations, terminate reason
    :raises numpy.linalg.LinAlgError: if the Hessian is singular at an iterate
    """
    it_ls_sum = 0
    # Track the best iterate seen so far; for a non-convex objective the
    # final iterate is not guaranteed to be the best one visited.
    x_opt = x = x0
    f_opt = fk = f(x)
    for it in range(max_iter):
        print("#Iteration %d" % it)
        gk = g(x)
        # Stationarity test: ||g(x)|| < eps.  np.linalg.norm replaces the
        # original abs(np.sqrt(np.dot(gk, gk))) — the abs was redundant,
        # since the square root of a dot product is already non-negative.
        if np.linalg.norm(gk) < eps:
            return x_opt, it + 1, it_ls_sum, 'small gradient'
        # Newton direction: solve H d = -g rather than forming H^{-1}.
        d = np.linalg.solve(h(x), -gk)
        stop, x_new, f_new, it_ls = optimizer_common.step(f, g, x, d, fk, gk, eps, ls)
        it_ls_sum += it_ls
        if stop:
            # Line search signalled it could not improve the objective.
            # NOTE(review): x_new/f_new from this final step are discarded —
            # presumably step() only sets stop when no progress was made;
            # confirm against optimizer_common.step's contract.
            return x_opt, it + 1, it_ls_sum, "no further improvement"
        fk = f_new
        x = x_new
        if fk < f_opt:
            f_opt = fk
            x_opt = x
    print('###WARNING: failed to converge')
    return x_opt, max_iter, it_ls_sum, 'expired'
