import numpy as np
import utils.Tensor

"""
Optimizor methods in order to get grad.
"""


def numerical_gradient(f, x, *target):
    """Central-difference numerical gradient of f at x: (f(x+h) - f(x-h)) / (2h).

    Any extra ``*target`` arguments are forwarded to ``f`` unchanged.

    NOTE(review): ``import utils.Tensor`` at the top of the file binds the
    *module* ``utils.Tensor``, so calling ``utils.Tensor(...)`` here only works
    if that name actually resolves to the Tensor class (e.g. re-exported in
    ``utils/__init__.py``) — verify; ``from utils import Tensor`` or
    ``from utils.Tensor import Tensor`` may be intended.
    NOTE(review): h is added to *every* element of x simultaneously, so this
    yields a true per-element gradient only when f acts elementwise — confirm
    that is the intended contract.
    """
    h = utils.Tensor(np.full(x.shape, 1e-4))  # step size 0.0001 for every element
    return (f(x + h, *target) - f(x - h, *target)) / (utils.Tensor(2) * h)


class SGD:
    """Vanilla stochastic gradient descent: w <- w - lr * dw."""

    def __init__(self, lr):
        # Learning rate applied uniformly to every parameter.
        self.lr = lr

    def step(self, w, dw):
        """Update each parameter in ``w`` in place from its gradient in ``dw``.

        Uses ``-=`` so numpy-array parameters are mutated in place and any
        outside references to them observe the update.
        """
        for idx, grad in enumerate(dw):
            w[idx] -= self.lr * grad


class Momentum:
    """SGD with classical momentum: v <- momentum*v - lr*dw;  w <- w + v."""

    def __init__(self, lr, momentum=0.9):
        self.lr = lr
        self.momentum = momentum
        # Per-parameter velocity buffers; allocated lazily on the first step
        # so the optimizer does not need the parameter shapes up front.
        self.v = None

    def step(self, w, dw):
        """Update each parameter in ``w`` in place using momentum velocities."""
        if self.v is None:
            self.v = [np.zeros_like(param) for param in w]

        for idx, grad in enumerate(dw):
            self.v[idx] = self.momentum * self.v[idx] - self.lr * grad
            w[idx] += self.v[idx]


class AdaGrad:
    """Adaptive gradient descent: each element's effective learning rate
    shrinks with its accumulated squared-gradient history."""

    def __init__(self, lr):
        self.lr = lr
        # Running sum of squared gradients per parameter; allocated lazily
        # on the first step so parameter shapes need not be known up front.
        self.h = None

    def step(self, w, dw):
        """Update each parameter in ``w`` in place with AdaGrad scaling."""
        if self.h is None:
            self.h = [np.zeros_like(param) for param in w]

        for idx, grad in enumerate(dw):
            self.h[idx] += grad * grad
            # 1e-6 keeps the denominator nonzero on the very first update.
            w[idx] -= self.lr / (np.sqrt(self.h[idx]) + 1e-6) * grad
