import pathlib

import torch
from checkpoint_manager import CheckpointManager
from checkpoint import Checkpoint
import torch.utils.data
import itertools
import torch_plus
from lvminn import Lvminn
from scaler import Scaler
from lotka_volterra_dataset import LotkaVolterraDataset


def main():
    """Train the Lotka-Volterra PINN, resuming from the latest checkpoint.

    Loads the dataset and the most recent checkpoint (if any), then runs an
    endless Adam training loop combining a physics-residual loss with a
    heavily-weighted data-fitting loss, persisting model/optimizer/RNG state
    every 500 epochs. Runs until interrupted.
    """
    checkpoint_dir = pathlib.Path("./checkpoints")
    checkpoint_manager = CheckpointManager(checkpoint_dir)
    dataset = LotkaVolterraDataset.load(checkpoint_dir / "data.pt")
    # `checkpoint` is falsy when starting fresh.
    # NOTE(review): assumes last_existed_loaded returns a sane starting epoch
    # (e.g. 0 or -1) when no checkpoint exists — confirm in CheckpointManager.
    epoch, checkpoint = checkpoint_manager.last_existed_loaded(Checkpoint.load)

    t_min = float(dataset.t_min())
    t_max = float(dataset.t_max())

    # Dedicated RNG so collocation-point sampling is reproducible and its
    # state can be saved/restored along with the model.
    random = torch.Generator()
    random.manual_seed(1234)
    if checkpoint:
        random.set_state(checkpoint.random_state())

    model = Lvminn(Scaler(t_min, t_max))
    if checkpoint:
        model.load_state_dict(checkpoint.model_state())
    # NOTE: an earlier experiment froze a2 at the ground-truth value:
    #   model.a2.data = torch_plus.as_tensor(dataset.problem().a2())
    #   model.a2.requires_grad_(False)

    optimizer = torch.optim.Adam(model.parameters())
    if checkpoint:
        optimizer.load_state_dict(checkpoint.optimizer_state())

    model.train()

    def criterion() -> torch.Tensor:
        """Total loss: physics residual plus 1000x-weighted data misfit."""

        def physics_loss():
            # Fresh collocation points each step, sampled uniformly over
            # [t_min, t_max]; the trailing True presumably enables
            # requires_grad so we can differentiate wrt t — see torch_plus.
            t = torch_plus.rand(1000,
                                t_min, t_max,
                                random, True)

            f = model(t)
            x1 = f[:, 0]
            x2 = f[:, 1]
            d_x1_d_t, = torch_plus.grad(x1, [t], True, True)
            d_x2_d_t, = torch_plus.grad(x2, [t], True, True)

            # Lotka-Volterra residuals, zero when the ODEs hold:
            #   dx1/dt = a1*x1 - b1*x1*x2
            #   dx2/dt = a2*x1*x2 - b2*x2
            loss1 = d_x1_d_t - model.a1 * x1 + model.b1 * x1 * x2
            loss2 = d_x2_d_t - model.a2 * x1 * x2 + model.b2 * x2

            return (torch.mean(loss1 ** 2) + torch.mean(loss2 ** 2)) / 2

        def data_loss():
            # Mean-squared misfit of each predicted component against its
            # observed trajectory (each component has its own time grid).
            loss1 = model(dataset.t1())[:, 0] - dataset.x1()
            loss2 = model(dataset.t2())[:, 1] - dataset.x2()
            return (torch.mean(loss1 ** 2) + torch.mean(loss2 ** 2)) / 2

        # The data term dominates by design (weight 1000 vs 1).
        return physics_loss() * 1 + data_loss() * 1000

    # Endless training loop; resume numbering right after the last saved epoch.
    for epoch in itertools.count(epoch + 1):
        optimizer.zero_grad()
        loss = criterion()
        loss.backward()
        optimizer.step()

        print(f"Epoch {epoch}: "
              f"a1={float(model.a1):.2e} "
              f"b1={float(model.b1):.2e} "
              f"a2={float(model.a2):.2e} "
              f"b2={float(model.b2):.2e} "
              f"loss={loss}")
        # Persist the full training state (model, RNG, optimizer) so the run
        # can be resumed exactly.
        if epoch % 500 == 0:
            checkpoint = Checkpoint(
                model.state_dict(),
                random.get_state(),
                optimizer.state_dict()
            )
            checkpoint.save(checkpoint_manager.new(epoch))


# Script entry point: run training when executed directly (not on import).
if __name__ == "__main__":
    main()
