import time
import os

from dataset_004 import train_loader, val_data, test_data, origin_data
from model import Model
import torch.nn.functional as F
from fasttext import FTModel
import torch
from gragh_tools import draw

# --- Model and optimizer setup -------------------------------------------
EMBED_DIM = 768  # input node-feature dimension (looks like a BERT hidden size — TODO confirm)

# Edge-prediction model; all weights live on the GPU for the whole run.
model = Model(EMBED_DIM).to("cuda:0")

optimizer = torch.optim.RMSprop(
    model.parameters(),
    lr=0.0001,
    alpha=0.99,
    eps=1e-08,
    weight_decay=5e-4,  # L2 regularization
    momentum=0,
    centered=False,
)

best_loss = float("inf")  # lowest epoch validation loss seen so far
MODEL_PATH = "/home/Dyf/code/storage_models/alarms/"
# Create the checkpoint directory up front so torch.save cannot fail later
# with FileNotFoundError on a fresh machine.
os.makedirs(MODEL_PATH, exist_ok=True)

idx = 0  # global batch counter across all epochs
# Train for a fixed number of epochs; checkpoint on validation improvement.
for epoch in range(50):
    try:
        model.train()
        epoch_loss = 0.0  # validation loss accumulated over this epoch's batches
        for batch in train_loader:
            idx += 1
            optimizer.zero_grad()

            # Training step: edge logits vs. binary edge labels.
            out = model(batch)
            label = batch.edge_label.to(torch.float).to("cuda")
            loss = F.binary_cross_entropy_with_logits(out, label)
            # retain_graph=True was dropped: each batch builds a fresh graph,
            # so retaining it only grew memory with no benefit.
            loss.backward()
            optimizer.step()

            # Per-batch validation pass. no_grad() stops autograd from
            # recording this forward, which previously accumulated graph
            # memory every batch. NOTE(review): validating the full val set
            # after every training batch is expensive — consider moving it
            # to once per epoch.
            with torch.no_grad():
                val_out = model(val_data)
                edge_label = val_data.edge_label.to(torch.float).to("cuda")
                v_loss = F.binary_cross_entropy_with_logits(val_out, edge_label)
            epoch_loss += v_loss.item()

        print("batch {} val_all_loss {}".format(epoch, epoch_loss))
        # Save a checkpoint whenever the accumulated validation loss improves.
        if epoch_loss < best_loss:
            best_loss = epoch_loss
            # Format the filename first, then join — clearer than formatting
            # the already-joined path.
            torch.save(model, os.path.join(MODEL_PATH, "Alarms_{}.pth".format(epoch)))
            print("=> saved best model", epoch, epoch_loss)
    except Exception:
        # Deliberate top-level boundary: log the failure and continue with
        # the next epoch rather than aborting the whole multi-hour run.
        import traceback
        traceback.print_exc()

# --- Final evaluation on the held-out test split --------------------------
model.eval()
# Inference only: no_grad() avoids building an autograd graph for the
# test forward pass.
with torch.no_grad():
    out = model(test_data)
    edge_label = test_data.edge_label.to(torch.float).to("cuda")
    t_loss = F.binary_cross_entropy_with_logits(out, edge_label)
print("Test_loss {}".format(t_loss.item()))