# -*- coding: utf-8 -*-
# input: (batch_size, 3, seq_len)
# output: (batch_size, 6, seq_len - 199)
# Naming figure by id.
import os
import sys
from typing import Generator
from tqdm import tqdm
import math
import pymongo
import numpy as np
import torch
from matplotlib import pyplot as plt
from torch._C import device

sys.path.append("/gpfs/scratch/chgwang/XI/Scripts/Refactoring_1/MLModel")
sys.path.append("/gpfs/scratch/chgwang/XI/Scripts/Refactoring_1/getData")
import matplotlib
import ParallelNet_1d  # type: ignore
import FedSeq # type: ignore
matplotlib.use("Agg")


def insert_modeled_label(model,
                        map_ids,
                        num_worker,
                        pbar,
                        col:pymongo.collection.Collection, 
                        device:torch.device):
    """Run the model over every offset window for one batch of documents and
    write the stacked predictions back into MongoDB.

    Args:
        model: trained network; called as ``model(batch)`` and expected to
            yield a ``(batch, 6)`` (or ``(batch, 6, 1)``) prediction per call.
        map_ids: the ``_id`` values of the documents in this batch.
        num_worker: worker count forwarded to ``FedSeq.getBatchData``.
        pbar: tqdm progress bar, advanced once per offset.
        col: collection whose documents receive a ``modeled_label`` field.
        device: device the input batch is moved to before inference.
    """
    model.eval()
    # Sliding-window offsets; the count (402) must match eval_times in __main__.
    offset_list = range(0, 402, 1)
    modeled_label_list = []
    for offset in offset_list:
        pbar.update(1)
        out_seq, _ = FedSeq.getBatchData(map_ids, num_worker, offset=offset)
        out_seq = out_seq.to(device)
        with torch.no_grad():
            modeled_label = model(out_seq)
            # Reshape explicitly instead of np.squeeze(): squeeze would also
            # drop the batch axis when the last batch holds a single id,
            # which breaks the (batch, 6) layout the transpose below expects.
            modeled_label = modeled_label.cpu().numpy().reshape(len(map_ids), 6)
            modeled_label_list.append(modeled_label)
    # (num_offsets, batch, 6) -> (batch, 6, num_offsets)
    modeled_labels = np.stack(modeled_label_list)
    modeled_labels = np.transpose(modeled_labels, (1, 2, 0))
    assert modeled_labels.shape == (len(map_ids), 6, len(offset_list))
    for col_id, modeled_label_sig in zip(map_ids, modeled_labels):
        modeled_label_sig = modeled_label_sig.tolist()
        col.find_one_and_update({"_id":col_id},
                                {"$set":{"modeled_label":modeled_label_sig}})


if __name__ == "__main__":
    # Build the 6-output network, load the checkpoint on CPU, then move to GPU.
    model = ParallelNet_1d.ParallelNet_1d(output_size=6)
    model_path = "/gpfs/scratch/chgwang/XI/DataBase/Model_1d_20/PN-18-0.959-0.942.pt"
    trained_dict = torch.load(model_path, map_location="cpu")
    model.load_state_dict(trained_dict, strict=True)
    if torch.cuda.is_available():
        # NOTE(review): hard-pinned to GPU 1; DataParallel over one device is
        # effectively a pass-through wrapper — confirm it is needed here.
        device = torch.device("cuda:1")
        model = model.cuda(device)
        model = torch.nn.DataParallel(model, device_ids=[1])
    else:
        device = torch.device("cpu")
    client = pymongo.MongoClient("mongodb://127.0.0.1:27017/")
    db = client["Power_Fault"]
    col_sour = db["data_sour"]
    ids = col_sour.distinct("_id")
    batch_size = 64
    num_worker = 1
    batch_amount = math.ceil(len(ids) / batch_size)
    # One progress tick per (batch, offset) pair; 402 must match the offset
    # range used inside insert_modeled_label.
    eval_times = batch_amount * 402
    pbar = tqdm(total=eval_times)
    for i in range(batch_amount):
        # Python slicing already clamps the stop index; no min() needed.
        map_ids = ids[i * batch_size:(i + 1) * batch_size]
        insert_modeled_label(model, map_ids, num_worker,
                             pbar, col_sour, device=device)
    pbar.close()

