#%%
# conda activate OpenLabCluster
import numpy as np
import torch
import torch.nn as nn
import h5py
import os
import os.path as osp
from torch.utils.data import DataLoader, TensorDataset, SubsetRandomSampler
import tqdm
from openlabcluster.training_utils.ssl.SeqModel import SemiSeq2Seq, seq2seq,DecoderRNN
from openlabcluster.training_utils.ssl.data_loader import SupDataset, pad_collate_iter
from openlabcluster.utils import auxiliaryfunctions
import lilab.OpenLabCluster_train.model
from torch import optim
from openlabcluster.training_utils.ssl.utilities import load_model
from openlabcluster.training_utils.ssl.seq_train import training
from pathlib import Path
import sys

project = '/DATA/taoxianming/rat/data/Mix_analysis/SexAgeDay55andzzcWTinAUT_MMFF/result32/olc-iter4-2024-05-27'
# Pretrained seq2seq checkpoint used to warm-start the encoder/decoder below.
model_path0 = '/DATA/taoxianming/rat/data/Mix_analysis/SexAgeDay55andzzcWTinAUT_MMFF/result32/olc-iter1-2024-05-23/models/FWPCA0.00_P100_en3_hid30_epoch2'


class FAKE:
    """Bare namespace standing in for the OpenLabCluster GUI/controller object."""
    pass


cfg = osp.join(project, 'config.yaml')
cfg_data = auxiliaryfunctions.read_config(cfg)
self = FAKE()
self.cfg = cfg
self.model_name = model_path0
self.cfg_data = cfg_data

# Pull hyper-parameters out of the project config.
num_class = cfg_data['num_class'][0]
root_path = cfg_data["project_path"]
batch_size = cfg_data['batch_size']
feature_length = cfg_data['feature_length']
hidden_size = cfg_data['hidden_size']
cla_dim = cfg_data['cla_dim']
en_num_layers = cfg_data['en_num_layers']
de_num_layers = cfg_data['de_num_layers']
cla_num_layers = cfg_data['cla_num_layers']
fix_state = cfg_data['fix_state']
fix_weight = cfg_data['fix_weight']
teacher_force = cfg_data['teacher_force']
device = 'cuda:0'




# Labels are optional: fall back to None (unlabeled) when label.npy is absent.
_label_file = os.path.join(self.cfg_data['label_path'], 'label.npy')
label_path = _label_file if os.path.exists(_label_file) else None

dataset_traintest = SupDataset(root_path, self.cfg_data['data_path'], self.cfg_data['train'], label_path)
nsample_traintest = len(dataset_traintest)
nsample_train = int(nsample_traintest * 0.9)

# Deterministic 90/10 train/test split via a seeded shuffle of all indices.
random_seed = 11111
np.random.seed(random_seed)
indices_shuffle = np.arange(nsample_traintest)
np.random.shuffle(indices_shuffle)
indices_train = indices_shuffle[:nsample_train]
indices_test = indices_shuffle[nsample_train:]


# Separate train and validation loaders drawing from the same dataset.
train_sampler = SubsetRandomSampler(indices_train)
train_loader = DataLoader(dataset_traintest, batch_size=batch_size,
                          sampler=train_sampler, collate_fn=pad_collate_iter)
test_sampler = SubsetRandomSampler(indices_test)
test_loader = DataLoader(dataset_traintest, batch_size=batch_size,
                         sampler=test_sampler, collate_fn=pad_collate_iter)

print("training data length: %d, train_loader: %d" % (len(indices_train), len(train_loader)))
print("testing data length: %d, test_loader: %d" % (len(indices_test), len(test_loader)))

phase = 'PC'
fix_weight = True  # force the fixed-weight variant regardless of the config value

# Network tag. Same precedence as the original if-chain: FS wins when both
# flags are set, plain 'O' when neither is fixed.
network = ('FS' if fix_state else 'FW' if fix_weight else 'O') + phase

# hyperparameters
learning_rate = self.cfg_data['learning_rate']
epoch = self.cfg_data["su_epoch"]
epoch = 3  # override: train for only a few (2-3) epochs


model: nn.Module = SemiSeq2Seq(feature_length, hidden_size, feature_length, batch_size,
                               cla_dim, en_num_layers, de_num_layers, cla_num_layers,
                               fix_state, fix_weight, teacher_force, device).to(device)

# Re-initialize every 2-D weight matrix with a small uniform distribution.
# Only the seq2seq sub-module may be overwritten by the checkpoint below, so
# the classifier head keeps this initialization.
with torch.no_grad():
    for child in model.children():
        print(child)
        for param in child.parameters():
            if param.dim() == 2:
                # nn.init.xavier_uniform_(param)
                nn.init.uniform_(param, a=-0.05, b=0.05)

if model_path0 is not None:
    # Bug fix: load with map_location so the checkpoint can be restored even
    # when it was saved on a device that is not available here (the original
    # torch.load would crash in that case).
    model_dict = torch.load(model_path0, map_location=device)
    # The checkpoint holds a plain seq2seq state dict, so it is loaded into the
    # `seq` sub-module only (the dead `else` full-model branch was removed).
    print('load from seq2seq model')
    model.seq.load_state_dict(model_dict['model_state_dict'])


k = 2  # top k accuracy
# for classification
percentage = 1
few_knn = False
# global variable
cla_dim = self.cfg_data['cla_dim']  # 0 non labeled class
print('network fix state=', fix_state)

optimizer = optim.Adam((p for p in model.parameters() if p.requires_grad), lr=learning_rate)

# Disabled: optionally resume optimizer/model state from self.model_name.
if False:
    model, optimizer = load_model(self.model_name, model, optimizer, device)

criterion_seq = nn.L1Loss(reduction='none')   # per-element reconstruction loss
criterion_cla = nn.CrossEntropyLoss(reduction='sum')  # summed classification loss

alpha = 0.1

# Shared tag for the output/model filenames of this run.
_run_tag = '%sA%.2f_P%d_en%d_hid%d' % (network, alpha, percentage * 100, en_num_layers, hidden_size)
file_output = open(os.path.join(root_path, self.cfg_data['output_path'], _run_tag + '.txt'), 'w')
file_test_output = open(os.path.join(root_path, self.cfg_data['output_path'], _run_tag + '_test.txt'), 'w')
model_prefix = os.path.join(root_path, self.cfg_data['model_path'], _run_tag)
model_path = Path(model_prefix).parent
pre = Path(model_prefix).name


def lambda1(ith_epoch):
    # Exponential learning-rate decay: multiply the base LR by 0.95 per epoch.
    return 0.95 ** ith_epoch


model_scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda1)
past_loss = sys.float_info.max
self.train_loader = train_loader
self.hidden_size = hidden_size
self.num_class = num_class
self.alpha = alpha
self.few_knn = few_knn
self.device = device
print_every = 1
self.canvas = None


def testing(file_test_output):
    """Evaluate the module-level `model` on `test_loader`.

    Writes one line "seq cla acc" to *file_test_output* and returns the
    classification accuracy over the labeled test samples. Reads the
    module-level globals: model, test_loader, device, criterion_cla,
    criterion_seq.

    NOTE(review): model.eval() is deliberately not called (matches the
    original script) -- confirm dropout/batchnorm behavior is intended.
    """
    pred_label_l = []
    semi_label_l = []
    cla_loss_l = []
    seq_loss_l = []
    for data, seq_len, _, semi_label, _ in tqdm.tqdm(test_loader):
        input_tensor = data.to(device)
        # as_tensor avoids the copy/UserWarning when semi_label is already a tensor.
        semi_label = torch.as_tensor(semi_label, dtype=torch.long).to(device)
        with torch.no_grad():
            en_hi, de_out, cla_pre = model(input_tensor, seq_len)
            pred_label_l.extend(cla_pre.argmax(1).tolist())
            # Labels are 1-based with 0 = unlabeled; shift so unlabeled -> -1.
            semi_label_l.extend((semi_label - 1).tolist())
            label = semi_label
            if sum(label != 0) != 0:
                cla_loss = criterion_cla(cla_pre[label != 0], label[label != 0] - 1)
                cla_loss_val = cla_loss.item()
            else:
                # Bug fix: the original set cla_loss = 0 (a plain int) and then
                # called cla_loss.item(), raising AttributeError on any batch
                # with no labeled samples.
                cla_loss_val = 0.0

            # Per-sample count of valid (non-padded) timesteps, used to average
            # the reconstruction loss over real frames only.
            mask = torch.zeros([len(seq_len), max(seq_len)]).to(device)
            for ith_batch in range(len(seq_len)):
                mask[ith_batch, 0:seq_len[ith_batch]] = 1
            mask = torch.sum(mask, 1)

            seq_loss = torch.sum(criterion_seq(de_out, input_tensor), 2)
            seq_loss = torch.mean(torch.sum(seq_loss, 1) / mask)
            cla_loss_l.append(cla_loss_val)
            seq_loss_l.append(seq_loss.item())

    seq = np.mean(seq_loss_l)
    cla = np.mean(cla_loss_l)

    pred_label_l = np.array(pred_label_l)
    semi_label_l = np.array(semi_label_l)
    # Accuracy over labeled samples only (shifted labels >= 0). Guard against
    # division by zero when the test split contains no labels.
    n_labeled = np.sum(semi_label_l >= 0)
    acc_test = np.sum(pred_label_l == semi_label_l) / n_labeled if n_labeled else 0.0

    file_test_output.write(f"{seq:.3f} {cla:.3f} {acc_test:.3f}\n")
    print(f"Test clas loss: {cla:.3f} seq_loss:{seq:.3f} acc:{acc_test:.3f}")
    return acc_test



for ith_epoch in range(epoch):
    # One supervised epoch; `training` checkpoints the model and returns the
    # updated best loss, the saved checkpoint name (if any) and train accuracy.
    past_loss, model_name, self.acc = training(
        ith_epoch, epoch, train_loader, print_every, self.canvas,
        model, optimizer, criterion_seq, criterion_cla, alpha, k, file_output,
        past_loss, model_path, pre, hidden_size, model_prefix, num_class,
        few_knn, device)
    acc_test = testing(file_test_output)

    # Record the latest checkpoint (when one was written) in the project config.
    cfg_update = {'tr_modelType': 'semi_seq2seq'}
    if model_name:
        cfg_update['tr_modelName'] = model_name
    auxiliaryfunctions.edit_config(self.cfg, cfg_update)

    model_scheduler.step()
    file_output.flush()
    file_test_output.flush()


# Manual sanity check (intentionally disabled): feeds a random batch through
# both forward_test and forward and asserts their shared outputs agree.
# Flip `False` to True to run it interactively.
if False:
    input_tensor, seq_len, batch_size = torch.rand((64,24,32)).to(device), [24]*64, 64



    inter, deout, pred, deout_seq = model.forward_test(input_tensor, seq_len)
    inter0, deout0, pred0 = model.forward(input_tensor, seq_len)
    assert torch.all(inter0 == inter)
    assert torch.all(deout0 == deout)
    assert torch.all(pred0 == pred)