#%%
import pickle
import numpy as np
import torch
import torch.nn as nn
import h5py
import os
import os.path as osp
from torch.utils.data import DataLoader, TensorDataset
import tqdm
from openlabcluster.training_utils.ssl.SeqModel import SemiSeq2Seq
import lilab.OpenLabCluster_train.model
from sklearn.decomposition import PCA
import umap
from lilab.openlabcluster_postprocess.s1a_clipNames_inplace_parse import parse_name
import matplotlib.pyplot as plt

# ---- hyper-parameters, model construction, checkpoint and dataset loading ----
# NOTE: feature_length is deliberately unpacked twice — SemiSeq2Seq takes the
# input feature size and the (equal) decoder output size as separate arguments.
(feature_length, hidden_size, feature_length, batch_size,
    cla_dim, en_num_layers, de_num_layers, cla_num_layers, fix_state, fix_weight, teacher_force, device) = \
    (32, 30, 32, 64, [34], 3, 1, 1, False, False, False, 'cuda:0')

model:nn.Module = SemiSeq2Seq(feature_length, hidden_size, feature_length, batch_size,
                                cla_dim, en_num_layers, de_num_layers, cla_num_layers, fix_state, fix_weight, teacher_force, device).to(device)


model_path = '/DATA/taoxianming/rat/data/Mix_analysis/SexAgeDay55andzzcWTinAUT_MMFF/result32/olc-iter3-2024-05-27/models/FWPCA0.10_P100_en3_hid30_epoch2'
# map_location keeps the load working even when the checkpoint was saved on a
# different CUDA device than the one configured above.
model_dict = torch.load(model_path, map_location=device)
model.load_state_dict(model_dict['model_state_dict'])

project = osp.dirname(osp.dirname(model_path))
clipNames_file = osp.join(project, 'videos/clipNames.txt')
# context manager so the file handle is closed (the original leaked it)
with open(clipNames_file, 'r') as f:
    clipNames = [osp.basename(line.strip()) for line in f]
data_h5 = osp.join(project, 'datasets/data.h5')
# one dataset per clip, keyed '0'..'N-1'; assumes all clips share one shape
# so np.array can stack them — TODO confirm against the h5 writer
with h5py.File(data_h5, 'r') as hf:
    label = np.array(hf['label'])
    data = np.array([np.array(hf[f'{i}'], dtype=np.float32) for i in range(len(label))])
dataset = TensorDataset(torch.from_numpy(data).float(), torch.from_numpy(label).long())
dataLoader = DataLoader(dataset, batch_size=64, shuffle=False)


#%% model prediction
# Run the network over the whole dataset, collecting the 1-based class
# prediction per clip plus two latent representations (decoder output
# sequence and encoder hidden state).
model.eval()  # bug fix: put dropout/batch-norm layers into inference mode
label_pred = []
deout_seq_l = []
encoder_l = []
with torch.no_grad():
    for feat32_B, label_B in tqdm.tqdm(dataLoader):
        feat32_B = feat32_B.to(device)
        label_B = label_B.to(device)

        # [24]*len(...) supplies a fixed per-sample sequence length of 24
        inter, deout, pred, deout_seq, encoder_hidden = model.forward_test(feat32_B, [24] * len(feat32_B))
        # +1 because stored labels are 1-based (0 marks "unlabeled")
        label_pred_B = torch.argmax(pred, dim=1) + 1
        label_pred.extend(label_pred_B.cpu().numpy().ravel().tolist())
        deout_seq_l.append(deout_seq.cpu().numpy())
        # NOTE(review): assumes axis 0 of encoder_hidden[0] is the batch dim
        # so the concatenate below stacks samples — confirm in SemiSeq2Seq
        encoder_l.append(encoder_hidden[0].cpu().numpy())

# flatten the per-batch latents to (n_samples, -1)
deout_seq_l = np.concatenate(deout_seq_l, axis=0)
deout_seq_l = deout_seq_l.reshape(deout_seq_l.shape[0], -1)
encoder_l = np.concatenate(encoder_l, axis=0)
encoder_l = encoder_l.reshape(encoder_l.shape[0], -1)

label_pred = np.array(label_pred)

# accuracy is reported only over the labeled subset (label > 0)
print('All sample: ', len(label))
print('Labeled sample: ', np.sum(label>0))
print('Unlabeled sample: ', np.sum(label==0))
correct_N = np.sum((label_pred == label) & (label>0))
print('Correct labeled sample: ', correct_N)
print('Correct probability labeled sample: %.2f' % (correct_N / np.sum(label>0)))

#%% reduce the chosen latent to 2-D for visualization
pca = PCA()
reducer = umap.UMAP(random_state=1000)  # fixed seed => reproducible embedding
if False:
    # toggle: use the encoder hidden state as the latent feature
    output_dir = osp.join(project, 'output/semisupervise-enc-2')
    feat_latent = encoder_l
else:
    # default: use the flattened decoder output sequence
    output_dir = osp.join(project, 'output/semisupervise-decseq-2')
    feat_latent = deout_seq_l

feat_latent_pca = pca.fit_transform(feat_latent)
explained_var = pca.explained_variance_ratio_
# bug fix: keep enough PCs to actually reach >=90% cumulative variance.
# The old `np.sum(cumsum < 0.9)` stopped one component short (cumulative
# variance of the kept set was still below 0.9).
pc_n_90 = min(int(np.searchsorted(explained_var.cumsum(), 0.9)) + 1,
              len(explained_var))
feat_latent_pca = feat_latent_pca[:, :pc_n_90]
embedding_d2 = reducer.fit_transform(feat_latent_pca)

# Bundle everything downstream consumers of the .clippredpkl need.
df_clip = parse_name(clipNames)
outdata_dict = dict(
    ncluster=label_pred.max(),              # highest 1-based class id observed
    embedding=feat_latent_pca,              # PCA-reduced latent features
    embedding_d2=embedding_d2.astype(np.float64),  # 2-D UMAP embedding
    cluster_labels=label_pred,
    ntwin=24,                               # time-window length used at inference
    clipNames=np.array(clipNames),
    df_clipNames=df_clip,
)

os.makedirs(output_dir, exist_ok=True)
#%% scatter plot of the 2-D embedding, then persist all artifacts
plt.figure(figsize=(12, 10))
# plot every second point to keep the figure light
plt.scatter(embedding_d2[::2, 0], embedding_d2[::2, 1], c=label_pred[::2], s=1, cmap='hsv')
plt.colorbar()
plt.axis('off')
plt.savefig(osp.join(output_dir, f'olc-iter1-2024-05-23_semiseq2seq_pca{pc_n_90}.png'))

# NOTE(review): the 'olc-iter1-2024-05-23' prefix does not match the
# 'olc-iter3-2024-05-27' checkpoint loaded above — confirm it is intentional.
outdata_path = osp.join(output_dir, f'olc-iter1-2024-05-23_semiseq2seq_pca{pc_n_90}.clippredpkl')
# bug fix: the original never closed the pickle file handle
with open(outdata_path, 'wb') as f:
    pickle.dump(outdata_dict, f)

np.save(osp.join(output_dir, 'olc-iter2-2024-05-27_deout_seq.npy'), deout_seq_l)
np.save(osp.join(output_dir, 'olc-iter2-2024-05-27_encoder.npy'), encoder_l)
