import sys
# The sys.path hack must run before the project-local imports below resolve.
sys.path.append("../../")
sys.path.append("../")

import os
import pickle
import shutil

import fitlog
import torch
import torch.nn as nn
from sklearn.feature_extraction.text import TfidfVectorizer
from torch.utils.data import DataLoader
from tqdm import trange

from Dataloader.dataloader_utils import Sample_data, Merge_data, Lemma_Factory
from Dataloader.twitterloader import TwitterSet, BiGCNTwitterSet, TreeTwitterSet
from SentModel.Sent2Vec import TFIDFBasedVec
from PropModel.GraphPropagation import BiGCN, BU_RvNN
from RumdetecFramework.GraphRumorDect import BiGCNRumorDetec, RvNNRumorDetec
from RumdetecFramework.BaseRumorFramework import RumorDetection
from RumdetecFramework.InstanceReweighting import ImprovedOnlineTrainer

def Features(data, model:RumorDetection):
    """Encode every sample of `data` into a feature vector via model.Batch2Vecs.

    Runs in eval mode under torch.no_grad(), 20 samples per batch, and
    restores train mode before returning.

    Returns:
        Tensor concatenated along dim 0 — presumably one row per sample
        (depends on Batch2Vecs; confirm against the model implementation).
    """
    chunk = 20
    model.eval()
    with torch.no_grad():
        pieces = []
        for start in trange(0, len(data), chunk):
            stop = min(start + chunk, len(data))
            raw_batch = data.collate_raw_batch([data[k] for k in range(start, stop)])
            pieces.append(model.Batch2Vecs(raw_batch))
    model.train()
    return torch.cat(pieces, dim=0)

def FeatureMatch(new_domain, old_domain, model:RumorDetection, convey_fn=None):
    """Rank `old_domain` samples by feature-space closeness to `new_domain`.

    Computes pairwise cosine similarity between labeled (old-domain) and
    unlabeled (new-domain) feature vectors; a pair counts as a "vote" when
    its similarity exceeds the 75th percentile of all similarities.

    Args:
        new_domain: target-domain (unlabeled) dataset.
        old_domain: source-domain (labeled) dataset.
        model: detection model whose Batch2Vecs is used via Features().
        convey_fn: optional converter applied to both datasets first
            (e.g. Convert_2_BiGCNFormat).

    Returns:
        Indices into `old_domain`, sorted by ascending vote count — the
        best-matching source samples come last.
    """
    if convey_fn is not None:
        unlabel_set = convey_fn(new_domain)
        label_set = convey_fn(old_domain)
    else:
        unlabel_set = new_domain
        label_set = old_domain

    unlabel_vecs = Features(unlabel_set, model)
    labeled_vecs = Features(label_set, model)
    dot = labeled_vecs.matmul(unlabel_vecs.T)
    # Outer product of the two norm vectors gives the denominator matrix.
    # NOTE(review): an all-zero feature vector would make this 0 and yield
    # nan similarities — same as the original code; confirm inputs are nonzero.
    norm = labeled_vecs.norm(2, dim=1).unsqueeze(1).matmul(unlabel_vecs.norm(2, dim=1).unsqueeze(0))
    cosine = dot / norm

    # 75th percentile of all pairwise similarities, taken as the "close" threshold.
    cosine_all = cosine.reshape([-1]).cpu().numpy()
    quantile_75 = cosine_all[cosine_all.argsort()[int(len(cosine_all) * 0.75)]]
    # Fix idiom: use the > operator instead of calling cosine.__gt__ directly.
    votes = (cosine > quantile_75).int().sum(dim=1)
    # Fix: the sorted values were never used; keep only the indices.
    return votes.sort().indices

def WeakLabeling(model:RumorDetection, data, batch_size=20):
    """Pseudo-label `data` in place and return per-sample prediction entropy.

    Side effect: overwrites data.data_y with hard labels obtained by
    thresholding the predicted probabilities at 0.5.

    Args:
        model: trained rumor-detection model.
        data: dataset exposing collate_raw_batch; its data_y is replaced.
        batch_size: inference batch size.

    Returns:
        1-D tensor with one entropy value per sample (lower = more confident).
    """
    data_loader = DataLoader(data,
                             batch_size=batch_size,
                             shuffle=False,
                             collate_fn=data.collate_raw_batch)
    preds = []
    # Fix: run inference in eval mode (consistent with Features()); otherwise
    # dropout/batch-norm in train mode would make the pseudo-labels noisy.
    model.eval()
    with torch.no_grad():
        for batch in data_loader:
            preds.append(model.forward(batch))
    model.train()
    pred_tensor = torch.cat(preds)  # concatenate once and reuse (was computed twice)
    data.data_y = (pred_tensor > 0.5).long().tolist()
    # Shannon entropy: -sum_c p*log(p); |log p| == -log p for p in (0, 1].
    # NOTE(review): assumes predictions are probabilities; p == 0 gives
    # 0 * -inf = nan, exactly as in the original code.
    entropy = (pred_tensor.log().abs() * pred_tensor).sum(dim=1)
    return entropy

def obtain_Domain_set(tr_prefix, dev_prefix, te_prefix):
    """Load the source-domain training set and build the target domain.

    The target ("new") domain is the merge of the dev and test splits.
    Note: min_len filtering (min_len=5) is applied to the training split only.

    Returns:
        (training TwitterSet, merged dev+test dataset)
    """
    source_set = TwitterSet()
    source_set.load_data_fast(data_prefix=tr_prefix, min_len=5)
    dev_part, te_part = TwitterSet(), TwitterSet()
    dev_part.load_data_fast(data_prefix=dev_prefix)
    te_part.load_data_fast(data_prefix=te_prefix)
    return source_set, Merge_data(dev_part, te_part)

def Convert_2_BiGCNFormat(data):
    """Shallow-copy a TwitterSet-like dataset into a BiGCNTwitterSet.

    The underlying data/ID/length/label containers are shared (not copied),
    so mutations on the result are visible through the original.
    """
    converted = BiGCNTwitterSet()
    for attr in ("data", "data_ID", "data_len", "data_y"):
        setattr(converted, attr, getattr(data, attr))
    return converted

def Convert_2_BURvNNFormat(data):
    """Shallow-copy a TwitterSet-like dataset into a TreeTwitterSet.

    The underlying data/ID/length/label containers are shared (not copied),
    so mutations on the result are visible through the original.
    """
    converted = TreeTwitterSet()
    for attr in ("data", "data_ID", "data_len", "data_y"):
        setattr(converted, attr, getattr(data, attr))
    return converted

# Pretrained BU-RvNN checkpoints, one per held-out PHEME event.
# The numeric suffix in each filename records the accuracy at save time.
BuRvNN_model_Paths = ["../../saved/TFIDF_BURvNN_charliehebdo_0.79.pkl",
                      "../../saved/TFIDF_BURvNN_ferguson_0.73.pkl",
                      "../../saved/TFIDF_BURvNN_germanwings-crash_0.68.pkl",
                      "../../saved/TFIDF_BURvNN_ottawashooting_0.67.pkl",
                      "../../saved/TFIDF_BURvNN_sydneysiege_0.71.pkl"
                      ]

# Pretrained BiGCN checkpoints, index-aligned with BuRvNN_model_Paths
# (both are indexed by the same event index `i` below).
BiGCN_Paths = ["../../saved/TFIDF_BiGCN_charliehebdo_0.80.pkl",
               "../../saved/TFIDF_BiGCN_ferguson_0.73.pkl",
               "../../saved/TFIDF_BiGCN_germanwings-crash_0.69.pkl",
               "../../saved/TFIDF_BiGCN_ottawashooting_0.68.pkl",
               "../../saved/TFIDF_BiGCN_sydneysiege_0.70.pkl"
               ]


# Load the cached TF-IDF vectorizer, or fit one from scratch on the fold-1
# corpus and cache it for subsequent runs.
Tf_Idf_twitter_file = "../../saved/TfIdf_twitter.pkl"
if os.path.exists(Tf_Idf_twitter_file):
    with open(Tf_Idf_twitter_file, "rb") as fr:
        # NOTE(review): pickle.load on a local cache file — trusted input assumed.
        tv = pickle.load(fr)
else:
    # Fit on fold 1 only; the cached vocabulary is then reused for every fold.
    i = 1
    tr, dev = obtain_Domain_set("../../data/twitter_tr%d" % i,
                                "../../data/twitter_dev%d" % i,
                                  "../../data/twitter_te%d" % i)
    # One lemmatized document per tweet text, across both domains.
    lemma = Lemma_Factory()
    corpus = [" ".join(lemma(txt)) for data in [tr, dev] for ID in data.data_ID for txt in data.data[ID]['text']]
    tv = TfidfVectorizer(use_idf=True, smooth_idf=True, norm=None)
    _ = tv.fit_transform(corpus)  # fit only; the transformed matrix is discarded
    with open(Tf_Idf_twitter_file, "wb") as fw:
        pickle.dump(tv, fw, protocol=pickle.HIGHEST_PROTOCOL)

# Per-script log directory, recreated from scratch on every run.
# Fix: str.rstrip(".py") strips *characters* from the end, not the ".py"
# suffix — it would mangle names like "copy.py" -> "c"; use splitext instead.
log_dir = os.path.splitext(str(__file__))[0]
# Fix: use os/shutil APIs instead of shelling out via os.system.
if os.path.exists(log_dir):
    shutil.rmtree(log_dir)
os.makedirs(log_dir)

# BiGCN detector: TF-IDF-based sentence encoder (300-d GloVe embeddings,
# frozen) -> bidirectional GCN propagation (300 -> 256) -> linear classifier
# over a 1024-d graph representation, 2 output classes.
lvec = TFIDFBasedVec(tv, 20, embedding_size=300, w2v_dir="../../saved/glove_en/", emb_update=False)
prop = BiGCN(300, 256)
cls = nn.Linear(1024, 2)
BiGCN_model = BiGCNRumorDetec(lvec, prop, cls, batch_size=20, grad_accum_cnt=1)

# BU-RvNN detector: same sentence encoder setup, bottom-up recursive tree
# propagation (300 -> 256) -> linear classifier over 256-d, 2 classes.
lvec = TFIDFBasedVec(tv, 20, embedding_size=300, w2v_dir="../../saved/glove_en/", emb_update=False)
prop = BU_RvNN(300, 256)
cls = nn.Linear(256, 2)
BuRvNN_model = RvNNRumorDetec(lvec, prop, cls, batch_size=20, grad_accum_cnt=1)

# Target-domain budget: 20 labeled few-shot samples + 100 dev samples.
few_shot_cnt = 20
dev_cnt = 100
fitlog.set_log_dir("%s/" % log_dir, new_log=True)

# Fold index: selects both the data split and the matching pretrained
# checkpoints (BiGCN_Paths[i] / BuRvNN_model_Paths[i]).
i = 1
old_domain, new_domain = obtain_Domain_set("../../data/twitter_tr%d"%i,
                                           "../../data/twitter_dev%d"%i,
                                           "../../data/twitter_te%d"%i)
# Carve the target domain into few-shot / dev / remaining-unlabeled parts.
# NOTE(review): the percent list looks cumulative (few_shot, few_shot+dev,
# rest) — confirm against the dataset's split() implementation.
few_shot_set, dev_set, new_domain = new_domain.split(percent=[few_shot_cnt*1.0/len(new_domain),
                                                              (few_shot_cnt+dev_cnt)*1.0/len(new_domain),
                                                              1.0])
# Event name of the target domain (taken from the first sample) and the
# ground-truth labels, kept aside for final evaluation.
new_domain_name = new_domain.data[new_domain.data_ID[0]]['event']
new_domain_label = new_domain.data_y


# Baseline: evaluate the pretrained BiGCN on the target domain before any
# adaptation, and log the metrics under "BiGCN_Original_<event>".
BiGCN_model.load_model(BiGCN_Paths[i])
Bi_new_domain = Convert_2_BiGCNFormat(new_domain)
Bi_new_domain_loader = DataLoader(Bi_new_domain, batch_size=20, shuffle=False,
                               collate_fn=Bi_new_domain.collate_raw_batch)
# valid(..., all_metrics=True) returns (acc, loss, prec, recall, f1) —
# inferred from the indexing below; confirm against the framework code.
rst_BiGCN = BiGCN_model.valid(Bi_new_domain_loader, all_metrics=True)
print("Original Performance of BiGCN:", rst_BiGCN)
fitlog.add_best_metric({"BiGCN_Original_%s"%new_domain_name:
                                        {"valid_acc": rst_BiGCN[0],
                                         "valid_loss": rst_BiGCN[1],
                                         "valid_prec": rst_BiGCN[2],
                                         "valid_recall": rst_BiGCN[3],
                                         "valid_f1": rst_BiGCN[4]
                                           }})

# Baseline: evaluate the pretrained BU-RvNN on the target domain before any
# adaptation, and log the metrics under "BuRvNN_model_Original_<event>".
BuRvNN_model.load_model(BuRvNN_model_Paths[i])
BURvNN_new_domain = Convert_2_BURvNNFormat(new_domain)
BR_new_domain_loader = DataLoader(BURvNN_new_domain, batch_size=20, shuffle=False,
                               collate_fn=BURvNN_new_domain.collate_raw_batch)
# Same (acc, loss, prec, recall, f1) tuple layout as the BiGCN evaluation.
rst_BuRvNN_model = BuRvNN_model.valid(BR_new_domain_loader, all_metrics=True)
print("Original Performance of rst_BuRvNN_model:", rst_BuRvNN_model)
fitlog.add_best_metric({"BuRvNN_model_Original_%s"%new_domain_name:
                                    {"valid_acc": rst_BuRvNN_model[0],
                                    "valid_loss": rst_BuRvNN_model[1],
                                    "valid_prec": rst_BuRvNN_model[2],
                                    "valid_recall": rst_BuRvNN_model[3],
                                    "valid_f1": rst_BuRvNN_model[4]
                                           }})

# Re-label the target domain with BU-RvNN weak labels (WeakLabeling mutates
# new_domain.data_y in place). The returned entropies are not used below.
new_domain = Convert_2_BURvNNFormat(new_domain)
e_arr = WeakLabeling(BuRvNN_model, new_domain)

#=============================Expand Few Shot Set======================================

# Pick the 80 source-domain samples most similar to the target domain in
# BiGCN feature space (FeatureMatch sorts ascending by votes, so the best
# matches are the last indices) and append them — with their true labels —
# to the few-shot set.
idxs = FeatureMatch(new_domain, old_domain, BiGCN_model, convey_fn=Convert_2_BiGCNFormat)
data_idxs = idxs[-80:].tolist()
ID = [old_domain.data_ID[idx] for idx in data_idxs]
data_y = [old_domain.data_y[idx] for idx in data_idxs]
data_len = [old_domain.data_len[idx] for idx in data_idxs]
data_dic = {d_ID:old_domain.data[d_ID] for d_ID in ID}
few_shot_set.data_ID.extend(ID)
few_shot_set.data_len.extend(data_len)
few_shot_set.data_y.extend(data_y)
# Merge the selected samples' payloads into the few-shot set's data dict.
few_shot_set.data = dict(few_shot_set.data, **data_dic)
#======================================================================================


# Meta-train BiGCN with instance reweighting: the weakly-labeled target
# domain is the training pool, the expanded few-shot set serves as the clean
# reference, and new_domain_label keeps the ground truth for validation.
IR_weighting = ImprovedOnlineTrainer(BiGCN_model, new_domain, few_shot_set, new_domain_label,
                                       convey_fn=Convert_2_BiGCNFormat, lr4model=2e-3, scale_lr4model=1.0, batch_size=32)
IR_weighting.Training(max_steps=10000, validate_every_step=100,
                       model_file="./%s/MetaBiGCN_%s.pkl"%(log_dir, new_domain_name))
# Reload the checkpoint written during training and re-evaluate on the
# same target-domain loader used for the baseline numbers.
BiGCN_model.load_model("./%s/MetaBiGCN_%s.pkl"%(log_dir, new_domain_name))
rst_BiGCN = BiGCN_model.valid(Bi_new_domain_loader, all_metrics=True)
# Fix: the original print used "%3d | %3d" placeholders with no % arguments,
# so the raw format string was printed verbatim.
print("Post-MetaTrain Performance of BiGCN:", rst_BiGCN)
fitlog.add_metric({"BiGCN_Post_%s"%new_domain_name:
                    {"valid_acc": rst_BiGCN[0],
                     "valid_loss": rst_BiGCN[1],
                     "valid_prec": rst_BiGCN[2],
                     "valid_recall": rst_BiGCN[3],
                     "valid_f1": rst_BiGCN[4]
                       }},  step=0)