import sys
sys.path.append("../../")
sys.path.append("../")

import os
import pickle
import shutil

import fitlog
import torch
import torch.nn as nn
from sklearn.feature_extraction.text import TfidfVectorizer
from torch.utils.data import DataLoader

from Dataloader.dataloader_utils import Sample_data, Merge_data, Lemma_Factory
from Dataloader.twitterloader import TwitterSet, TreeTwitterSet
from SentModel.Sent2Vec import TFIDFBasedVec, W2VRDMVec
from PropModel.GraphPropagation import BU_RvNN
from RumdetecFramework.GraphRumorDect import RvNNRumorDetec
from RumdetecFramework.BaseRumorFramework import RumorDetection
from RumdetecFramework.TransferRumorFramework import InstanceReweighting

def obtain_model(pretrained_vectorizer):
    """Assemble a BU-RvNN rumor detector on top of a pre-fitted TF-IDF vectorizer.

    The pipeline is: TF-IDF based sentence encoder (300-d) -> bottom-up
    recursive propagation over the tweet tree (256-d) -> linear 2-way classifier.
    """
    sentence_encoder = TFIDFBasedVec(pretrained_vectorizer, 20,
                                     embedding_size=300,
                                     w2v_dir="../../saved/glove_en/")
    propagation = BU_RvNN(300, 256)
    classifier = nn.Linear(256, 2)
    detector = RvNNRumorDetec(sentence_encoder, propagation, classifier,
                              batch_size=20, grad_accum_cnt=1)
    return detector

def WeakLabeling(model:"RumorDetection", data, convert_fn=None, batch_size=20):
    """Weak-label ``data`` with ``model``'s predictions and return per-sample entropy.

    Args:
        model: detector exposing ``forward(batch)`` returning per-class
            probabilities (annotation is a forward reference so this module
            can be imported without the framework package on the path).
        data: dataset providing ``collate_raw_batch`` and a writable ``data_y``.
        convert_fn: optional converter applied to ``data`` first (e.g. to the
            tree-structured format BU-RvNN consumes).
        batch_size: inference mini-batch size.

    Returns:
        1-D tensor holding the prediction entropy of each sample.  As a side
        effect, ``data.data_y`` is overwritten with the thresholded (p > 0.5)
        hard labels.
    """
    if convert_fn is not None:
        data = convert_fn(data)
    data_loader = DataLoader(data,
                             batch_size=batch_size,
                             shuffle=False,
                             collate_fn=data.collate_raw_batch)
    preds = []
    with torch.no_grad():  # inference only; no gradients needed
        for batch in data_loader:
            preds.append(model.forward(batch))
    # Concatenate once (the original built this tensor twice).
    pred_tensor = torch.cat(preds)
    data.data_y = (pred_tensor > 0.5).long().tolist()
    # H(p) = -sum(p * log p); |log p| == -log p for p in (0, 1].
    entrophy = (pred_tensor.log().abs() * pred_tensor).sum(dim=1)
    return entrophy

def obtain_Domain_set(tr_prefix, dev_prefix, te_prefix):
    """Load the source-domain training set and a merged target ("new") domain.

    The dev and test splits are concatenated to form the new domain; only the
    training split is filtered by a minimum length of 5.
    """
    source_set = TwitterSet()
    source_set.load_data_fast(data_prefix=tr_prefix, min_len=5)
    dev_split = TwitterSet()
    dev_split.load_data_fast(data_prefix=dev_prefix)
    test_split = TwitterSet()
    test_split.load_data_fast(data_prefix=te_prefix)
    merged_domain = Merge_data(dev_split, test_split)
    return source_set, merged_domain

def Convert_2_BURvNNFormat(data):
    """Re-wrap a flat Twitter dataset as a TreeTwitterSet for the BU-RvNN model.

    Copies the four shared fields by reference; no data is duplicated.
    """
    tree_set = TreeTwitterSet()
    for field in ("data", "data_ID", "data_len", "data_y"):
        setattr(tree_set, field, getattr(data, field))
    return tree_set

# Location of the cached TF-IDF vectorizer fitted on the Twitter corpus.
Tf_Idf_twitter_file = "../../saved/TfIdf_twitter.pkl"
if os.path.exists(Tf_Idf_twitter_file):
    # Reuse the cache.  NOTE(review): pickle.load is acceptable here only
    # because this file is produced locally by this very script.
    with open(Tf_Idf_twitter_file, "rb") as fr:
        tv = pickle.load(fr)
else:
    # First run: fit the vectorizer on the lemmatized texts of fold 1,
    # then persist it for subsequent runs.
    i = 1
    tr, dev = obtain_Domain_set("../../data/twitter_tr%d" % i,
                                "../../data/twitter_dev%d" % i,
                                "../../data/twitter_te%d" % i)
    lemma = Lemma_Factory()
    corpus = []
    for data in (tr, dev):
        for ID in data.data_ID:
            for txt in data.data[ID]['text']:
                corpus.append(" ".join(lemma(txt)))
    tv = TfidfVectorizer(use_idf=True, smooth_idf=True, norm=None)
    tv.fit_transform(corpus)
    with open(Tf_Idf_twitter_file, "wb") as fw:
        pickle.dump(tv, fw, protocol=pickle.HIGHEST_PROTOCOL)

# Per-run fitlog directory named after this script, without its extension.
# BUG fix: str.rstrip(".py") strips a *character set* ('.', 'p', 'y'), so a
# script named e.g. "apply.py" would yield "appl"; splitext drops the
# extension correctly.
log_dir = os.path.splitext(str(__file__))[0]
# Recreate the directory from scratch so every run starts clean.  Using
# shutil/os instead of os.system avoids a shell dependency and any quoting
# issues in the path.
if os.path.exists(log_dir):
    shutil.rmtree(log_dir)
os.makedirs(log_dir)

# Two identically-configured BU-RvNN detectors; in the loop below each one
# weak-labels the data that the other is meta-trained on.
BURvNN_model1 = obtain_model(tv)
BURvNN_model2 = obtain_model(tv)
# Restore pre-trained weights (checkpoint path is load_model's default —
# presumably set inside RvNNRumorDetec; confirm against the framework).
BURvNN_model1.load_model()
BURvNN_model2.load_model()

# Number of labeled target-domain examples revealed to the learner.
few_shot_cnt = 50

fitlog.set_log_dir("%s/" % log_dir, new_log=True)
# Fold index selecting which event split is held out as the target domain.
i = 2
old_domain, new_domain = obtain_Domain_set("../../data/twitter_tr%d"%i,
                                           "../../data/twitter_dev%d"%i,
                                           "../../data/twitter_te%d"%i)
# NOTE(review): labels are captured from the *pre-split* target domain and
# later handed to InstanceReweighting — confirm it expects labels aligned
# with the full (unsplit) set rather than the post-split remainder.
new_domain_label = new_domain.data_y
# Fraction of the target domain leaked as the labeled few-shot set.
leakage_frac = few_shot_cnt*1.0/len(new_domain)
# split() returns (few-shot slice, remaining unlabeled target domain).
few_shot_set, new_domain = new_domain.split(percent=[leakage_frac, 1.0])
# Event name of the target domain; used to tag metrics and checkpoint files.
new_domain_name = new_domain.data[new_domain.data_ID[0]]['event']

def _metric_dict(rst):
    """Map an (acc, loss, prec, recall, f1) tuple from ``valid`` to fitlog's schema."""
    return {"valid_acc": rst[0],
            "valid_loss": rst[1],
            "valid_prec": rst[2],
            "valid_recall": rst[3],
            "valid_f1": rst[4]}

# Baseline: evaluate both pre-trained models on the unseen target domain
# before any adaptation, so the co-training gain below is measurable.
Bi_new_domain = Convert_2_BURvNNFormat(new_domain)
new_domain_loader = DataLoader(Bi_new_domain, batch_size=20, shuffle=False,
                               collate_fn=Bi_new_domain.collate_raw_batch)
rst_BURvNN = BURvNN_model1.valid(new_domain_loader, all_metrics=True)
fitlog.add_best_metric({"BURvNN1_Original_%s" % new_domain_name: _metric_dict(rst_BURvNN)})
rst_BURvNN = BURvNN_model2.valid(new_domain_loader, all_metrics=True)
fitlog.add_best_metric({"BURvNN2_Original_%s" % new_domain_name: _metric_dict(rst_BURvNN)})

# Co-training: in each epoch the two models alternately weak-label the
# unlabeled target domain for each other, and the peer is meta-trained with
# instance weights inversely proportional to prediction entropy (confident
# pseudo-labels weigh more).
for epoch in range(30):
    # --- Round A: model1 labels, model2 is meta-trained. ---
    e_arr = WeakLabeling(BURvNN_model1, new_domain)
    # NOTE(review): 1/H diverges when a prediction entropy is ~0 — confirm
    # InstanceReweighting normalizes or clips the weak-set weights.
    IR_weighting = InstanceReweighting(BURvNN_model2, new_domain, few_shot_set, new_domain_label,
                                       convey_fn=Convert_2_BURvNNFormat,
                                       weak_set_weights= 1.0 / e_arr.cpu().numpy(), lr4model=2e-2, batch_size=20)

    IR_weighting.Meta_Training(max_iters=1000, model_file="../../saved/MetaBURvNN_%s.pkl"%new_domain_name)
    BURvNN_model2.load_model("../../saved/MetaBURvNN_%s.pkl"%new_domain_name)
    rst_BURvNN = IR_weighting.Valid(all_metrics=True)
    fitlog.add_metric({"BURvNN_%s"%new_domain_name:
                                {"valid_acc": rst_BURvNN[0],
                                 "valid_loss": rst_BURvNN[1],
                                 "valid_prec": rst_BURvNN[2],
                                 "valid_recall": rst_BURvNN[3],
                                 "valid_f1": rst_BURvNN[4]
                                }},
                      step=epoch
                      )

    # --- Round B: model2 labels, model1 is meta-trained. ---
    e_arr = WeakLabeling(BURvNN_model2, new_domain)
    # NOTE(review): convey_fn=None here vs Convert_2_BURvNNFormat in round A,
    # although both models are BU-RvNNs — confirm model1's framework really
    # consumes the flat (non-tree) format.
    IR_weighting = InstanceReweighting(BURvNN_model1, new_domain, few_shot_set, new_domain_label,
                                       convey_fn=None,
                                       weak_set_weights= 1.0 / e_arr.cpu().numpy(), lr4model=2e-2, batch_size=20)
    IR_weighting.Meta_Training(max_iters=1000, model_file="../../saved/MetaRDM_%s.pkl"%new_domain_name)
    # BUG fix: the checkpoint above is written to MetaRDM_*.pkl, but the
    # original reloaded MetaBURvNN_*.pkl — i.e. model2's checkpoint —
    # silently overwriting model1 with model2's weights each epoch.
    BURvNN_model1.load_model("../../saved/MetaRDM_%s.pkl"%new_domain_name)
    rst_RDM = IR_weighting.Valid(all_metrics=True)
    fitlog.add_metric({"RDM_%s"%new_domain_name:
                                {"valid_acc": rst_RDM[0],
                                "valid_loss": rst_RDM[1],
                                "valid_prec": rst_RDM[2],
                                "valid_recall": rst_RDM[3],
                                "valid_f1": rst_RDM[4]
                                }},
                      step=epoch
                      )