import sys
sys.path.append("..")
sys.path.append("../..")
from RumdetecFramework.BaseRumorFramework import RumorDetection
from Dataloader.weiboloader import *
from Dataloader.dataloader_utils import *
from SentModel.Sent2Vec import *
from PropModel.SeqPropagation import GRUModel
import pkuseg
import os
from torch.utils.data import DataLoader

# Optional CLI arg: first positional argument selects the visible CUDA device.
# len(sys.argv) is always >= 1, so "> 1" means an argument was supplied.
if len(sys.argv) > 1:
    os.environ["CUDA_VISIBLE_DEVICES"] = sys.argv[1]

def obtain_RDM():
    """Build and return the rumor-detection model (RDM).

    Assembles a word2vec sentence encoder (Chinese, segmented with pkuseg),
    a single-layer GRU propagation model, and a linear binary classifier,
    wrapped in the project's RumorDetection trainer.
    """
    # pkuseg segmenter with an empty user dictionary (no custom vocabulary).
    segmenter = pkuseg.pkuseg(user_dict=[])
    # 300-dim word2vec embeddings loaded from the saved Chinese model.
    sentence_encoder = W2VRDMVec("../../saved/word2vec_cn", 300, seg=segmenter)
    # GRU: input 300, hidden 256, 1 layer, dropout 0.2.
    propagation = GRUModel(300, 256, 1, 0.2)
    # Binary classification head over the 256-dim GRU state.
    classifier = nn.Linear(256, 2)
    detector = RumorDetection(sentence_encoder, propagation, classifier,
                              batch_size=20, grad_accum_cnt=1)
    return detector

def obtain_Sort_set(tr_prefix, dev_prefix, te_prefix):
    """Load the train/dev/test WeiboSet splits from their data prefixes.

    Each prefix points at preprocessed data consumed by
    WeiboSet.load_data_fast; returns (train, dev, test) in that order.
    """
    def _load_split(prefix):
        # One WeiboSet per split; load_data_fast reads the cached data.
        split = WeiboSet()
        split.load_data_fast(data_prefix=prefix)
        return split

    return _load_split(tr_prefix), _load_split(dev_prefix), _load_split(te_prefix)
# Evaluate the saved Sort-Weibo RDM checkpoint on the held-out test split.
topic_idx = 1  # index into the project-level `topics` list (from wildcard import)
train_set, dev_set, test_set = obtain_Sort_set(
    "../../data/weibo_tr_%s" % topics[topic_idx],
    "../../data/weibo_dev_%s" % topics[topic_idx],
    "../../data/weibo_te_%s" % topics[topic_idx])
train_set, dev_set, test_set = Sort_data(train_set, dev_set, test_set)
# Drop very short training sequences, then cap sequence lengths per split.
train_set.filter_short_seq(min_len=5)
train_set.trim_long_seq(10)
dev_set.trim_long_seq(20)
test_set.trim_long_seq(20)
print("Weibo Sort: (dev event)/(test event)/(train event) = %3d/%3d/%3d" % (len(dev_set), len(test_set), len(train_set)))
print("\n\n===========Sort Train===========\n\n")
rdm = obtain_RDM()
# Validation only: weights come from the pretrained checkpoint, no training here.
test_loader = DataLoader(test_set, batch_size=5, shuffle=False, collate_fn=test_set.collate_raw_batch)
# NOTE(review): the format string assumes valid() returns a 5-tuple
# (acc, loss, prec, recall, f1) when all_metrics=True — confirm in RumorDetection.
metrics = rdm.valid(test_loader, pretrained_file="../../saved/RDM_Sort_Weibo.pkl", all_metrics=True)
print("##Validation On Test Dataset####  te_acc:%3.4f, te_loss:%3.4f, te_prec:%3.4f, te_recall:%3.4f, te_f1:%3.4f"%metrics)
print(metrics)

# for i in [1, 5, 6]:
#     tr, dev, te = obtain_Sort_set("../../data/weibo_tr_%s" %topics[i], "../../data/weibo_dev_%s" %topics[i], "../../data/weibo_te_%s" %topics[i])
#     tr.filter_short_seq(min_len=5)
#     tr.ResortSample(10)
#     print("%s : (dev event)/(test event)/(train event) = %3d/%3d/%3d" % (topics[i], len(dev), len(te), len(tr)))
#     print("\n\n===========%s Train===========\n\n"%topics[i])
#     model = obtain_RDM()
#     model.train_iters(tr, dev, te, max_epochs=100,
#                       log_dir="../../logs/", log_suffix="TFIDF_GRURD_%s" %topics[i],
#                       model_file="../../saved/TFIDF_GRURD_%s.pkl" %topics[i])