from RumdetecFramework.BaseRumorFramework import RumorDetection
from Dataloader.weiboloader import *
from Dataloader.dataloader_utils import *
from SentModel.Sent2Vec import *
from PropModel.SeqPropagation import GRUModel
import torch.nn as nn
import pandas as pd

def obtain_BertRD():
    """Assemble the Weibo BERT-based rumor-detection model.

    Wires together three components into a RumorDetection pipeline:
    a Chinese BERT sentence encoder (768-dim output), a single-layer
    GRU propagation model (256 hidden units, 0.2 dropout), and a
    binary linear classifier head.
    """
    encoder = BertVec_CN("../../bert_cn/", bert_parallel=True)
    propagation = GRUModel(768, 256, 1, 0.2)
    classifier = nn.Linear(256, 2)
    return RumorDetection(encoder, propagation, classifier)

def obtain_general_set(tr_prefix, dev_prefix, te_prefix):
    """Load cached train/dev/test WeiboSet datasets.

    Each prefix points to a previously cached dataset (see
    Caches_Data); the sets are loaded via the fast path.

    Returns:
        (train_set, dev_set, test_set) tuple of WeiboSet instances.
    """
    def _load(prefix):
        # One WeiboSet per split, restored from its cache prefix.
        ds = WeiboSet()
        ds.load_data_fast(data_prefix=prefix)
        return ds

    return _load(tr_prefix), _load(dev_prefix), _load(te_prefix)


# Leave-one-topic-out experiments: for each held-out topic category,
# train on the remaining categories and split the held-out category
# 50/50 into dev and test.
#
# NOTE(review): `topics` is not defined in this file — presumably a
# topic-id -> name mapping provided by one of the star imports above;
# confirm before running standalone.
df = pd.read_csv('./data/weibo_ids.csv')  # loop-invariant: read once, not per topic

for i in [1, 5, 6]:
    # Held-out topic rows become dev+test; everything else is training.
    dev_te = df[df.category == i]
    tr_df = df[df.category != i]

    # 50/50 split of the held-out topic (integer floor division).
    half = len(dev_te) // 2
    dev_df = dev_te.iloc[:half]
    te_df = dev_te.iloc[half:]

    # Build each split from raw Weibo files and cache it to disk.
    tr = WeiboSet()
    tr.load_data(weibo_dir="../data/Weibo/", weibo_df=tr_df)
    tr.Caches_Data(data_prefix="../data/weibo_tr_%s" % topics[i])

    dev = WeiboSet()
    dev.load_data(weibo_dir="../data/Weibo/", weibo_df=dev_df)
    dev.Caches_Data(data_prefix="../data/weibo_dev_%s" % topics[i])

    te = WeiboSet()
    te.load_data(weibo_dir="../data/Weibo/", weibo_df=te_df)
    te.Caches_Data(data_prefix="../data/weibo_te_%s" % topics[i])

    # NOTE(review): filtering happens AFTER caching, so the cached
    # training set on disk is unfiltered — intentional? Keep sequences
    # of length >= 5 and truncate to at most 10 for training.
    tr.filter_short_seq(min_len=5)
    tr.trim_long_seq(10)

    model = obtain_BertRD()
    model.train_iters(tr, dev, te, max_epochs=100,
                    log_dir="../logs/", log_suffix="_WeiboBertRD_%s" % topics[i],
                    model_file=u"WeiboBertRD_%s.pkl" % topics[i])