from models.bert_attention_cross import BertSentiEntity
from data_utils.bert_attention_data_cross import get_train_val_data_loader_cross, get_test_loader, token_CLSE, \
    get_test_loader_by_split_id
from data_utils.basic_data_round2 import load_train_val_dataset_cross
import torch
from config import conf
import pandas as pd
from os.path import join, exists
from glob import glob
import os

# Root directory where cross-fold feature dumps are written.
FEATURE_ROOT_DIR = conf.get('linux_dir', 'feature_root_dir')
# NOTE(review): GPU id is hard-coded to 0; the config lookup is commented out —
# confirm whether it should be read from conf.get('gpu', 'device_id') instead.
DEVICE_ID = 0#conf.get('gpu', 'device_id')
# Directory for inference result CSVs (read from config, not used in this chunk).
INFERENCE_DIR = conf.get('dir', 'inference_result_dir')

# Number of cross-validation folds; also the number of trained model versions.
cross_total = 5
def train(model_clss, i):
    """Train one cross-validation fold and return the tokenizer.

    Builds the fold-``i`` train/val loaders (``cross_total`` folds total),
    trains a fresh ``model_clss`` instance for up to 15 epochs with early
    stopping after 4 stale epochs, and hands back the tokenizer so the
    caller can reuse it for inference.
    """
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda:%s" % (DEVICE_ID) if use_cuda else "cpu")
    torch.cuda.empty_cache()

    loaders = get_train_val_data_loader_cross(
        device, 4, test_number=i, cross_number=cross_total,
        shuffle=True, maxlen=400)
    train_loader, val_loader, tokenizer = loaders

    model = model_clss(
        tokenizer, token_CLSE, 5e-6,
        model_name='senti_entity_goodremove_full').to(device)
    model.train_epochs(train_loader, val_loader, num_epoch=15, early_stop=4)
    return tokenizer


def test(model_class, tokenizer):
    """Run inference on the full test set with the newest trained model version.

    A probe instance is built first solely to locate ``model_root``; the number
    of saved ``*version*`` directories there determines which version id the
    real model is constructed and loaded with. Results are saved as a CSV under
    the model's save directory.
    """
    device = torch.device("cuda:%s" % (DEVICE_ID) if torch.cuda.is_available() else "cpu")
    test_loader = get_test_loader(device, 4, maxlen=400)
    # Probe: only used to read model_root so we can count existing versions.
    probe = model_class(tokenizer, token_CLSE, 5e-6, version_id=1,
                        model_name='senti_entity_goodremove_full').to(device)
    version_id = len(glob(probe.model_root + '/*version*'))
    bert_attn_model = model_class(tokenizer, token_CLSE, 5e-6, version_id=version_id,
                                  model_name='senti_entity_goodremove_full').to(device)
    bert_attn_model.load_best_model()
    results = list(bert_attn_model.inference(test_loader))
    save_path = join(bert_attn_model.model_save_dir, 'data', 'raw_rs.csv')
    # Fix: ensure the output directory exists before writing — the other save
    # paths in this file guard with makedirs, but this one previously did not
    # and would crash on a fresh run.
    save_dir = os.path.dirname(save_path)
    if not exists(save_dir):
        os.makedirs(save_dir)
    pd.DataFrame(results, columns=['id', 'negative', 'predict', 'entity_list']).to_csv(save_path, index=False)
    print('saved ', save_path)


def train_cross(model_class):
    """Train and evaluate each cross-validation fold in turn (folds 1..5)."""
    for fold in range(1, 6):
        fold_tokenizer = train(model_class, fold)
        test(model_class, fold_tokenizer)


def predict_new_features(i):
    """Generate out-of-fold features for split ``i`` with its trained model.

    Loads the best checkpoint for version ``i``, runs
    ``inference_for_new_features`` on the held-out fold, and writes the rows
    to a CSV under FEATURE_ROOT_DIR/<model_name>/.
    """
    device = torch.device("cuda:%s" % (DEVICE_ID) if torch.cuda.is_available() else "cpu")
    # The loaders are rebuilt only to recover the tokenizer; the train/val
    # sets themselves are not consumed here.
    _, _, tokenizer = get_train_val_data_loader_cross(
        device, 4, test_number=i, cross_number=cross_total,
        shuffle=True, maxlen=400)

    model = BertSentiEntity(tokenizer, token_CLSE, 5e-6, version_id=i,
                            model_name='senti_entity_goodremove_full').to(device)
    model.load_best_model()
    heldout_loader = get_test_loader_by_split_id(
        device, 16, cross_number=cross_total, maxlen=400, test_number=i)
    rows = list(model.inference_for_new_features(heldout_loader))

    save_path = join(FEATURE_ROOT_DIR, model.model_name, 'feature_split %d' % (i))
    target_dir = os.path.dirname(save_path)
    if not exists(target_dir):
        os.makedirs(target_dir)
    pd.DataFrame(rows, columns=['id', 'negative', 'predict', 'entity_list']).to_csv(save_path, index=False)
    print('saved at %s' % save_path)


def predict_new_test_features(i):
    """Score the round-2 test set with the fold-``i`` model and dump features.

    Loads the best checkpoint for version ``i`` and writes one feature row per
    test example to FEATURE_ROOT_DIR/<model_name>/.
    """
    device = torch.device("cuda:%s" % (DEVICE_ID) if torch.cuda.is_available() else "cpu")
    # NOTE(review): cross_number=9 disagrees with cross_total (5) used by every
    # other call in this file. Only the tokenizer is consumed from this call,
    # but confirm the mismatch is intentional.
    _, _, tokenizer = get_train_val_data_loader_cross(
        device, 4, test_number=i, cross_number=9, shuffle=True, maxlen=400)

    model = BertSentiEntity(tokenizer, token_CLSE, 5e-6, version_id=i,
                            model_name='senti_entity_goodremove_full').to(device)
    model.load_best_model()
    test_loader = get_test_loader(device, 16, maxlen=400)
    rows = list(model.inference_for_new_features(test_loader))

    save_path = join(FEATURE_ROOT_DIR, model.model_name,
                     'test_features_round2_version_goodremove %d' % (i))
    out_dir = os.path.dirname(save_path)
    if not exists(out_dir):
        os.makedirs(out_dir)
    pd.DataFrame(rows, columns=['id', 'negative', 'predict', 'entity_list']).to_csv(save_path, index=False)
    print('saved at %s' % save_path)


if __name__ == '__main__':
    # Full pipeline: 5-fold training/eval, then out-of-fold feature dumps,
    # then round-2 test-set feature dumps for each fold.
    train_cross(BertSentiEntity)
    for fold in range(1, 6):
        predict_new_features(fold)
    for fold in range(1, 6):
        predict_new_test_features(fold)
