
import os

import json
import pandas as pd
import numpy as np
import gc
import tensorflow as tf
import wandb
from wandb.keras import WandbCallback

from transformer_encoder import TransformerEncoder
from regex_encoder import RegexEncoder
from rnn_encoder import RNNEncoder
from data_process import preprocess_train_mutil_model, preprocess_test_mutil_model
from config_colab import Config, config
from multi_encoder_model import build_model
from submission import write_ensemble_submission

# Hand-crafted character-class patterns consumed by RegexEncoder below as one
# input branch of the multi-encoder model. Each regex flags a surface feature
# of a (Chinese) address-like string.
regex_list = [

    r'0+',                          # runs of the digit zero
    r'A+',                          # runs of the letter 'A' (presumably a masking/placeholder token — TODO confirm)
    r'\W',                          # any non-word character (punctuation, separators)
    r'一|二|三|四|五|六|七|八|九|十',  # Chinese numerals one through ten
    r'东|西|南|北',                  # compass directions: east/west/south/north
    r'左|右',                        # left / right
    r'大厦|中心|大楼',               # building terms: tower / center / building
    r'室|处',                        # unit suffixes: room / place
    r'园|苑',                        # garden / court (common residential-complex suffixes)
    r'购物|商务|金融|科技'           # venue types: shopping / business / finance / technology
]


def setup_strategy():
    """Build the tf.distribute strategy for the current runtime.

    Tries to resolve a TPU cluster first; if one is reachable it is
    connected and initialised and a ``TPUStrategy`` is returned.
    Otherwise the default (CPU/GPU) strategy is used.

    Returns:
        tf.distribute.Strategy: ``TPUStrategy`` on TPU runtimes, else
        the default strategy.
    """
    tpu = None
    try:
        tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
        print(f'Running on TPU {tpu.master()}')
    except ValueError:
        # No TPU visible in this runtime; fall through to the default below.
        pass

    if tpu is None:
        return tf.distribute.get_strategy()

    tf.config.experimental_connect_to_cluster(tpu)
    tf.tpu.experimental.initialize_tpu_system(tpu)
    return tf.distribute.TPUStrategy(tpu)


strategy = setup_strategy()


# Vocabulary file holds one token per line. Use a context manager so the
# handle is closed promptly — the previous `open(...).readlines()` leaked
# the file object until garbage collection.
with open(Config.vocab_file) as vocab_fp:
    vocab_list = vocab_fp.readlines()

max_len = config["max_len"]

# One encoder per input branch of the multi-encoder model (see build_model).
encoder_list = [
    RegexEncoder(regex_list, max_len),
    TransformerEncoder(config["architecture"], max_len),
    RNNEncoder(vocab_list, max_len)
]


def train(train_file, val_file):
    """Train one fold of the multi-encoder model and predict on the test set.

    Trains with early stopping, keeping only the best checkpoint (per the
    configured monitor metric), then predicts class probabilities for the
    shared test set.

    Args:
        train_file: path to the fold's training ``.conll`` file.
        val_file: path to the fold's validation ``.conll`` file.

    Returns:
        Test-set class probabilities, shape (num_test_samples, num_labels).
    """
    train_input_ids_list, train_labels, label_dic = preprocess_train_mutil_model(
        [train_file],
        encoder_list,
        max_len,
        include_fake_label=config["include_fake_label"],
        similar_fake_num=config["similar_fake_num"])
    # BUGFIX: keep the *training* label dictionary for the model's output
    # size. The original rebound `label_dic` to the validation dictionary,
    # which can be smaller when fake labels are added to the training data
    # above, breaking the output layer dimension.
    val_input_ids_list, val_labels, _ = preprocess_train_mutil_model(
        [val_file],
        encoder_list,
        max_len,
        include_fake_label=False)

    test_input_ids_list = preprocess_test_mutil_model(
        Config.test_path,
        encoder_list)

    # Create the model under the distribution scope so its variables are
    # placed on the TPU (or default devices) correctly.
    with strategy.scope():
        model = build_model(encoder_list, len(label_dic), config["lr"])

    model.summary()

    best_path = "./best_model.h5"

    # Save only the best weights; weights-only keeps the checkpoint portable.
    ckpt = tf.keras.callbacks.ModelCheckpoint(best_path,
                                              monitor=config["monitor"],
                                              mode=config["mode"],
                                              save_best_only=True,
                                              save_weights_only=True)
    early_stop = tf.keras.callbacks.EarlyStopping(
        monitor=config["monitor"], mode=config["mode"], patience=config["patience"])
    wandbcallback = WandbCallback(
        monitor=config["monitor"], mode=config["mode"], save_model=False, log_weights=True)

    model.fit(
        x=train_input_ids_list,
        y=train_labels,
        shuffle=True,
        validation_data=(val_input_ids_list, val_labels),
        epochs=config["epochs"],
        batch_size=config["batch_size"],
        callbacks=[ckpt, early_stop, wandbcallback]
    )

    # Restore the best checkpoint before predicting on the test set.
    model.load_weights(best_path)
    test_probs = model.predict(test_input_ids_list, batch_size=16, verbose=1)

    if not config["save_model"]:
        os.remove(best_path)
    # Release graph/session memory so consecutive folds do not accumulate.
    tf.keras.backend.clear_session()
    return test_probs


# ---------------------------------------------------------------------------
# 5-fold ensemble driver: train one model per fold, accumulate the summed
# test probabilities, and write an ensemble submission after every fold.
# ---------------------------------------------------------------------------
test_probs = None
config["is_generate"] = False
prefix = "/content/drive/MyDrive/lab-result-1/groups"

for fold_num in ["0", "1", "2", "3", "4"]:
    config["train_name"] = config["architecture"].split(
        "/")[-1] + "_fold_" + fold_num
    config["inputs_fold"] = (
        "/content/drive/MyDrive/ccks2021-wws/lab/data/5fold/fold_" + fold_num)

    path = os.path.join(prefix, config["group_name"], config["train_name"])
    config["output_path"] = os.path.join(path, "outputs")
    config_path = os.path.join(path, "config.json")

    # exist_ok fixes the original logic bug: when `path` already existed but
    # `outputs` did not, directory creation was skipped entirely.
    os.makedirs(config["output_path"], exist_ok=True)

    # Persist this fold's config; `with` closes the handle (the original
    # passed an anonymous open() to json.dump and never closed it).
    with open(config_path, mode="w") as cfg_fp:
        json.dump(config, cfg_fp)

    train_file = os.path.join(config["inputs_fold"], "train.conll")
    val_file = os.path.join(config["inputs_fold"], "dev.conll")

    wandb.init(project=config["project_name"],
               group=config["group_name"],
               name=config["train_name"],
               config=config,
               entity="trillionmonster")

    # Sum fold probabilities; the ensemble prediction is argmax of the sum.
    fold_probs = train(train_file, val_file)
    test_probs = fold_probs if test_probs is None else test_probs + fold_probs

    # Checkpoint the running ensemble after every fold so a crash loses at
    # most one fold of work.
    np.save("/content/drive/MyDrive/ccks2021-wws/test_probs-concat.npy", test_probs)
    submission_path = f'/content/drive/MyDrive/ccks2021-wws/submission-concat-fold_{fold_num}.txt'
    write_ensemble_submission(test_probs, submission_path=submission_path)

    gc.collect()
    wandb.finish()

