from transformers import *

import tensorflow as tf
import numpy as np
from tqdm import tqdm
from encoder import BaseEncoder
import re


class RNNEncoder(BaseEncoder):
    """Character-level bidirectional-GRU text encoder.

    Maps each character of an input string to an integer id through a
    vocabulary dictionary, then (in ``build``) encodes the id sequence
    with an Embedding layer followed by a bidirectional GRU.
    """

    def __init__(self, vocab_list, max_len):
        """Build the character vocabulary.

        Args:
            vocab_list: iterable of vocabulary tokens (surrounding
                whitespace is stripped from each entry).
            max_len: maximum sequence length; longer inputs are truncated
                in ``preprocess_input``.
        """
        self.max_len = max_len
        # Id 0 doubles as the pad/space id; id 1 is the unknown-token id.
        self.vocab_dic = {" ": 0, "UNK": 1}

        for item in vocab_list:
            token = item.strip()
            # Guard against duplicates (and tokens colliding with " "/"UNK"):
            # re-assigning len(self.vocab_dic) to an existing key would leave
            # a gap in the id range and produce an id equal to the vocabulary
            # size, which is out of range for the Embedding layer in build().
            if token not in self.vocab_dic:
                self.vocab_dic[token] = len(self.vocab_dic)

    def build(self):
        """Create the Keras input placeholder and encoder output tensor.

        Returns:
            Tuple ``(input_ids_l, encoder)``: an int32 Input of shape
            ``[max_len]`` and the encoder output tensor of shape
            ``[batch, max_len, 256]`` (128 GRU units per direction).
        """
        input_ids_l = tf.keras.layers.Input(
            [self.max_len], dtype=tf.int32, name="rnn_input_ids")

        encoder = tf.keras.layers.Embedding(
            len(self.vocab_dic), 128)(input_ids_l)

        encoder = tf.keras.layers.BatchNormalization()(encoder)

        # return_sequences=True keeps the per-timestep outputs so downstream
        # layers can operate over the whole sequence.
        encoder = tf.keras.layers.Bidirectional(
            tf.keras.layers.GRU(128, return_sequences=True))(encoder)

        encoder = tf.keras.layers.LeakyReLU()(encoder)

        return input_ids_l, encoder

    def preprocess_input(self, text_list):
        """Convert texts into a right-padded matrix of character ids.

        Args:
            text_list: sequence of strings to encode.

        Returns:
            ``np.ndarray`` of shape ``(len(text_list), max_len)`` and dtype
            int32, zero-padded (pad id 0) on the right and truncated at
            ``max_len``. Unknown characters map to the "UNK" id.
        """
        # int32 matches the dtype declared on the model's Input layer.
        input_ids = np.zeros((len(text_list), self.max_len), dtype=np.int32)

        unk_id = self.vocab_dic["UNK"]
        for i, text in tqdm(enumerate(text_list), total=len(text_list)):
            # Truncate up front instead of breaking out of a fill loop.
            row = [self.vocab_dic.get(char, unk_id)
                   for char in text.strip()][:self.max_len]
            input_ids[i, :len(row)] = row

        return input_ids
