from transformers import *

import tensorflow as tf
import numpy as np
from tqdm import tqdm
from encoder import BaseEncoder


class TransformerEncoder(BaseEncoder):
    """Encoder backed by a HuggingFace transformer.

    Wraps an ``AutoTokenizer``/``TFAutoModel`` pair identified by a model
    name (``architecture``) and exposes:

    - ``build()``: a Keras input layer plus the transformer's token-level
      output tensor, for assembly into a larger ``tf.keras`` model.
    - ``preprocess_input()``: raw strings -> fixed-width id matrix.
    """

    def __init__(self, architecture, max_len):
        # architecture: HuggingFace model identifier passed to from_pretrained.
        # max_len: fixed sequence length; preprocess_input truncates longer
        #          inputs and zero-pads shorter ones to this width.
        self.max_len = max_len
        self.architecture = architecture
        self.tokenizer = AutoTokenizer.from_pretrained(self.architecture)

    def build(self):
        """Create the input placeholder and transformer sub-graph.

        Returns:
            tuple: ``(input_ids_layer, sequence_output)`` where
            ``input_ids_layer`` is a ``tf.int32`` Keras Input of shape
            ``(batch, max_len)`` and ``sequence_output`` is the
            transformer's first output (the last hidden state,
            ``(batch, max_len, hidden_size)``).
        """
        input_ids_l = tf.keras.layers.Input(
            [self.max_len, ], dtype=tf.int32, name=f"{self.architecture}input_ids")

        # from_pt=True: convert PyTorch weights when no native TF
        # checkpoint is published for this architecture.
        encoder = TFAutoModel.from_pretrained(self.architecture, from_pt=True)

        outputs = encoder(input_ids_l)
        # outputs[0] is the token-level hidden-state tensor.
        return input_ids_l, outputs[0]

    def preprocess_input(self, text_list):
        """Tokenize strings into a zero-padded integer id matrix.

        Each string is split into individual characters (``list(line)``) —
        no subword tokenization — and each character is mapped to its
        vocabulary id. Rows are truncated to ``max_len`` and right-padded
        with 0.

        Args:
            text_list: sized iterable of strings.

        Returns:
            ``np.ndarray`` of shape ``(len(text_list), max_len)``,
            dtype ``int32``.
        """
        # int32 matches the dtype of the Input layer declared in build();
        # the default float64 buffer would force an implicit cast downstream.
        input_ids = np.zeros((len(text_list), self.max_len), dtype=np.int32)

        for i, line in tqdm(enumerate(text_list),
                            total=len(text_list),
                            desc=f"[{self.architecture} tokenizing]"):
            # Truncate by slicing instead of counting tokens in a loop.
            # NOTE(review): characters absent from the vocab presumably map
            # to the tokenizer's unk id — verify for the chosen architecture.
            encodes = self.tokenizer.convert_tokens_to_ids(list(line))[:self.max_len]
            input_ids[i, :len(encodes)] = encodes

        return input_ids


