# Source: danhtran2mind — "Upload folder using huggingface_hub" (commit ee03b1a, verified)
import pickle
import tensorflow as tf
def load_tokenizers(en_path='tokenizers/en_tokenizer.pkl',
                    vi_path='tokenizers/vi_tokenizer.pkl'):
    """Load the English and Vietnamese Keras tokenizers from pickle files.

    Each pickle is expected to hold a tokenizer serialized to JSON (the
    output of ``Tokenizer.to_json()``); the payload is reconstructed into a
    Keras ``Tokenizer`` via
    ``tf.keras.preprocessing.text.tokenizer_from_json``.

    Args:
        en_path: Path to the pickled English tokenizer JSON.
        vi_path: Path to the pickled Vietnamese tokenizer JSON.

    Returns:
        Tuple ``(en_tokenizer, vi_tokenizer)`` of Keras Tokenizer objects.

    Raises:
        FileNotFoundError: If either pickle file does not exist.
    """
    def _load(path):
        # One helper for both languages: read the pickled JSON payload and
        # rebuild the Keras tokenizer from it.
        # SECURITY NOTE: pickle.load can execute arbitrary code embedded in
        # the file — only load tokenizer files from trusted sources.
        with open(path, 'rb') as f:
            tokenizer_json = pickle.load(f)
        return tf.keras.preprocessing.text.tokenizer_from_json(tokenizer_json)

    return _load(en_path), _load(vi_path)