from bert4keras.backend import keras
from bert4keras.models import build_transformer_model
from bert4keras.tokenizers import Tokenizer
from bert4keras.snippets import to_array
import numpy as np

from utils.bert_info import BertInfo

def main():
    """Smoke-test bert4keras model serialization.

    Builds a BERT model from the paths configured in ``BertInfo``, runs a
    prediction on a sample sentence, then saves the model to disk, reloads
    it with ``keras.models.load_model``, and predicts again so the two
    outputs can be compared by eye.
    """
    bertinfo_obj = BertInfo()

    # do_lower_case=False: keep input casing as-is (presumably a cased
    # vocab — confirm against the checkpoint's vocabulary file).
    tokenizer = Tokenizer(bertinfo_obj.dict_path, do_lower_case=False)
    model = build_transformer_model(
        bertinfo_obj.config_path, bertinfo_obj.checkpoint_path
    )

    # Encoding test: tokenize a sample sentence ("language model" in
    # Chinese) and wrap in a batch of size 1 as numpy arrays.
    token_ids, segment_ids = tokenizer.encode(u"语言模型")
    token_ids, segment_ids = to_array([token_ids], [segment_ids])

    print('\n ===== predicting =====\n')
    print(model.predict([token_ids, segment_ids]))

    print('\n ===== reloading and predicting =====\n')
    model.save('test.model')

    # Drop the in-memory model so the second prediction can only come
    # from the reloaded copy.
    del model

    model = keras.models.load_model('test.model')
    print(model.predict([token_ids, segment_ids]))


# Guard the entry point so importing this module does not trigger model
# building, prediction, or writing 'test.model' to disk.
if __name__ == '__main__':
    main()
