
import tensorflow_hub as hub
import tensorflow as tf

from gpt2_tokenizer import GPT2Tokenizer

# BPE tokenizer for the CPM (Chinese Pretrained Model) vocabulary:
# GPT-2 style vocab/merges plus a SentencePiece model for Chinese text.
# NOTE(review): paths are relative to the working directory — the files
# must exist locally; confirm against the deployment layout.
tokenizer = GPT2Tokenizer(
    'CPM-Generate/bpe_3w_new/vocab.json',
    'CPM-Generate/bpe_3w_new/merges.txt',
    model_file='CPM-Generate/bpe_3w_new/chinese_vocab.model')

# Load the CPM language model as a TF2 SavedModel from a local directory.
# Happens at import time, so importing this module is expensive.
gpt = hub.load('./cpm-lm-tf2_v2/')


def sample(tokenizer, gpt, sentence, number=1, length=20, top_p=0.9, temperature=0.9):
    """Generate text continuations of ``sentence`` with the CPM model.

    Args:
        tokenizer: tokenizer object exposing ``encode()`` / ``decode()``.
        gpt: loaded TF Hub module with a ``serving_default`` signature.
        sentence: prompt text to continue.
        number: how many independent samples to generate.
        length: maximum number of tokens to generate.
        top_p: nucleus-sampling probability threshold.
        temperature: sampling temperature.

    Returns:
        A list of ``number`` decoded strings with all spaces removed
        (the tokenizer inserts spaces between tokens when decoding).
    """
    # Repeat the encoded prompt to form a batch of size `number`.
    prompt_ids = tokenizer.encode(sentence)
    batch = tf.constant([prompt_ids] * number, dtype=tf.int64)

    outputs = gpt.signatures['serving_default'](
        inp=batch,
        length=tf.constant(length, dtype=tf.int64),
        top_p=tf.constant(top_p, tf.float32),
        temperature=tf.constant(temperature, tf.float32),
    )['output_0']

    decoded = []
    for row in outputs.numpy():
        decoded.append(tokenizer.decode(row).replace(' ', ''))
    return decoded


def do_predict(src_txt, char_num_dst=100, num_dst=3):
    """Generate continuations of ``src_txt``, print each, and return them.

    Args:
        src_txt: prompt text to continue.
        char_num_dst: maximum generation length in tokens (default: 100).
        num_dst: number of continuations to produce.

    Returns:
        The list of generated strings on success, or ``False`` if
        generation raised any exception (the error is printed).
    """
    try:
        results = sample(tokenizer, gpt, src_txt, num_dst,
                         char_num_dst, top_p=0.9, temperature=0.9)
        for text in results:
            print(text)
            print('-' * 20)
        return results
    except Exception as err:
        # Best-effort boundary: report the failure and signal it to the
        # caller instead of propagating.
        print(err)
        return False


if __name__ == '__main__':
    # Demo: generate 3 continuations of up to 100 tokens for a news-style
    # Chinese paragraph. Routed through do_predict() so the printing loop
    # lives in exactly one place (the original duplicated it here).
    demo_prompt = '宽阔洁净的柏油马路，整齐的红墙院落，手工艺品农民专业合作社里一片忙碌，院内挂满了各式各样的笤帚成品。难以想象，6年多前，阿亚格曼干村还是个缺水少电的深度贫困村，村民住着土坯房，走着沙土路。'
    do_predict(demo_prompt, char_num_dst=100, num_dst=3)