# coding:utf-8
## ERNIE-GEN 自动写诗
## 下载预训练模型

import paddle
import paddlenlp
from paddlenlp.transformers import ErnieForGeneration
# Run on GPU; generation fine-tuning is impractically slow on CPU.
paddle.set_device('gpu')
model = ErnieForGeneration.from_pretrained('ernie-1.0')

#!wget https://paddlenlp.bj.bcebos.com/models/transformers/ernie_gen_finetuned/ernie_1.0_poetry.pdparams

# Load the fine-tuned poetry checkpoint for incremental training.
# FIX: the file fetched above is named `ernie_1.0_poetry.pdparams` (underscore);
# the original path used `ernie_1.0.poetry.pdparams` (dot) and paddle.load
# would fail with FileNotFoundError.
init_checkpoint = './ernie_1.0_poetry.pdparams'
model_state = paddle.load(init_checkpoint)
model.set_dict(model_state)

from paddlenlp.datasets import load_dataset
# Load the poetry train/dev splits bundled with PaddleNLP.
train_dataset, dev_dataset = load_dataset('poetry', splits=('train','dev'))

# Peek at one example to see the raw token/label format.
first_example = train_dataset[0]
print(first_example['tokens'])
print(first_example['labels'])

# 数据预处理
from copy import deepcopy
import numpy as np
from paddlenlp.transformers import ErnieTokenizer

tokenizer = ErnieTokenizer.from_pretrained('ernie-1.0')
# ERNIE-GEN fills [ATTN] tokens in as prediction slots; ERNIE 1.0's vocab has
# no such token, so [MASK] is used as the filler instead.

# Map the filler token to its vocab id (presumably Vocab.__call__ does
# token -> id lookup here — TODO confirm against paddlenlp's Vocab API).
attn_id = tokenizer.vocab('[MASK]')
tgt_type_id = 1  # token_type id assigned to decoder-side (target) tokens
# Maximum encoder input / decoder output lengths (in tokens).
max_encode_len = 24
max_decode_len = 72
def convert_example(example):
    """Convert one raw poetry example into the id/position/segment features
    that ERNIE-GEN training consumes.

    Returns a tuple of (src_ids, src_pids, src_sids, tgt_ids, tgt_pids,
    tgt_sids, attn_ids, tgt_labels).
    """
    encoded_src = tokenizer.encode(
        example['tokens'],
        max_seq_len=max_encode_len,
        pad_to_max_seq_len=False)
    src_ids = encoded_src['input_ids']
    src_sids = encoded_src['token_type_ids']
    src_pids = np.arange(len(src_ids))

    encoded_tgt = tokenizer.encode(
        example['labels'],
        max_seq_len=max_decode_len,
        pad_to_max_seq_len=False)
    tgt_ids = np.array(encoded_tgt['input_ids'])
    # Shift segment ids so decoder-side tokens carry their own type id.
    tgt_sids = np.array(encoded_tgt['token_type_ids']) + tgt_type_id
    # Decoder positions continue where the encoder positions left off.
    tgt_pids = np.arange(len(tgt_ids)) + len(src_ids)

    # One [MASK]-filled prediction slot per target token.
    attn_ids = np.full_like(tgt_ids, attn_id)
    tgt_labels = tgt_ids

    return (src_ids, src_pids, src_sids, tgt_ids, tgt_pids, tgt_sids,
            attn_ids, tgt_labels)

# FIX: the original assigned the mapped result to a misspelled `tran_dataset`,
# so `train_dataset` itself was never converted and training would receive
# raw, unfeaturized examples.
train_dataset = train_dataset.map(convert_example)
dev_dataset = dev_dataset.map(convert_example)

from paddle.io import DataLoader
from paddlenlp.data import Stack, Tuple, Pad

def gen_mask(batch_ids, mask_type='bidi', query_len=None, pad_value=0):
    """Build a float32 attention mask of shape [batch, query_len, key_len].

    Args:
        batch_ids: 2-D array [batch, key_len] of token ids; `pad_value`
            entries are masked out as keys.
        mask_type: one of
            'bidi'                -- attend to every non-pad key position
            'causal'              -- lower-triangular: self and earlier keys
            'causal_without_diag' -- strictly lower-triangular: earlier only
            'diag'                -- each query attends only to its own slot
            'empty'               -- all-zero mask (attend to nothing)
        query_len: number of query positions; defaults to key_len.
        pad_value: id treated as padding (default 0).

    Returns:
        np.ndarray of float32 with 1.0 at attendable positions.
    """
    if query_len is None:
        query_len = batch_ids.shape[1]

    if mask_type == 'empty':
        # Nothing is attendable: an all-zero [batch, query_len, key_len] mask.
        # (The original contained a dead no-op expression `mask_type == 'empty'`
        # in this branch; it has been removed.)
        zeros = np.zeros_like(batch_ids).astype(np.float32)
        return np.tile(np.expand_dims(zeros, 1), [1, query_len, 1])

    # Start from "every non-pad key is visible", broadcast over queries.
    mask = (batch_ids != pad_value).astype(np.float32)
    mask = np.tile(np.expand_dims(mask, 1), [1, query_len, 1])
    if mask_type == 'causal':
        assert query_len == batch_ids.shape[1]
        mask = np.tril(mask)
    elif mask_type == 'causal_without_diag':
        assert query_len == batch_ids.shape[1]
        mask = np.tril(mask, -1)
    elif mask_type == 'diag':
        assert query_len == batch_ids.shape[1]
        # Keep only the diagonal of each per-example mask.
        mask = np.stack([np.diag(np.diag(m)) for m in mask])

    return mask

def after_padding(args):
    '''
       attention mask:
       ***  src,  tgt, attn
       src  00,   01,   11
       tgt  10,   11,   12
       attn 20,   21,   22

       ***   s1, s2 | t1 t2 t3| attn1 attn2 attn3
       s1    1,  1  | 0, 0, 0,| 0,    0,    0,
       s2    1,  1  | 0, 0, 0,| 0,    0,    0,
       -
       t1    1,  1, | 1, 0, 0,| 0,    0,    0,
       t2    1,  1, | 1, 1, 0,| 0,    0,    0,
       t3    1,  1, | 1, 1, 1,| 0,    0,    0,
       -
       attn1 1,  1, | 0, 0, 0,| 1,    0,    0,
       attn2 1,  1, | 1, 0, 0,| 0,    1,    0,
       attn3 1,  1, | 1, 1, 0,| 0,    0,    1,

       for details, see Fig3. https://arxiv.org/abs/2001.11314
    '''

    src_ids, src_pids, src_sids, tgt_ids, tgt_pids, tgt_sids, attn_ids, tgt_lagels = args
    src_len = src_ids.shape[1]
    tgt_len = tgt_ids.shape[1]
    mask_00 = gen_mask(src_ids, 'bidi', query_len=src_len)
    mask_01 = gen_mask(tgt_ids, 'empty', query_len=src_len)
    mask_02 = gen_mask(attn_ids, 'empty', query_len=src_len)

    mask_10 = gen_mask(src_ids, 'bidi', query_len=tgt_len)
    mask_11 = gen_mask(tgt_ids, 'empty', query_len=tgt_len)
    mask_12 = gen_mask(attn_ids, 'empty', query_len=tgt_len)

    mask_20 = gen_mask(src_ids, 'bidi', query_len=tgt_len)
    mask_21 = gen_mask(tgt_ids, 'causal_without_diag', query_len=tgt_len)
    mask_22 = gen_mask(attn_ids, 'diag', query_len=tgt_len)

    mask_src_2_src = mask_00
