# Wrap a CRF layer on top of ERNIE for sequence labeling.
import os
import math

import numpy as np
import paddle
import paddle.nn as nn

from paddlenlp.layers.crf import LinearChainCrf, ViterbiDecoder, LinearChainCrfLoss
from paddlenlp.transformers import ErnieTokenizer, ErnieForTokenClassification, LinearDecayWithWarmup


class ErnieCrf(nn.Layer):
    """ERNIE token-classification backbone with a linear-chain CRF head.

    The ERNIE classifier produces per-token emission scores; a CRF layer
    scores them against gold labels during training and Viterbi-decodes
    them at inference time.

    Args:
        num_labels (int): Number of real tags (excluding the CRF's
            internal start/stop tags).
        pre_trained_model (str): Name of the pretrained ERNIE model to load.
        crf_lr (float): Learning-rate multiplier applied to the CRF
            parameters (passed through to ``LinearChainCrf``).
        with_start_stop_tag (bool): Whether the CRF appends extra
            start/stop tags internally.
    """

    def __init__(self,
                 num_labels,
                 pre_trained_model="ernie-1.0",
                 crf_lr=0.2,
                 with_start_stop_tag=True):
        super(ErnieCrf, self).__init__()
        self.num_labels = num_labels
        self.pre_trained_model = pre_trained_model
        self.crf_lr = crf_lr
        self.with_start_stop_tag = with_start_stop_tag

        # BUG FIX: the original hard-coded "ernie-1.0" here, silently
        # ignoring the `pre_trained_model` argument.
        # `LinearChainCrf` adds 2 internal start/stop tags only when
        # `with_start_stop_tag` is True, so the emission dimension of the
        # classification head must match conditionally, not always `+ 2`.
        num_classes = (self.num_labels + 2
                       if with_start_stop_tag else self.num_labels)
        self.ernie_classification = ErnieForTokenClassification.from_pretrained(
            self.pre_trained_model, num_classes=num_classes)

        self.crf = LinearChainCrf(self.num_labels, self.crf_lr,
                                  with_start_stop_tag)
        self.crf_loss = LinearChainCrfLoss(self.crf)
        self.viterbi_decoder = ViterbiDecoder(self.crf.transitions,
                                              with_start_stop_tag)

    def forward(self, inputs, token_type_ids, lengths, labels=None):
        """Compute CRF loss (training) or Viterbi-decoded tags (inference).

        Args:
            inputs: Token-id tensor fed to ERNIE.
            token_type_ids: Segment-id tensor fed to ERNIE.
            lengths: Actual (unpadded) sequence lengths, used by the CRF
                loss and the Viterbi decoder to mask padding.
            labels: Gold tag ids; when provided, the CRF loss is returned.

        Returns:
            ``(loss, lengths, labels)`` when ``labels`` is not None,
            otherwise the Viterbi-decoded tag-id sequences.
        """
        # Per-token emission scores from the ERNIE classification head.
        emission = self.ernie_classification(inputs, token_type_ids)

        if labels is not None:
            loss = self.crf_loss(emission, lengths, labels)
            return loss, lengths, labels
        # Inference path: ViterbiDecoder returns (scores, best_tag_paths);
        # only the decoded paths are needed.
        _, prediction = self.viterbi_decoder(emission, lengths)
        return prediction
