#!/usr/bin/python3
# -*- coding: utf-8 -*-
# File  : predict.py
# Author: anyongjin
# Date  : 2020/9/8
import os
from DistillBert.model import *
import numpy as np


def softmax(z):
    """Row-wise, numerically stable softmax over a 2-D score matrix.

    Each row of ``z`` is a vector of raw logits; the corresponding row of
    the result sums to 1. The row maximum is subtracted before
    exponentiation to avoid overflow for large logits.
    """
    assert z.ndim == 2
    # keepdims=True preserves the (rows, 1) shape so broadcasting works
    shifted = z - z.max(axis=1, keepdims=True)
    exp_scores = np.exp(shifted)
    return exp_scores / exp_scores.sum(axis=1, keepdims=True)


class StuPredicter(StudentModel):
    """Inference-only wrapper around a distilled student classifier.

    Loads a trained student model (``model.h5``) plus its label vocabulary
    (``labels.txt``) from *model_dir* and exposes batch prediction over
    raw texts or pre-tokenized id sequences.
    """

    def __init__(self, model_dir):
        """Load model weights, config and label vocabulary from *model_dir*.

        Raises AssertionError if ``model.h5`` is missing.
        """
        self.model_path = os.path.join(model_dir, 'model.h5')
        assert os.path.isfile(self.model_path), f'model not exists:{self.model_path}'
        super(StuPredicter, self).__init__(model_dir=model_dir, distill_from_teacher=True)
        # Maximum input sequence length; longer inputs are truncated.
        self.max_len = self.config['max_len']
        from transformers.tokenization_bert import load_vocab
        self.label_vocab = load_vocab(os.path.join(model_dir, 'labels.txt'))
        # Line order in labels.txt defines the class id the model predicts.
        self.labels = list(self.label_vocab.keys())

    def predict_tokens(self, batch_tokens):
        """Classify a batch of already-tokenized inputs.

        ``batch_tokens`` is a non-empty 2-D sequence of positive int token
        ids. Each row is truncated to ``max_len`` and right-padded with 0.
        Returns ``(labels, probs)``: the best label per row and its
        softmax probability as a float.
        """
        assert len(batch_tokens) > 0 and len(batch_tokens[0]) > 0 and batch_tokens[0][0] > 0, \
            'batch_tokens must be a 2-dimensional list of int tokens, e.g.[[23,5332,938,0,0],[2,67,0,0,0],...]'
        # Truncate to max_len, then right-pad with 0 so every row has
        # exactly max_len ids; max(0, ...) makes the no-padding case explicit.
        pad_tokens = np.array([
            list(tokens[:self.max_len]) + [0] * max(0, self.max_len - len(tokens))
            for tokens in batch_tokens
        ], dtype=np.int32)
        batch_outs = self.model.predict(pad_tokens)
        # Model outputs raw logits; convert to per-row probabilities.
        batch_probs = softmax(batch_outs)
        best_ids = batch_probs.argmax(axis=-1)
        best_probs = [float(batch_probs[i][cid]) for i, cid in enumerate(best_ids)]
        return [self.labels[i] for i in best_ids], best_probs

    def predict(self, batch_texts):
        """Classify a non-empty batch of raw text strings.

        Tokenizes each text and delegates to :meth:`predict_tokens`;
        returns the same ``(labels, probs)`` pair.
        """
        assert len(batch_texts) > 0 and isinstance(batch_texts[0], str), 'batch_texts must be list of str'
        batch_tokens = [self.tokenizer.get_input_ids(t) for t in batch_texts]
        return self.predict_tokens(batch_tokens)

