import numpy as np
import tritonclient.http as httpclient
from utils.embedding.setting import *
from utils.embedding.tokenization import FullTokenizer

class HttpClient:
    """Triton Inference Server HTTP client for a prompt-BERT embedding model.

    Wraps each input title into two prompt templates, tokenizes both into
    fixed-length id sequences, and sends them to the deployed model as a
    single synchronous Triton request.
    """

    def __init__(self, modelname='prompt_bert', url=vec_url,
                 tokenizer_path=tokenizer_path, verbose=False,
                 max_seq_length=123):
        """Create the Triton HTTP client and the WordPiece tokenizer.

        Args:
            modelname: name of the model deployed on the Triton server.
            url: ``host:port`` of the Triton HTTP endpoint.
            tokenizer_path: directory containing ``vocab.txt``.
            verbose: forwarded to the Triton client for request logging.
            max_seq_length: fixed sequence length the deployed model expects
                (padding/truncation target). Default 123 matches the original
                hard-coded value; now parameterized so the same client works
                with models of other input lengths.
        """
        self.model_name = modelname
        vocab_path = tokenizer_path + '/vocab.txt'
        self.triton_client = httpclient.InferenceServerClient(url=url, verbose=verbose)
        self.tokenizer = FullTokenizer(vocab_path, do_lower_case=True)
        self.max_seq_length = max_seq_length

    def convert_single_example(self, text):
        """Tokenize *text* into a fixed-length list of token ids.

        Layout: ``[CLS] <tokens...> [MASK] [SEP]``, right-padded with 0 up
        to ``self.max_seq_length``.

        Returns:
            list[int] of length exactly ``self.max_seq_length``.
        """
        text_tokens = self.tokenizer.tokenize(text)
        # Reserve 3 slots for the [CLS], [MASK] and [SEP] specials.
        # BUGFIX: the previous code sliced to max_seq_length - 2, which left
        # room for only two specials; after inserting [MASK] a long input
        # produced max_seq_length + 1 ids and tripped the length check below.
        tokens = ["[CLS]"]
        tokens.extend(text_tokens[:self.max_seq_length - 3])
        tokens.append("[MASK]")
        tokens.append("[SEP]")
        input_ids = self.tokenizer.convert_tokens_to_ids(tokens)
        # Right-pad with 0 (the conventional [PAD] id) to the fixed length.
        input_ids.extend([0] * (self.max_seq_length - len(input_ids)))
        assert len(input_ids) == self.max_seq_length
        return input_ids

    def gettmp(self, title):
        """Return the two prompt variants of *title* used by the model.

        The templates are Chinese prompts ("..., its meaning is." and
        "..., the meaning of this sentence is.") and must match what the
        model was trained with — do not alter them.
        """
        sentence = f'{title}，它的意思是。'
        sen_tmp = f'{title}，这句话的意思是。'
        return sentence, sen_tmp

    def getids(self, titles):
        """Build the batched Triton ``InferInput`` tensors for *titles*.

        Returns:
            list of two ``InferInput`` objects (``input_ids``,
            ``input_tem``), each of shape [batch, max_seq_length], INT32.
        """
        batch_size = len(titles)
        seq_len = self.max_seq_length  # was a duplicated literal 123; keep in sync with the tokenizer side
        inputs = [
            httpclient.InferInput('input_ids', [batch_size, seq_len], "INT32"),
            httpclient.InferInput('input_tem', [batch_size, seq_len], "INT32"),
        ]
        ori_inputs = []
        tmp_inputs = []
        for title in titles:
            sen, sen_tmp = self.gettmp(title)
            ori_inputs.append(self.convert_single_example(sen))
            tmp_inputs.append(self.convert_single_example(sen_tmp))

        inputs[0].set_data_from_numpy(np.array(ori_inputs, dtype=np.int32))
        inputs[1].set_data_from_numpy(np.array(tmp_inputs, dtype=np.int32))
        return inputs

    def inference(self, titles):
        """Synchronous inference call.

        Args:
            titles: list of title strings to embed.

        Returns:
            The first row of the model's 'output' tensor as a
            comma-separated string, values rounded to 6 decimals.
        """
        inputs = self.getids(titles)
        outputs = [
            httpclient.InferRequestedOutput('output'),
        ]
        result = self.triton_client.infer(
            model_name=self.model_name,
            inputs=inputs,
            request_id='1',
            outputs=outputs)
        output_array = result.as_numpy('output')
        return ','.join(str(i) for i in np.round(output_array[0], 6))


if __name__ == '__main__':
    # Smoke test: push one sample headline through the deployed model.
    HttpClient().inference(['英国首相府违规聚会调查报告认为政府“领导不力”'])
