from typing import Dict, List, Any | |
from transformers import AutoTokenizer, AutoModelForCausalLM | |
class EndpointHandler:
    """Custom Hugging Face Inference Endpoints handler for a GPT-NeoXT chat model.

    Loads the verseAI/vai-GPT-NeoXT-Chat-Base-20B checkpoint in 8-bit and
    serves chat-style completions for payloads of the form {"inputs": "<text>"}.
    """

    def __init__(self, path: str = ""):
        # NOTE(review): `path` is the local model directory that Inference
        # Endpoints mounts, but this handler ignores it and pulls the model
        # from the Hub instead — kept as-is for backward compatibility.
        self.tokenizer = AutoTokenizer.from_pretrained("verseAI/vai-GPT-NeoXT-Chat-Base-20B")
        # device_map="auto" spreads layers across available devices;
        # load_in_8bit=True requires the bitsandbytes package at runtime.
        self.model = AutoModelForCausalLM.from_pretrained(
            "verseAI/vai-GPT-NeoXT-Chat-Base-20B",
            device_map="auto",
            load_in_8bit=True,
        )

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Run one generation request.

        Args:
            data: request payload; ``data["inputs"]`` (str) is the user prompt.

        Returns:
            A single-element list, ``[{"generated_text": <decoded output>}]``,
            matching the declared return type (the original returned a bare
            str, contradicting its own annotation).
        """
        # Bug fix: the original ignored `data` entirely and always generated
        # from a hard-coded "Hello!" prompt. Fall back to "Hello!" only when
        # the payload carries no "inputs" key.
        prompt = data.get("inputs", "Hello!")
        formatted = f"<human>: {prompt}\n<bot>:"

        inputs = self.tokenizer(formatted, return_tensors='pt').to(self.model.device)
        outputs = self.model.generate(**inputs, max_new_tokens=10, do_sample=True, temperature=0.8)
        output_str = self.tokenizer.decode(outputs[0])
        return [{"generated_text": output_str}]