|
import torch |
|
from transformers import AutoTokenizer, AutoModelForCausalLM |
|
from typing import Dict, List, Any |
|
|
|
|
|
class EndpointHandler:
    """Inference endpoint handler that wraps a causal LM for text generation."""

    def __init__(self, path: str = "TangrisJones/vicuna-13b-GPTQ-4bit-128g"):
        """Load tokenizer and model from *path* (local directory or hub id).

        Args:
            path: Model repository id or local path passed to
                ``from_pretrained``.
        """
        self.tokenizer = AutoTokenizer.from_pretrained(path)
        self.model = AutoModelForCausalLM.from_pretrained(path)
        # Move the model to GPU when one is available; the original left the
        # model on CPU while serving, and never matched input tensors to the
        # model's device (a crash on GPU deployments).
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model.to(self.device)
        # Inference only: disable dropout and other train-mode behavior.
        self.model.eval()

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Generate a continuation for ``data["inputs"]``.

        Args:
            data: Request payload. ``data["inputs"]`` is the prompt string.
                Optional generation arguments are read from ``data["kwargs"]``
                (original contract) or, failing that, ``data["parameters"]``
                (the HF Inference Endpoints convention).

        Returns:
            A single-element list ``[{"output": generated_text}]``.
        """
        input_text = data["inputs"]
        # Back-compat: "kwargs" takes precedence; "parameters" is accepted as
        # a generalization for standard HF endpoint payloads.
        gen_kwargs = data.get("kwargs", data.get("parameters", {}))

        # Input ids must live on the same device as the model weights.
        input_tokens = self.tokenizer.encode(
            input_text, return_tensors="pt"
        ).to(self.device)

        # no_grad: skip autograd bookkeeping during generation.
        with torch.no_grad():
            output_tokens = self.model.generate(input_tokens, **gen_kwargs)

        # skip_special_tokens: the original decode leaked BOS/EOS markers
        # (e.g. "</s>") into the user-facing text.
        output_text = self.tokenizer.decode(
            output_tokens[0], skip_special_tokens=True
        )

        return [{"output": output_text}]
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Smoke test: push one sample prompt through the handler and show the result.
    endpoint = EndpointHandler()
    sample_request = {"inputs": "Once upon a time in a small village, "}
    response = endpoint(sample_request)
    print(response)
|
|