Linear-Matrix-Probability committed
Commit 8f6a7e9
1 parent: 2e98537

Update handler.py

Files changed (1)
  1. handler.py +4 -4
handler.py CHANGED
@@ -1,4 +1,4 @@
-from typing import Dict
+from typing import Dict, List
 from transformers import (
     AutoTokenizer,
     AutoModelForSeq2SeqLM,
@@ -20,11 +20,11 @@ class EndpointHandler:
         self.tokenizer = AutoTokenizer.from_pretrained(path)
         self.model = AutoModelForSeq2SeqLM.from_pretrained(path)
 
-    def __call__(self, data: Dict[str, str]) -> Dict[str, str]:
+    def __call__(self, data: Dict[str, str]) -> List[Dict[str, str]]:
 
         inputs = data.pop('inputs', None)
         if inputs is None or inputs == '':
-            return {'generated_text': 'No input provided'}
+            return [{'generated_text': 'No input provided'}]
 
         # preprocess
         input_ids = self.tokenizer(inputs, return_tensors="pt").input_ids
@@ -33,4 +33,4 @@ class EndpointHandler:
         # postprocess
         response = self.tokenizer.decode(output_ids[0], skip_special_tokens=True)
 
-        return {'generated_text': response}
+        return [{'generated_text': response}]
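For reference, a minimal sketch of how the updated handler could be exercised locally after this change. The constructor keyword, the model directory ".", and the sample prompt are assumptions for illustration and are not part of the commit; the only behavior taken from the diff is that __call__ now returns a list of dicts rather than a bare dict.

# Local smoke test for the updated EndpointHandler (a sketch, not part of this commit).
# Assumes handler.py sits next to the model/tokenizer files and that __init__ takes a `path` argument.
from handler import EndpointHandler

handler = EndpointHandler(path=".")  # placeholder path; point this at the actual model directory

# __call__ now returns a list of dicts instead of a bare dict.
result = handler({"inputs": "Hello, world!"})
print(result)                        # e.g. [{'generated_text': '...'}]
print(result[0]["generated_text"])   # index into the list to get the generated string

# Empty or missing input still returns a list, so callers can index it uniformly.
print(handler({"inputs": ""}))       # [{'generated_text': 'No input provided'}]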