Hiveurban committed
Commit 8c2a0e5 · verified · 1 Parent(s): 4892a13

Upload handler.py with huggingface_hub

Files changed (1)
  1. handler.py +7 -7
handler.py CHANGED
@@ -1,14 +1,12 @@
-from transformers import pipeline, AutoModelForTokenClassification, AutoTokenizer
 from typing import Dict, List, Any
-from tokenizers.decoders import WordPiece
+from transformers import AutoModel, AutoTokenizer
 
 
 class EndpointHandler:
     def __init__(self, path="."):
-        model = AutoModelForTokenClassification.from_pretrained(path)
-        tokenizer = AutoTokenizer.from_pretrained(path)
-        self.pipeline = pipeline('token-classification', model=model, tokenizer=tokenizer, aggregation_strategy='simple')
-        self.pipeline.tokenizer.backend_tokenizer.decoder = WordPiece()
+        self.tokenizer = AutoTokenizer.from_pretrained(path)
+        self.model = AutoModel.from_pretrained(path, trust_remote_code=True)
+        self.model.eval()
 
     def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
         """
@@ -18,4 +16,6 @@ class EndpointHandler:
         Return:
             A :obj:`list` | `dict`: will be serialized and returned
         """
-        return self.pipeline(data['inputs'])
+        # return self.pipeline(data['inputs'])
+        return self.model.predict([data['inputs']], self.tokenizer, output_style='json')
+
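For context on how this handler is exercised: Inference Endpoints instantiate EndpointHandler once and then call it with a JSON payload of the form {"inputs": ...}. The sketch below is a minimal local smoke test of the updated handler; it is an illustration, not part of this commit, and it assumes the checked-out repository contains the model weights plus the remote code that defines the custom predict(texts, tokenizer, output_style='json') method the new __call__ relies on (that method is not part of the standard transformers API).

# Hypothetical local smoke test for the new handler (not part of this commit).
# Assumes this script sits next to handler.py inside the model repository, and
# that the repo's remote code defines the `predict` method used by __call__.
from handler import EndpointHandler

handler = EndpointHandler(path=".")   # loads tokenizer and model with trust_remote_code=True
payload = {"inputs": "Jane Doe moved to Paris in March 2021."}
result = handler(payload)             # same call shape an Inference Endpoint would make
print(result)                         # JSON-serializable output, per the docstring

Switching from the token-classification pipeline to the model's own predict presumably moves tokenization, span aggregation, and JSON formatting into the model's remote code, which is why the WordPiece decoder workaround from the old handler could be dropped.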