shhossain committed
Commit 5b64c43
1 Parent(s): 6d9533b

Upload folder using huggingface_hub

Files changed (2)
  1. config.json +13 -2
  2. sentpipeline.py +22 -0
config.json CHANGED
@@ -1,10 +1,11 @@
 {
+  "_name_or_path": "shhossain/all-MiniLM-L6-v2-sentiment-classifier",
   "architectures": [
     "SententenceTransformerSentimentModel"
   ],
   "auto_map": {
-    "AutoConfig": "model.SentimentConfig",
-    "AutoModelForSequenceClassification": "model.SententenceTransformerSentimentModel"
+    "AutoConfig": "shhossain/all-MiniLM-L6-v2-sentiment-classifier--model.SentimentConfig",
+    "AutoModelForSequenceClassification": "shhossain/all-MiniLM-L6-v2-sentiment-classifier--model.SententenceTransformerSentimentModel"
   },
   "class_map": {
     "0": "sad",
@@ -14,6 +15,16 @@
     "4": "fear",
     "5": "surprise"
   },
+  "custom_pipelines": {
+    "text-classification": {
+      "impl": "sentpipeline.SentimentModelPipe",
+      "pt": [
+        "AutoModelForSequenceClassification"
+      ],
+      "tf": [],
+      "type": "text"
+    }
+  },
   "embedding_model": "sentence-transformers/all-MiniLM-L6-v2",
   "h1": 44,
   "h2": 46,
sentpipeline.py ADDED
@@ -0,0 +1,22 @@
+from transformers import Pipeline
+from sentence_transformers import SentenceTransformer
+import torch
+
+class SentimentModelPipe(Pipeline):
+    def __init__(self, **kwargs):
+        Pipeline.__init__(self, **kwargs)
+        self.smodel = SentenceTransformer(kwargs.get("embedding_model", "sentence-transformers/all-MiniLM-L6-v2"))
+
+    def _sanitize_parameters(self, **kw):
+        return {}, {}, {}
+
+    def preprocess(self, inputs):
+        return self.smodel.encode(inputs, convert_to_tensor=True)
+
+    def postprocess(self, outputs):
+        return outputs.argmax(1).item()
+
+    def _forward(self, tensor):
+        with torch.no_grad():
+            out = self.model(tensor)
+        return out
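
As committed, postprocess() returns a bare class index rather than a label string. A hedged follow-up sketch, continuing from the pipeline loaded earlier, that maps the index back through the class_map from config.json (the variable names and input text are illustrative, and it assumes class_map is exposed on the loaded config as a dict keyed by stringified indices):

# Hypothetical usage: turn the integer prediction back into a label.
idx = pipe("that storm was terrifying")        # e.g. 4
label = pipe.model.config.class_map[str(idx)]  # e.g. "fear", per the class_map above
print(label)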