profoz committed on
Commit 2772074
1 Parent(s): 58ade4c
Files changed (2)
  1. config.json +8 -8
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "./clf/results",
+  "_name_or_path": "distilbert-base-uncased",
   "activation": "gelu",
   "architectures": [
     "DistilBertForSequenceClassification"
@@ -9,15 +9,15 @@
   "dropout": 0.1,
   "hidden_dim": 3072,
   "id2label": {
-    "0": "Negative",
-    "1": "Neutral",
-    "2": "Positive"
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2"
   },
   "initializer_range": 0.02,
   "label2id": {
-    "Negative": 0,
-    "Neutral": 1,
-    "Positive": 2
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2
   },
   "max_position_embeddings": 512,
   "model_type": "distilbert",
@@ -30,6 +30,6 @@
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.12.2",
+  "transformers_version": "4.16.2",
   "vocab_size": 30522
 }
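The config change above swaps the human-readable sentiment names for the generic LABEL_0/1/2 defaults (and re-bases _name_or_path to distilbert-base-uncased), so callers who want readable predictions have to map the ids back themselves. A minimal sketch of doing that at load time with transformers follows; the repository id is a placeholder, not the actual name of this repo.

```python
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

# Placeholder repo id -- substitute the actual Hub repository for this model.
model_id = "your-namespace/your-sentiment-model"

# Restore the human-readable names that this commit replaced with LABEL_0/1/2.
id2label = {0: "Negative", 1: "Neutral", 2: "Positive"}
label2id = {name: idx for idx, name in id2label.items()}

config = AutoConfig.from_pretrained(model_id, id2label=id2label, label2id=label2id)
model = AutoModelForSequenceClassification.from_pretrained(model_id, config=config)
tokenizer = AutoTokenizer.from_pretrained(model_id)

inputs = tokenizer("Great product, would buy again.", return_tensors="pt")
predicted_id = model(**inputs).logits.argmax(dim=-1).item()
print(model.config.id2label[predicted_id])  # e.g. "Positive"
```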
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:184263df1d53416ecbb59e1911f958a616bb807a296bd06790f8df6357924ca3
+oid sha256:6e909ecaa770ed237bfa184966a038652b4664da2653f4dec46ac73e1744402a
 size 267861745
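The weights file is stored through Git LFS, so the diff only touches the pointer: the sha256 oid changes while the byte size stays the same. A small sketch for checking a locally downloaded pytorch_model.bin against the new pointer is below; the local path is an assumption.

```python
import hashlib

# Values taken from the Git LFS pointer in this commit.
expected_oid = "6e909ecaa770ed237bfa184966a038652b4664da2653f4dec46ac73e1744402a"
expected_size = 267861745

path = "pytorch_model.bin"  # assumed local path -- adjust to where the file was saved

# Stream the file in 1 MiB chunks to avoid loading ~268 MB into memory at once.
sha = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
        size += len(chunk)

assert size == expected_size, f"size mismatch: {size}"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```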