mwkby committed on
Commit
1aff81c
1 Parent(s): 6ff2597

Fix output labels

Browse files
Files changed (2) hide show
  1. config.json +9 -1
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "distilbert-base-uncased",
3
  "activation": "gelu",
4
  "architectures": [
5
  "DistilBertForSequenceClassification"
@@ -8,7 +8,15 @@
8
  "dim": 768,
9
  "dropout": 0.1,
10
  "hidden_dim": 3072,
 
 
 
 
11
  "initializer_range": 0.02,
 
 
 
 
12
  "max_position_embeddings": 512,
13
  "model_type": "distilbert",
14
  "n_heads": 12,
 
1
  {
2
+ "_name_or_path": "mwkby/distilbert-base-uncased-sentiment-reddit-crypto",
3
  "activation": "gelu",
4
  "architectures": [
5
  "DistilBertForSequenceClassification"
 
8
  "dim": 768,
9
  "dropout": 0.1,
10
  "hidden_dim": 3072,
11
+ "id2label": {
12
+ "0": "negative",
13
+ "1": "positive"
14
+ },
15
  "initializer_range": 0.02,
16
+ "label2id": {
17
+ "negative": 0,
18
+ "positive": 1
19
+ },
20
  "max_position_embeddings": 512,
21
  "model_type": "distilbert",
22
  "n_heads": 12,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:67d83f650d9c439f9fa7c8bd5b8cd9d19d9b6f566b63bf14fd05ddeebf10418c
3
- size 267855533
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e05c523e5d8bfae91b19e37e99a12617592a55731a752146e25504b59ab8c65b
3
+ size 267854125