johntang committed
Commit 6ce415e
1 parent: 7da6f7c

Training in progress, epoch 1

Files changed (4)
  1. config.json +1 -11
  2. pytorch_model.bin +1 -1
  3. tokenizer_config.json +1 -3
  4. training_args.bin +1 -1
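
This is the checkpoint pushed at the end of epoch 1. Below is a minimal sketch of the kind of Trainer setup that produces one commit per epoch containing exactly these four files (config.json, pytorch_model.bin, tokenizer_config.json, training_args.bin); the output directory, epoch count, and other settings are assumptions for illustration, not taken from this repository.

# Sketch only: hypothetical settings, not the repo owner's actual script.
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

model = AutoModelForSequenceClassification.from_pretrained(
    "distilbert-base-uncased", num_labels=2)
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")

args = TrainingArguments(
    output_dir="distilbert-finetune",  # hypothetical output/repo name
    num_train_epochs=3,                # assumed; this commit is epoch 1 of the run
    save_strategy="epoch",             # one checkpoint (and one commit) per epoch
    push_to_hub=True,                  # push each saved checkpoint to the Hub
)

# trainer = Trainer(model=model, args=args, tokenizer=tokenizer,
#                   train_dataset=train_dataset, eval_dataset=eval_dataset)
# trainer.train()  # writes and pushes the four files changed in this commit
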
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "distilbert-base-uncased-finetuned-sst-2-english",
+  "_name_or_path": "distilbert-base-uncased",
   "activation": "gelu",
   "architectures": [
     "DistilBertForSequenceClassification"
@@ -7,22 +7,12 @@
   "attention_dropout": 0.1,
   "dim": 768,
   "dropout": 0.1,
-  "finetuning_task": "sst-2",
   "hidden_dim": 3072,
-  "id2label": {
-    "0": "NEGATIVE",
-    "1": "POSITIVE"
-  },
   "initializer_range": 0.02,
-  "label2id": {
-    "NEGATIVE": 0,
-    "POSITIVE": 1
-  },
   "max_position_embeddings": 512,
   "model_type": "distilbert",
   "n_heads": 12,
   "n_layers": 6,
-  "output_past": true,
   "pad_token_id": 0,
   "problem_type": "single_label_classification",
   "qa_dropout": 0.1,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:60475501eca7a3d6aac5e499059eb15518ef3826f952315a03cdbdc6ade22818
+oid sha256:b0edd6ce4b41c32bc02ebc1d2b2714eaa238bd8a9863b41bae0b61519ed73004
 size 267854321
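
pytorch_model.bin is tracked with Git LFS, so only the pointer file appears in the diff: the sha256 oid changes because the weights were rewritten, while the size stays at 267854321 bytes since the architecture (and therefore the tensor shapes) is unchanged. A small sketch for checking a downloaded copy against the pointer; the local path is an assumption.

import hashlib

def lfs_sha256(path, chunk_size=1 << 20):
    """Compute the sha256 digest that Git LFS records as the pointer's oid."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Should print the "+" oid from this diff for the epoch-1 weights:
# print(lfs_sha256("pytorch_model.bin"))
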
tokenizer_config.json CHANGED
@@ -1,11 +1,9 @@
 {
   "cls_token": "[CLS]",
-  "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
   "model_max_length": 512,
-  "name_or_path": "distilbert-base-uncased-finetuned-sst-2-english",
-  "never_split": null,
+  "name_or_path": "distilbert-base-uncased",
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
   "special_tokens_map_file": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9cdda410b39a44df892cb1ca5e99c45457d598b2bc196a95b57367599d0ed9e1
+oid sha256:8ee423264dfe12960bbfbb1af259dbc2539efa633cc9f61e7cc447d20722fbfe
 size 3311
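
training_args.bin is the TrainingArguments object the Trainer stores with every checkpoint via torch.save; the oid changes here because the serialized arguments differ from the previously uploaded ones, even though the file size stays at 3311 bytes. A sketch for inspecting it locally; the path is assumed, and loading a pickled object requires trusting the file.

import torch

# weights_only=False is needed on recent PyTorch releases to unpickle arbitrary objects.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
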