m0saan committed
Commit 1856a4f
1 Parent(s): 69d92d0

Training in progress, step 500

config.json CHANGED
@@ -36,6 +36,6 @@
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.31.0",
+  "transformers_version": "4.16.2",
   "vocab_size": 30522
 }
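
The only substantive change in config.json is the transformers_version stamp, which the library writes automatically whenever a config is serialized; the move from 4.31.0 to 4.16.2 records the environment that produced this checkpoint rather than any change to the model. A minimal sketch of where that value comes from (assumes only that transformers is installed; this is not code from the repo):

# The "transformers_version" field is filled in from the installed library version
# at serialization time; it is never set by hand in the config file.
import transformers
from transformers import DistilBertConfig

config = DistilBertConfig()                       # defaults match this repo, e.g. vocab_size=30522
print(transformers.__version__)                   # version of the environment doing the saving
print(config.to_dict()["transformers_version"])   # the same value is written into config.json
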
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6648bd546c65b99d91a6b2ed1a969776f04679928f0b5b0a8f563e46c7c3a849
-size 267867821
+oid sha256:4a3baaf38f3b4362d40914f5e7308e085f95aacc4483aff85aa48b46eb74fdde
+size 267874026
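
pytorch_model.bin is tracked with Git LFS, so the hunk above only rewrites the three-line pointer file (spec version, sha256 oid, and byte size of the weights), not the weights themselves. A minimal sketch of reading such a pointer, assuming a locally checked-out pointer file (the path is illustrative):

# Parse a Git LFS pointer file like the one diffed above; each line is "key value".
def parse_lfs_pointer(path):
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("pytorch_model.bin")
print(pointer["oid"])   # "sha256:4a3b..." - hash of the real 267,874,026-byte weights blob
print(pointer["size"])  # size of the actual file in bytes
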
runs/Jan29_12-17-44_6928e0d904c2/1706530734.518449/events.out.tfevents.1706530734.6928e0d904c2.159.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91d617bc216ec8b30464f4b296965ee0e6ceeeed59ec050b3817fbd5e6ee7162
+size 4880
runs/Jan29_12-17-44_6928e0d904c2/events.out.tfevents.1706530734.6928e0d904c2.159.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d8c9690e368aa267de86c9772963ddbe707c23ca7148fffbe134238e3c0ceb4
+size 4253
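
The two new runs/ files are TensorBoard event logs written during training and pushed along with the step-500 checkpoint; like the other binaries, they appear here only as LFS pointers. A minimal sketch of inspecting them locally, assuming the repo is cloned with LFS objects pulled and tensorboard is installed:

# Read the TensorBoard event logs added in this commit from the run directory.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Jan29_12-17-44_6928e0d904c2")  # run directory from this commit
ea.Reload()
print(ea.Tags()["scalars"])  # scalar tags logged so far, e.g. training loss / learning rate
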
special_tokens_map.json CHANGED
@@ -1,7 +1 @@
-{
-  "cls_token": "[CLS]",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "unk_token": "[UNK]"
-}
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,15 +1 @@
-{
-  "clean_up_tokenization_spaces": true,
-  "cls_token": "[CLS]",
-  "do_basic_tokenize": true,
-  "do_lower_case": true,
-  "mask_token": "[MASK]",
-  "model_max_length": 512,
-  "never_split": null,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "DistilBertTokenizer",
-  "unk_token": "[UNK]"
-}
+{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "distilbert-base-uncased", "tokenizer_class": "DistilBertTokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7477956745a8f45fd94fe95e55e3f1fe3ae53adfef5f35b08d9a834427b2f21c
-size 4027
+oid sha256:951fa6cfba0599851c7e473c7d71a59df60c66d4e6bae9d182da7d471a619937
+size 3512
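
training_args.bin appears here only as an LFS pointer because it is a binary pickle of the run's transformers.TrainingArguments, saved by Trainer with torch.save; "Training in progress, step 500" is the standard message the Trainer uses when it pushes a checkpoint. A minimal inspection sketch, assuming the repo is cloned with LFS files pulled and torch plus transformers installed (weights_only=False is needed on newer torch releases to unpickle a non-tensor object):

# Load the pickled TrainingArguments to see the run's settings.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # TrainingArguments
print(args.save_steps)      # checkpoint interval; the commit message suggests a save at step 500
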