Akari committed on
Commit 62bd184
1 Parent(s): 5c76eb6

Training in progress, step 500

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "test-squad-trained",
+  "_name_or_path": "albert-base-v2",
   "architectures": [
     "AlbertForQuestionAnswering"
   ],
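The _name_or_path change shows that this checkpoint was re-initialized from the albert-base-v2 Hub model rather than the earlier local test-squad-trained directory: transformers records whatever identifier a model was loaded from and writes it back into config.json on save. A minimal sketch of how the field ends up there, assuming the standard transformers API (the output directory name below is illustrative, not from this repo):

from transformers import AlbertForQuestionAnswering

# Loading from the Hub records "albert-base-v2" as the source identifier.
model = AlbertForQuestionAnswering.from_pretrained("albert-base-v2")

# save_pretrained() then writes config.json with
# "_name_or_path": "albert-base-v2", as seen in the diff above.
model.save_pretrained("test-squad-trained")  # illustrative output directory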
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8b34e009b733ec0ea576dcc2b14144f065b05d3a6f152aa5ee88be97cb92afe4
-size 44388794
+oid sha256:d09477329cbe6be02f9cb776556c7c4bf9fd83485f7ca0098503478958e31bd0
+size 44393497
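pytorch_model.bin is stored through Git LFS, so the diff above only touches the pointer file: oid is the SHA-256 of the actual weights blob and size is its byte count (the step-500 weights are 44,393,497 bytes). A downloaded copy can be checked against the pointer with the standard library alone; a minimal sketch:

import hashlib

# Hash the weights file in 1 MiB chunks and compare with the pointer's oid.
digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

# Expected for this commit:
# d09477329cbe6be02f9cb776556c7c4bf9fd83485f7ca0098503478958e31bd0
print(digest.hexdigest())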
runs/Nov11_21-44-45_bigtensor/1636685104.336796/events.out.tfevents.1636685104.bigtensor.22780.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46bea3a88f2e7db831c02b09d101d2f129aeaab07766d4cb40dbdb6f133c7365
+size 4546
runs/Nov11_21-44-45_bigtensor/events.out.tfevents.1636685104.bigtensor.22780.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa61f9cfe28d2785e8fb60393e8fd45f6f8274c608d61aaf0799c9a6fe699eba
+size 3501
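The two added runs/... files are TensorBoard event logs written during training (by convention, the file under the 1636685104.336796 subdirectory holds the hyperparameter dump and the top-level one the scalar metrics). They can be read programmatically with the tensorboard package; a sketch, assuming the run directory has been pulled from LFS:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory added in this commit and load its events.
acc = EventAccumulator("runs/Nov11_21-44-45_bigtensor")
acc.Reload()

# List the scalar tags that were logged (e.g. loss and learning rate).
print(acc.Tags()["scalars"])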
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"do_lower_case": true, "remove_space": true, "keep_accents": false, "bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "<unk>", "sep_token": "[SEP]", "pad_token": "<pad>", "cls_token": "[CLS]", "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "test-squad-trained", "tokenizer_class": "AlbertTokenizer"}
+{"do_lower_case": true, "remove_space": true, "keep_accents": false, "bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "<unk>", "sep_token": "[SEP]", "pad_token": "<pad>", "cls_token": "[CLS]", "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "albert-base-v2", "tokenizer_class": "AlbertTokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3f1f1eacf88a9b520172906c67b4af13d0b4cfcde63d455bcee070b7ba9ca146
-size 2593
+oid sha256:c1d641806764872dfd2062ff90d456cde2247220eb8540f3c1f9864752fc6473
+size 2863
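training_args.bin is a pickled TrainingArguments object that the Trainer saves alongside each checkpoint; the size change simply reflects the new run's settings. It can be inspected with torch.load, though recent PyTorch releases default to weights_only=True, so unpickling an arbitrary object needs the flag set explicitly; a hedged sketch:

import torch

# The file is a pickled TrainingArguments object, not a tensor archive,
# so it must be unpickled with weights_only=False (only do this for
# checkpoints you trust).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.output_dir)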