pere committed
Commit
458b93a
1 Parent(s): c851246

Saving weights and logs of step 2000

events.out.tfevents.1645262505.t1v-n-1d903126-w-0.11150.0.v2 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:49b541c533ce9e49cd74b109233f3661233429a01b92f0012d6f13da616126dd
-size 147136
+oid sha256:5922927c49047feb864f3db5238d9e3a3fd902fd45840a1ee11bd6b181fd5b7a
+size 294345
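The entry above is a Git LFS pointer file: Git itself stores only the sha256 oid and the byte size, while the payload lives in LFS storage. Both fields changed because the TensorBoard event log grew (147,136 → 294,345 bytes) as training reached step 2000. A minimal sketch for inspecting the logged scalars locally with TensorBoard's EventAccumulator (assuming the tensorboard package is installed and the file has been pulled from LFS; the tag names depend on whatever the training script logged):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Path to the LFS-tracked event file from this commit.
acc = EventAccumulator("events.out.tfevents.1645262505.t1v-n-1d903126-w-0.11150.0.v2")
acc.Reload()  # parse the event records from disk

# Print every scalar series: tag name, global step, value.
for tag in acc.Tags()["scalars"]:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)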
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ae454a7923a392df735c630bee742144b249d0d540b0a260dea8ea546247c73d
+oid sha256:a160697b2290a77498f334c7d7d8de01d944f6bb5517c4f87e9902d87ecbe3c4
 size 498796983
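Here only the oid changed while the size stayed at 498,796,983 bytes (≈499 MB), which is expected: re-saving the Flax parameters at step 2000 rewrites the weight values but not the serialized shape of the model. A minimal sketch for loading such a checkpoint with transformers (assumptions: the repository hosts a RoBERTa-style masked-LM checkpoint, as the tokenizer config's name_or_path NbAiLab/nb-roberta-base suggests, and the repo id below is a placeholder for this repository's actual id):

from transformers import FlaxAutoModelForMaskedLM

# Placeholder repo id; pin to this commit via its hash.
model = FlaxAutoModelForMaskedLM.from_pretrained(
    "NbAiLab/nb-roberta-base",  # substitute the actual repo id
    revision="458b93a",         # commit shown in this page's header
)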
tokenizer.json CHANGED
The diff for this file is too large to render.
 
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "NbAiLab/nb-roberta-base", "tokenizer_class": "RobertaTokenizer"}
+{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "NbAiLab/nb-roberta-base", "tokenizer_class": "RobertaTokenizer"}