nadika committed
Commit 6b67b82
1 Parent(s): 6501b9c

Training in progress, step 50

model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:74887ce24fa548ae712845520e554ebcfcf9ca716b6d5eb1938d899a2e293770
+oid sha256:73aacf0f4758c80f3d2054e609ce870b41111ba983690f0eb37167fd5d47ade2
 size 891644712
tokenizer.json CHANGED
@@ -6,14 +6,7 @@
     "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding": {
-    "strategy": "BatchLongest",
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "<pad>"
-  },
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -930,8 +930,15 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
+  "max_length": 512,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
+  "stride": 0,
   "tokenizer_class": "T5Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
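
The new keys in tokenizer_config.json (max_length 512, longest-first truncation, right-side padding) describe settings that are now applied per call rather than baked into tokenizer.json. A minimal sketch of loading the saved tokenizer and applying those recorded settings at call time; the local path "checkpoint-50" is illustrative, not part of this commit:

# A minimal sketch, assuming the checkpoint is available locally under an
# illustrative directory name "checkpoint-50".
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("checkpoint-50")

# With "padding": null in tokenizer.json, padding and truncation are applied
# per call, matching the values recorded in tokenizer_config.json.
batch = tokenizer(
    ["an example input sentence"],
    max_length=512,
    truncation="longest_first",
    padding="longest",
)
print(len(batch["input_ids"][0]))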
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6cbc29138ae1983d326ae7c404b75683ae354795daaf141c1d3afbb83095bd6b
+oid sha256:bce81bca8d567c0bea56afe3d27fdaea03f074c7e0dcd9c1e048a3acc330707d
 size 5176