asahi417 committed on
Commit
7f103f2
1 Parent(s): a0eb040

Upload tokenizer

Browse files
Files changed (2) hide show
  1. tokenizer.json +16 -2
  2. tokenizer_config.json +1 -2
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
1
  {
2
  "version": "1.0",
3
- "truncation": null,
4
- "padding": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "added_tokens": [
6
  {
7
  "id": 0,
 
1
  {
2
  "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 32,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
+ "padding": {
10
+ "strategy": {
11
+ "Fixed": 32
12
+ },
13
+ "direction": "Right",
14
+ "pad_to_multiple_of": null,
15
+ "pad_id": 0,
16
+ "pad_type_id": 0,
17
+ "pad_token": "<pad>"
18
+ },
19
  "added_tokens": [
20
  {
21
  "id": 0,
tokenizer_config.json CHANGED
@@ -3,10 +3,9 @@
3
  "eos_token": "</s>",
4
  "extra_ids": 0,
5
  "model_max_length": 1000000000000000019884624838656,
6
- "name_or_path": "lmqg_output/trimmed_qg/mt5-small-trimmed-es-15000-esquad-qg/best_model",
7
  "pad_token": "<pad>",
8
  "sp_model_kwargs": {},
9
- "special_tokens_map_file": "/home/asahiushio/.cache/huggingface/hub/models--google--mt5-small/snapshots/38f23af8ec210eb6c376d40e9c56bd25a80f195d/special_tokens_map.json",
10
  "tokenizer_class": "T5Tokenizer",
11
  "unk_token": "<unk>"
12
  }
 
3
  "eos_token": "</s>",
4
  "extra_ids": 0,
5
  "model_max_length": 1000000000000000019884624838656,
 
6
  "pad_token": "<pad>",
7
  "sp_model_kwargs": {},
8
+ "special_tokens_map_file": "/home/c.c2042013/.cache/huggingface/hub/models--google--mt5-small/snapshots/38f23af8ec210eb6c376d40e9c56bd25a80f195d/special_tokens_map.json",
9
  "tokenizer_class": "T5Tokenizer",
10
  "unk_token": "<unk>"
11
  }