asahi417 committed
Commit 6912ba9
1 Parent(s): 99ff511

model update

Files changed (3):
  1. config.json +1 -1
  2. pytorch_model.bin +2 -2
  3. tokenizer_config.json +1 -1
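
This commit replaces the checkpoint, so downstream code that depends on the previous weights can pin this exact revision. A minimal sketch, assuming the files live in a Hub repo named `lmqg/bart-base-squadshifts-nyt` (the repo id is inferred from the training paths in the diffs below and is an assumption):

```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Hypothetical repo id, inferred from the local training paths in this commit.
REPO = "lmqg/bart-base-squadshifts-nyt"

# Pin this exact commit so a later "model update" push cannot change results.
model = AutoModelForSeq2SeqLM.from_pretrained(REPO, revision="6912ba9")
tokenizer = AutoTokenizer.from_pretrained(REPO, revision="6912ba9")
```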
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "lmqg_output/qg_squadshifts_vanilla/bart_base_squadshifts_nyt/best_model",
+  "_name_or_path": "lmqg_output/qg_squadshifts_vanilla/bart_base_squadshifts_nyt/model_aacxew/epoch_5",
   "activation_dropout": 0.1,
   "activation_function": "gelu",
   "add_bias_logits": false,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0095df945dfadcce88f6612454d035fbdb1aee4c9b74a303fa261adb5f86a679
-size 557979321
+oid sha256:712ca8cca351b0d4de093dc3cec5e5796469c59a9417f6e76d7b77cc5c82c52f
+size 557982969
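
`pytorch_model.bin` is stored with Git LFS, so the repo only tracks a pointer file: the sha256 of the weights and their size in bytes. A minimal sketch to check a locally downloaded copy against the new pointer (this assumes the real file has already been fetched, e.g. with `git lfs pull`):

```python
import hashlib
from pathlib import Path

path = Path("pytorch_model.bin")

# Hash in 1 MiB chunks rather than reading the ~558 MB file into memory.
sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert path.stat().st_size == 557982969, "size does not match the LFS pointer"
assert sha.hexdigest() == (
    "712ca8cca351b0d4de093dc3cec5e5796469c59a9417f6e76d7b77cc5c82c52f"
), "sha256 does not match the LFS pointer"
print("pytorch_model.bin matches the LFS pointer")
```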
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 1024, "name_or_path": "lmqg_output/qg_squadshifts_vanilla/bart_base_squadshifts_nyt/best_model", "special_tokens_map_file": "lmqg_output/qg_squadshifts_vanilla/bart_base_squadshifts_nyt/model_aacxew/epoch_1/special_tokens_map.json", "tokenizer_class": "BartTokenizer"}
+{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 1024, "name_or_path": "lmqg_output/qg_squadshifts_vanilla/bart_base_squadshifts_nyt/model_aacxew/epoch_5", "special_tokens_map_file": "lmqg_output/qg_squadshifts_vanilla/bart_base_squadshifts_nyt/model_aacxew/epoch_1/special_tokens_map.json", "tokenizer_class": "BartTokenizer"}