voidful committed
Commit bab494b
1 Parent(s): 32ab7d0

update model

Files changed (4)
  1. config.json +2 -1
  2. pytorch_model.bin +2 -2
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +1 -1
config.json CHANGED
@@ -18,6 +18,7 @@
  "n_layer": 12,
  "n_positions": 1024,
  "resid_pdrop": 0.1,
+ "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
@@ -31,7 +32,7 @@
  },
  "tie_word_embeddings": false,
  "tokenizer_class": "BertTokenizerFast",
- "transformers_version": "4.5.1",
+ "transformers_version": "4.6.1",
  "use_cache": true,
  "vocab_size": 21128
  }
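The second hunk bumps transformers_version from 4.5.1 to 4.6.1; the first adds scale_attn_weights, a GPT-2 config flag introduced around the transformers 4.6 line that, when true, divides attention scores by the square root of the head dimension (standard scaled dot-product attention). A minimal sketch for inspecting the updated config after this commit; the repo id voidful/gpt2-base-chinese is assumed for illustration:

    # Sketch (not part of the commit): inspect the updated config.
    # The repo id below is an assumption for illustration.
    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("voidful/gpt2-base-chinese")
    assert config.scale_attn_weights is True  # key added in this commit
    print(config.transformers_version)        # "4.6.1" after this update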
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0ead48d6a1cd189178c396c2e6b859e4b78871970fb0c642608db2ab8cef15e6
- size 485912852
+ oid sha256:671975529a9d3f439fd451e365e54628c4ca827afa28338ad73beca0ed18e6c6
+ size 485828048
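pytorch_model.bin is stored through Git LFS, so the diff only touches the pointer file: the new weights show up as a changed sha256 object id and byte size. A sketch, assuming a locally downloaded copy of the file, for verifying it against the pointer above:

    # Sketch: verify a downloaded pytorch_model.bin against the LFS pointer.
    # The local path is an assumption; only the standard library is used.
    import hashlib
    import os

    EXPECTED_OID = "671975529a9d3f439fd451e365e54628c4ca827afa28338ad73beca0ed18e6c6"
    EXPECTED_SIZE = 485828048  # bytes, from the pointer's "size" line

    path = "pytorch_model.bin"
    assert os.path.getsize(path) == EXPECTED_SIZE

    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            sha.update(chunk)
    assert sha.hexdigest() == EXPECTED_OID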
tokenizer.json ADDED
The diff for this file is too large to render.
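With tokenizer.json now checked in, the fast tokenizer is serialized directly in the repo. A sketch, assuming a local clone, of loading it standalone with the tokenizers library:

    # Sketch: load the newly added tokenizer.json with the standalone
    # `tokenizers` library; the local path is an assumption.
    from tokenizers import Tokenizer

    tok = Tokenizer.from_file("tokenizer.json")
    print(tok.encode("今天天氣").tokens)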
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "name_or_path": "ckiplab/gpt2-base-chinese", "special_tokens_map_file": "/root/.cache/huggingface/transformers/da2a6a6111a4fd14554e008b8b86a0655c8e5484fbc252508eeb51d7a7d57ef0.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d", "do_basic_tokenize": true, "never_split": null}
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "eos_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "name_or_path": "ckiplab/gpt2-base-chinese", "do_basic_tokenize": true, "never_split": null}