asahi417 committed
Commit c95db0a
Parent: 0e85ca3

model update

Files changed (2)
  1. config.json +1 -1
  2. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "cner_output/model/baseline_2021/roberta_large_continuous/best_model",
+  "_name_or_path": "tner/roberta-large-tweetner-2020",
   "adapters": {
     "adapters": {},
     "config_map": {},
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline_2021/roberta_large_continuous/best_model", "tokenizer_class": "RobertaTokenizer"}
+{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "tner/roberta-large-tweetner-2020", "tokenizer_class": "RobertaTokenizer"}