config.json CHANGED
@@ -1,11 +1,10 @@
  {
- "_name_or_path": "RoBERTa_fine_tuned_for_proper_nouns_detection",
+ "_name_or_path": "roberta-large",
  "architectures": [
  "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
- "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
@@ -34,8 +33,7 @@
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.31.0.dev0",
+ "transformers_version": "4.3.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee8498022f4d5d82c7bcbcff7c017303d09ff0b47b863960724ef34061537df8
+ size 1417313294
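The three lines above are a Git LFS pointer, not the weights themselves: they record the SHA-256 and byte size (about 1.4 GB) of the real model.safetensors blob. A minimal sketch of verifying a downloaded copy against the pointer, assuming the file sits in the current directory:

```python
import hashlib
from pathlib import Path

# Values copied from the LFS pointer above.
EXPECTED_SHA256 = "ee8498022f4d5d82c7bcbcff7c017303d09ff0b47b863960724ef34061537df8"
EXPECTED_SIZE = 1_417_313_294

path = Path("model.safetensors")  # assumed local download location
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("model.safetensors matches the LFS pointer")
```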
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": true, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "name_or_path": "roberta-large"}