asahi417 committed on
Commit
4b97363
1 Parent(s): 3bd29e3
config.json ADDED
@@ -0,0 +1,50 @@
+ {
+ "_name_or_path": "xlm-roberta-base",
+ "architectures": [
+ "XLMRobertaForTokenClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "B-dna",
+ "1": "I-dna",
+ "2": "O",
+ "3": "B-protein",
+ "4": "I-protein",
+ "5": "B-cell type",
+ "6": "I-cell type",
+ "7": "B-cell line",
+ "8": "I-cell line",
+ "9": "B-rna",
+ "10": "I-rna"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "B-cell line": 7,
+ "B-cell type": 5,
+ "B-dna": 0,
+ "B-protein": 3,
+ "B-rna": 9,
+ "I-cell line": 8,
+ "I-cell type": 6,
+ "I-dna": 1,
+ "I-protein": 4,
+ "I-rna": 10,
+ "O": 2
+ },
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "xlm-roberta",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "output_past": true,
+ "pad_token_id": 1,
+ "type_vocab_size": 1,
+ "vocab_size": 250002
+ }
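The config above defines an XLM-RoBERTa token-classification head with 11 BIO labels covering the BioNLP 2004 entity types (dna, rna, protein, cell type, cell line). A minimal inference sketch, assuming the files from this commit are cloned to a local directory; the path and example sentence are illustrative and not part of the repository:

```python
import torch
from transformers import AutoTokenizer, AutoModelForTokenClassification

model_dir = "./"  # hypothetical local clone of this repository
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

text = "IL-2 gene expression requires NF-kappa B activation."  # illustrative sentence
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# id2label from config.json maps each predicted index to a BIO tag such as B-protein.
predictions = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, label_id in zip(tokens, predictions):
    print(token, model.config.id2label[label_id])
```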
events.out.tfevents.1609889155.comsc-ISRMOT-220.1145654.0 ADDED
Binary file (1.39 MB).
parameter.json ADDED
@@ -0,0 +1 @@
+ {"dataset": ["bionlp2004"], "transformers_model": "xlm-roberta-base", "random_seed": 1234, "lr": 1e-05, "total_step": 13000, "warmup_step": 700, "weight_decay": 1e-07, "batch_size": 16, "max_seq_length": 128, "fp16": false, "max_grad_norm": 1.0, "lower_case": false}
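parameter.json records the fine-tuning hyperparameters (XLM-R base on bionlp2004, lr 1e-05, 13,000 steps with 700 warmup steps). A rough sketch of how these values would typically be wired into an optimizer and warmup schedule with transformers; this is an assumption for illustration, not the actual training script that produced the checkpoint:

```python
import json
import torch
from transformers import XLMRobertaForTokenClassification, get_linear_schedule_with_warmup

params = json.load(open("parameter.json"))

# 11 labels, matching the id2label mapping in config.json
model = XLMRobertaForTokenClassification.from_pretrained(params["transformers_model"], num_labels=11)
optimizer = torch.optim.AdamW(model.parameters(), lr=params["lr"], weight_decay=params["weight_decay"])
scheduler = get_linear_schedule_with_warmup(
    optimizer,
    num_warmup_steps=params["warmup_step"],
    num_training_steps=params["total_step"],
)
# Inside the training loop, gradients would be clipped before each optimizer step:
# torch.nn.utils.clip_grad_norm_(model.parameters(), params["max_grad_norm"])
```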
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83b75e8bece599fc958e77af87b7fc0c2f138198cbe7259125123b7d98558f56
+ size 1109931639
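pytorch_model.bin is stored as a Git LFS pointer: the three lines above record only the spec version, the SHA-256 of the real file, and its size (about 1.1 GB). After `git lfs pull`, the downloaded weights can be checked against the pointer with a small sketch like this:

```python
import hashlib
import os

path = "pytorch_model.bin"  # the resolved file after `git lfs pull`
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

# Both values should match the oid and size recorded in the LFS pointer above.
print(h.hexdigest() == "83b75e8bece599fc958e77af87b7fc0c2f138198cbe7259125123b7d98558f56")
print(os.path.getsize(path) == 1109931639)
```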
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+ size 5069051
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
test_bc5cdr_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}, "test": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}}
test_bionlp2004.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 73.40277032105658, "recall": 79.09290755524701, "precision": 68.47640989682459, "summary": " precision recall f1-score support\n\n cell line 0.49 0.69 0.57 500\n cell type 0.75 0.72 0.74 1919\n dna 0.65 0.75 0.70 1054\n protein 0.69 0.84 0.76 5052\n rna 0.64 0.76 0.70 118\n\n micro avg 0.68 0.79 0.73 8643\n macro avg 0.65 0.75 0.69 8643\nweighted avg 0.69 0.79 0.73 8643\n"}}
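As a quick sanity check, the reported micro F1 (73.40) is the harmonic mean of the reported precision and recall:

```python
precision = 68.47640989682459
recall = 79.09290755524701
f1 = 2 * precision * recall / (precision + recall)
print(round(f1, 2))  # 73.4, consistent with the "f1" field above
```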
test_bionlp2004_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 78.49781470889765, "recall": 84.16059238690269, "precision": 73.5490394337715, "summary": " precision recall f1-score support\n\n entity 0.74 0.84 0.78 8643\n\n micro avg 0.74 0.84 0.78 8643\n macro avg 0.74 0.84 0.78 8643\nweighted avg 0.74 0.84 0.78 8643\n"}}
test_conll2003_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}, "test": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}}
test_fin_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}}
test_ontonotes5_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}, "test": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}}
test_panx_dataset-en_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}, "test": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}}
test_wnut2017_span.json ADDED
@@ -0,0 +1 @@
+ {"valid": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}, "test": {"f1": 0.0, "recall": 0.0, "precision": 0.0, "summary": ""}}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "name_or_path": "xlm-roberta-base"}
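tokenizer_config.json and special_tokens_map.json configure the XLM-R SentencePiece tokenizer, and the special tokens line up with the bos/eos/pad ids in config.json. A quick check, again assuming a hypothetical local clone of this repository (the "./" path is illustrative):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./")  # hypothetical local clone of this repository
print(tokenizer.cls_token, tokenizer.sep_token, tokenizer.pad_token)  # <s> </s> <pad>
print(tokenizer.pad_token_id)   # 1, matching "pad_token_id" in config.json
print(tokenizer.model_max_length)  # 512, from tokenizer_config.json
```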