finiteautomata committed
Commit 2c5fed9
1 Parent(s): 9c3a718

Initial commit

config.json ADDED
@@ -0,0 +1,70 @@
+ {
+ "_name_or_path": "pysentimiento/robertuito-base-uncased",
+ "architectures": [
+ "RobertaForTokenClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "classifier_dropout": null,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "O",
+ "1": "B-EVENT",
+ "2": "I-EVENT",
+ "3": "B-GROUP",
+ "4": "I-GROUP",
+ "5": "B-LOC",
+ "6": "I-LOC",
+ "7": "B-ORG",
+ "8": "I-ORG",
+ "9": "B-OTHER",
+ "10": "I-OTHER",
+ "11": "B-PER",
+ "12": "I-PER",
+ "13": "B-PROD",
+ "14": "I-PROD",
+ "15": "B-TIME",
+ "16": "I-TIME",
+ "17": "B-TITLE",
+ "18": "I-TITLE"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "B-EVENT": 1,
+ "B-GROUP": 3,
+ "B-LOC": 5,
+ "B-ORG": 7,
+ "B-OTHER": 9,
+ "B-PER": 11,
+ "B-PROD": 13,
+ "B-TIME": 15,
+ "B-TITLE": 17,
+ "I-EVENT": 2,
+ "I-GROUP": 4,
+ "I-LOC": 6,
+ "I-ORG": 8,
+ "I-OTHER": 10,
+ "I-PER": 12,
+ "I-PROD": 14,
+ "I-TIME": 16,
+ "I-TITLE": 18,
+ "O": 0
+ },
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 130,
+ "model_type": "roberta",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.13.0",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 30002
+ }
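
This config describes a RoBERTa-style encoder (12 layers, hidden size 768, vocabulary of 30,002, 130 position embeddings) with a token-classification head over 19 BIO labels (PER, ORG, LOC, GROUP, EVENT, PROD, TIME, TITLE, OTHER). As a minimal sketch of how a checkpoint with this config is typically used, assuming the files from this commit are available in a local directory (here the placeholder `./robertuito-ner`), the Transformers token-classification pipeline merges the B-/I- tags back into entity spans:

```python
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

# Placeholder path: a local clone of this repository
# (config.json, pytorch_model.bin, tokenizer.json, ... as added in this commit).
model_dir = "./robertuito-ner"

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# aggregation_strategy="simple" groups B-/I- pieces into whole entity spans
# using the id2label mapping from config.json.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

print(ner("mañana juega boca juniors contra river en la bombonera"))
# Output is a list of dicts shaped like
# {"entity_group": "ORG", "word": "...", "score": ..., "start": ..., "end": ...};
# the actual labels depend on the trained weights.
```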
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68de2679921a7b304ff5d7fa6288efd3ac9ed62bf3839d558d55dd0a48ab8b8b
+ size 432932657
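
pytorch_model.bin is stored with Git LFS, so the repository itself only carries this small pointer: a spec version, the SHA-256 of the real object (oid), and its size in bytes (about 433 MB). As a minimal sketch, assuming the actual weights have been fetched (for example with `git lfs pull`), the downloaded file can be checked against the pointer like this:

```python
import hashlib
from pathlib import Path

# Values copied from the pointer file above.
EXPECTED_OID = "68de2679921a7b304ff5d7fa6288efd3ac9ed62bf3839d558d55dd0a48ab8b8b"
EXPECTED_SIZE = 432932657

path = Path("pytorch_model.bin")  # the real LFS object, not the pointer file
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    # Hash in 1 MiB chunks to avoid loading the whole 433 MB file into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("pytorch_model.bin matches its LFS pointer")
```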
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
test_results.json ADDED
@@ -0,0 +1,38 @@
+ {
+ "test_loss": 0.07815752178430557,
+ "test_precision": 0.6620947630922693,
+ "test_recall": 0.6764331210191082,
+ "test_macro_f1": 0.5127434670879479,
+ "test_micro_f1": 0.669187145557656,
+ "test_accuracy": 0.9862053222019306,
+ "test_EVENT_f1": 0.35714285714285715,
+ "test_EVENT_precision": 0.39473684210526316,
+ "test_EVENT_recall": 0.32608695652173914,
+ "test_GROUP_f1": 0.5677966101694916,
+ "test_GROUP_precision": 0.5677966101694916,
+ "test_GROUP_recall": 0.5677966101694916,
+ "test_LOC_f1": 0.730188679245283,
+ "test_LOC_precision": 0.7049180327868853,
+ "test_LOC_recall": 0.7573385518590998,
+ "test_ORG_f1": 0.41638225255972694,
+ "test_ORG_precision": 0.43884892086330934,
+ "test_ORG_recall": 0.3961038961038961,
+ "test_OTHER_f1": 0.3035714285714286,
+ "test_OTHER_precision": 0.3617021276595745,
+ "test_OTHER_recall": 0.26153846153846155,
+ "test_PER_f1": 0.8129164531009737,
+ "test_PER_precision": 0.7843719090009891,
+ "test_PER_recall": 0.8436170212765958,
+ "test_PROD_f1": 0.5320388349514563,
+ "test_PROD_precision": 0.528957528957529,
+ "test_PROD_recall": 0.53515625,
+ "test_TIME_f1": 0.4166666666666667,
+ "test_TIME_precision": 0.47619047619047616,
+ "test_TIME_recall": 0.37037037037037035,
+ "test_TITLE_f1": 0.4779874213836478,
+ "test_TITLE_precision": 0.4720496894409938,
+ "test_TITLE_recall": 0.4840764331210191,
+ "test_runtime": 20.0879,
+ "test_samples_per_second": 502.043,
+ "test_steps_per_second": 31.412
+ }
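
These are entity-level precision/recall/F1 scores per label plus macro and micro F1 and token-level accuracy. The commit does not include the evaluation script, so as an assumption: metrics of this shape are commonly produced from BIO tag sequences with the seqeval library, roughly like this sketch:

```python
# Hypothetical example data; in practice y_true/y_pred would be the gold and
# predicted BIO tag sequences for every sentence in the test set.
from seqeval.metrics import classification_report, f1_score, precision_score, recall_score

y_true = [["B-PER", "I-PER", "O", "B-LOC"]]
y_pred = [["B-PER", "I-PER", "O", "B-ORG"]]

print(precision_score(y_true, y_pred))            # entity-level precision
print(recall_score(y_true, y_pred))               # entity-level recall
print(f1_score(y_true, y_pred, average="micro"))  # micro F1 over all entities
print(f1_score(y_true, y_pred, average="macro"))  # macro F1 over entity types
print(classification_report(y_true, y_pred))      # per-type breakdown (PER, LOC, ORG, ...)
```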
tokenizer.json ADDED
The diff for this file is too large to render.
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "special_tokens_map_file": "models/twerto-base-uncased/special_tokens_map.json", "name_or_path": "pysentimiento/robertuito-base-uncased", "tokenizer_class": "PreTrainedTokenizerFast"}
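
The tokenizer is exported as a fast tokenizer (PreTrainedTokenizerFast, backed by tokenizer.json) with RoBERTa-style special tokens. A minimal sketch of loading and using it, again assuming the placeholder local directory from above; capping inputs at 128 tokens is an assumption that leaves room for the two wrapper tokens within the model's 130 position embeddings, since the config files here do not set model_max_length explicitly:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./robertuito-ner")  # placeholder path to this repo

print(type(tok).__name__)                                     # PreTrainedTokenizerFast
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.mask_token)  # <s> </s> <pad> <mask>

# Sequences are wrapped as "<s> ... </s>"; 128 is an assumed cap, see note above.
enc = tok("esto es una prueba", truncation=True, max_length=128)
print(enc["input_ids"])
print(tok.convert_ids_to_tokens(enc["input_ids"]))
```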
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7a69886b1e2877fe532f080aad04c3b7bc882bc929e4eb2ba3660de684f6ca3
+ size 2863
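
training_args.bin is another small LFS object (2,863 bytes). By convention this is the pickled TrainingArguments object that the Transformers Trainer saves next to a checkpoint; the diff only shows the pointer, so treating it that way is an assumption. A sketch of inspecting it:

```python
import torch

# Assumption: training_args.bin is a pickled transformers.TrainingArguments object.
# Because it is arbitrary pickled Python, recent PyTorch versions require
# weights_only=False to load it (older versions can drop the argument).
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)  # expected: TrainingArguments
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```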