finiteautomata committed on
Commit
353662c
1 Parent(s): d59c31c

First commit

config.json ADDED
@@ -0,0 +1,66 @@
+ {
+   "_name_or_path": "pysentimiento/robertuito-base-cased",
+   "architectures": [
+     "RobertaForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "classifier_dropout": null,
+   "eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "B-VERB",
+     "1": "B-PUNCT",
+     "2": "B-PRON",
+     "3": "B-NOUN",
+     "4": "B-DET",
+     "5": "B-ADV",
+     "6": "B-ADP",
+     "7": "B-INTJ",
+     "8": "B-CONJ",
+     "9": "B-ADJ",
+     "10": "B-AUX",
+     "11": "B-SCONJ",
+     "12": "B-PART",
+     "13": "B-PROPN",
+     "14": "B-NUM",
+     "15": "B-UNK",
+     "16": "B-X"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "B-ADJ": 9,
+     "B-ADP": 6,
+     "B-ADV": 5,
+     "B-AUX": 10,
+     "B-CONJ": 8,
+     "B-DET": 4,
+     "B-INTJ": 7,
+     "B-NOUN": 3,
+     "B-NUM": 14,
+     "B-PART": 12,
+     "B-PRON": 2,
+     "B-PROPN": 13,
+     "B-PUNCT": 1,
+     "B-SCONJ": 11,
+     "B-UNK": 15,
+     "B-VERB": 0,
+     "B-X": 16
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 130,
+   "model_type": "roberta",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.13.0",
+   "type_vocab_size": 1,
+   "use_cache": true,
+   "vocab_size": 30003
+ }
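
config.json describes a RobertaForTokenClassification head on top of pysentimiento/robertuito-base-cased, with 17 part-of-speech labels wired through id2label/label2id. Below is a minimal loading sketch with transformers; the checkpoint path is a placeholder for wherever these files live (a local directory or a Hub repo id), not a confirmed repository name, and since max_position_embeddings is 130 inputs should stay within the base model's short, tweet-length limit (roughly 128 tokens).

    from transformers import AutoTokenizer, AutoModelForTokenClassification
    import torch

    ckpt = "path/to/this-checkpoint"  # placeholder: directory containing config.json, pytorch_model.bin, tokenizer.json, ...
    tokenizer = AutoTokenizer.from_pretrained(ckpt)
    model = AutoModelForTokenClassification.from_pretrained(ckpt)  # RobertaForTokenClassification per config.json

    inputs = tokenizer("esto es una prueba", return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits          # shape (1, seq_len, 17): one score per B-* tag

    tags = [model.config.id2label[i] for i in logits.argmax(-1)[0].tolist()]
    print(list(zip(tokenizer.convert_ids_to_tokens(inputs["input_ids"][0]), tags)))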
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52180c4a700faccc2bfd89e8ac69f72dbd9699d9098c927dd6905597f1b3bac3
+ size 432929585
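
pytorch_model.bin is stored through Git LFS, so the committed file is only the three-line pointer above; the ~433 MB weights blob is addressed by its SHA-256. A small sketch for checking that a downloaded pytorch_model.bin matches the pointer's oid (the local filename is an assumption):

    import hashlib

    expected = "52180c4a700faccc2bfd89e8ac69f72dbd9699d9098c927dd6905597f1b3bac3"  # oid from the LFS pointer above

    h = hashlib.sha256()
    with open("pytorch_model.bin", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)

    assert h.hexdigest() == expected, "mismatch: you may have the LFS pointer or a partial download"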
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
test_results.json ADDED
@@ -0,0 +1,62 @@
+ {
+   "test_loss": 0.09433817863464355,
+   "test_precision": 0.9727695306642675,
+   "test_recall": 0.9727695306642675,
+   "test_macro_f1": 0.9103428865750461,
+   "test_micro_f1": 0.9727695306642675,
+   "test_accuracy": 0.9727695306642675,
+   "test_ADJ_f1": 0.8951762523191095,
+   "test_ADJ_precision": 0.8985102420856611,
+   "test_ADJ_recall": 0.8918669131238447,
+   "test_ADP_f1": 0.9671580036203775,
+   "test_ADP_precision": 0.9749739311783108,
+   "test_ADP_recall": 0.9594663930220626,
+   "test_ADV_f1": 0.9267624914442164,
+   "test_ADV_precision": 0.9206708975521306,
+   "test_ADV_recall": 0.9329352319706018,
+   "test_AUX_f1": 0.9430004559963521,
+   "test_AUX_precision": 0.9306930693069307,
+   "test_AUX_recall": 0.955637707948244,
+   "test_CONJ_f1": 0.9970123772940673,
+   "test_CONJ_precision": 0.9982905982905983,
+   "test_CONJ_recall": 0.9957374254049446,
+   "test_DET_f1": 0.9765440068861632,
+   "test_DET_precision": 0.9709028669234061,
+   "test_DET_recall": 0.9822510822510823,
+   "test_INTJ_f1": 0.958139534883721,
+   "test_INTJ_precision": 0.9519408502772643,
+   "test_INTJ_recall": 0.9644194756554307,
+   "test_NOUN_f1": 0.9731118342181881,
+   "test_NOUN_precision": 0.9783057851239669,
+   "test_NOUN_recall": 0.9679727427597956,
+   "test_NUM_f1": 0.9908015768725361,
+   "test_NUM_precision": 0.9947229551451188,
+   "test_NUM_recall": 0.9869109947643979,
+   "test_PART_f1": 0.9842141386410432,
+   "test_PART_precision": 0.9795081967213115,
+   "test_PART_recall": 0.9889655172413793,
+   "test_PRON_f1": 0.984004625168626,
+   "test_PRON_precision": 0.9823008849557522,
+   "test_PRON_recall": 0.9857142857142858,
+   "test_PROPN_f1": 0.9891225525743292,
+   "test_PROPN_precision": 0.9869753979739508,
+   "test_PROPN_recall": 0.9912790697674418,
+   "test_PUNCT_f1": 1.0,
+   "test_PUNCT_precision": 1.0,
+   "test_PUNCT_recall": 1.0,
+   "test_SCONJ_f1": 0.9125364431486881,
+   "test_SCONJ_precision": 0.9214916584887144,
+   "test_SCONJ_recall": 0.9037536092396535,
+   "test_UNK_f1": 0.998003992015968,
+   "test_UNK_precision": 0.9960159362549801,
+   "test_UNK_recall": 1.0,
+   "test_VERB_f1": 0.9802407866923997,
+   "test_VERB_precision": 0.9835853928439691,
+   "test_VERB_recall": 0.9769188496061549,
+   "test_X_f1": 0.0,
+   "test_X_precision": 0.0,
+   "test_X_recall": 0.0,
+   "test_runtime": 7.575,
+   "test_samples_per_second": 567.395,
+   "test_steps_per_second": 35.512
+ }
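
A quick sanity check on these numbers: test_macro_f1 is the unweighted mean of the 17 per-tag F1 scores (B-X included, whose 0.0 pulls the macro average well below the ~0.973 micro F1/accuracy). The sketch below reproduces it from this file, assuming test_results.json is in the working directory:

    import json
    from statistics import mean

    with open("test_results.json") as f:
        results = json.load(f)

    # Per-tag F1 scores are the *_f1 keys other than the macro/micro aggregates.
    per_tag_f1 = [v for k, v in results.items()
                  if k.endswith("_f1") and k not in ("test_macro_f1", "test_micro_f1")]

    print(len(per_tag_f1))   # 17 tags
    print(mean(per_tag_f1))  # ~0.91034, matching test_macro_f1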
tokenizer.json ADDED
The diff for this file is too large to render.
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "special_tokens_map_file": "models/twerto-base-cased/special_tokens_map.json", "name_or_path": "pysentimiento/robertuito-base-cased", "tokenizer_class": "PreTrainedTokenizerFast"}
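
special_tokens_map.json and tokenizer_config.json declare the same RoBERTa-style special tokens (<s>, </s>, <pad>, <unk>, <mask>) for a PreTrainedTokenizerFast backed by tokenizer.json. A minimal sketch of loading and inspecting it; as above, the path is a placeholder for the directory holding these files:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/this-checkpoint")  # placeholder path

    print(tok.special_tokens_map)  # bos/eos/sep/cls/pad/unk/mask as declared above
    enc = tok("esto es una prueba")
    print(tok.convert_ids_to_tokens(enc["input_ids"]))  # tokens wrapped in <s> ... </s>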
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b31f0f88eb6af616c32b715486d1c414cf8d63bab2b0deb3da954b854a6c6cc7
+ size 2863
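
training_args.bin is the pickled TrainingArguments object that the transformers Trainer writes next to a checkpoint, also tracked via Git LFS here. It can be inspected with torch.load; a small sketch, assuming a transformers version compatible with the one recorded in config.json (4.13.0):

    import torch

    # Pickled transformers.TrainingArguments; on recent torch (>= 2.6) pass
    # weights_only=False, since this is an arbitrary Python object, not a tensor dict.
    args = torch.load("training_args.bin")
    print(type(args).__name__)  # TrainingArguments
    print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)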