RogerB committed on
Commit
d3d7ce6
1 Parent(s): d21d9fe

End of training

Browse files
Files changed (3) hide show
  1. README.md +7 -7
  2. config.json +6 -6
  3. special_tokens_map.json +49 -7
README.md CHANGED
@@ -17,8 +17,8 @@ should probably proofread and complete it, then remove this comment. -->
17
 
18
  This model is a fine-tuned version of [RogerB/afro-xlmr-large-kinte-domain-kinte-task](https://huggingface.co/RogerB/afro-xlmr-large-kinte-domain-kinte-task) on the None dataset.
19
  It achieves the following results on the evaluation set:
20
- - Loss: 0.9515
21
- - F1: 0.6998
22
 
23
  ## Model description
24
 
@@ -49,14 +49,14 @@ The following hyperparameters were used during training:
49
 
50
  | Training Loss | Epoch | Step | Validation Loss | F1 |
51
  |:-------------:|:-----:|:----:|:---------------:|:------:|
52
- | 0.897 | 1.0 | 1013 | 0.6385 | 0.7419 |
53
- | 0.7098 | 2.0 | 2026 | 0.5148 | 0.8051 |
54
- | 0.6026 | 3.0 | 3039 | 0.4820 | 0.8330 |
55
 
56
 
57
  ### Framework versions
58
 
59
- - Transformers 4.34.0
60
- - Pytorch 2.0.1+cu118
61
  - Datasets 2.14.5
62
  - Tokenizers 0.14.1
 
17
 
18
  This model is a fine-tuned version of [RogerB/afro-xlmr-large-kinte-domain-kinte-task](https://huggingface.co/RogerB/afro-xlmr-large-kinte-domain-kinte-task) on the None dataset.
19
  It achieves the following results on the evaluation set:
20
+ - Loss: 1.0173
21
+ - F1: 0.7027
22
 
23
  ## Model description
24
 
 
49
 
50
  | Training Loss | Epoch | Step | Validation Loss | F1 |
51
  |:-------------:|:-----:|:----:|:---------------:|:------:|
52
+ | 0.8871 | 1.0 | 1013 | 0.6223 | 0.7533 |
53
+ | 0.7082 | 2.0 | 2026 | 0.5338 | 0.7975 |
54
+ | 0.6061 | 3.0 | 3039 | 0.4885 | 0.8385 |
55
 
56
 
57
  ### Framework versions
58
 
59
+ - Transformers 4.34.1
60
+ - Pytorch 2.1.0+cu118
61
  - Datasets 2.14.5
62
  - Tokenizers 0.14.1
config.json CHANGED
@@ -11,16 +11,16 @@
11
  "hidden_dropout_prob": 0.1,
12
  "hidden_size": 1024,
13
  "id2label": {
14
- "0": "LABEL_0",
15
- "1": "LABEL_1",
16
- "2": "LABEL_2"
17
  },
18
  "initializer_range": 0.02,
19
  "intermediate_size": 4096,
20
  "label2id": {
21
- "LABEL_0": 0,
22
- "LABEL_1": 1,
23
- "LABEL_2": 2
24
  },
25
  "layer_norm_eps": 1e-05,
26
  "max_position_embeddings": 514,
 
11
  "hidden_dropout_prob": 0.1,
12
  "hidden_size": 1024,
13
  "id2label": {
14
+ "0": "positive",
15
+ "1": "neutral",
16
+ "2": "negative"
17
  },
18
  "initializer_range": 0.02,
19
  "intermediate_size": 4096,
20
  "label2id": {
21
+ "negative": 2,
22
+ "neutral": 1,
23
+ "positive": 0
24
  },
25
  "layer_norm_eps": 1e-05,
26
  "max_position_embeddings": 514,
special_tokens_map.json CHANGED
@@ -6,11 +6,53 @@
6
  "<unk>",
7
  "<mask>"
8
  ],
9
- "bos_token": "<s>",
10
- "cls_token": "<s>",
11
- "eos_token": "</s>",
12
- "mask_token": "<mask>",
13
- "pad_token": "<pad>",
14
- "sep_token": "</s>",
15
- "unk_token": "<unk>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  }
 
6
  "<unk>",
7
  "<mask>"
8
  ],
9
+ "bos_token": {
10
+ "content": "<s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "cls_token": {
17
+ "content": "<s>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "eos_token": {
24
+ "content": "</s>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "mask_token": {
31
+ "content": "<mask>",
32
+ "lstrip": true,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ },
37
+ "pad_token": {
38
+ "content": "<pad>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false
43
+ },
44
+ "sep_token": {
45
+ "content": "</s>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false
50
+ },
51
+ "unk_token": {
52
+ "content": "<unk>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false
57
+ }
58
  }