salbatarni committed on
Commit a1f1ec5
1 Parent(s): 0656bdd

Training in progress, step 170

Files changed (4)
  1. README.md +137 -0
  2. config.json +32 -0
  3. model.safetensors +3 -0
  4. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,137 @@
---
base_model: aubmindlab/bert-base-arabertv02
tags:
- generated_from_trainer
model-index:
- name: arabert_cross_organization_task6_fold5
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# arabert_cross_organization_task6_fold5

This model is a fine-tuned version of [aubmindlab/bert-base-arabertv02](https://huggingface.co/aubmindlab/bert-base-arabertv02) on an unspecified dataset.
It achieves the following results on the evaluation set (a sketch of how these metrics are typically computed follows the list):
- Loss: 0.4946
- Qwk: 0.7585
- Mse: 0.4958
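
Here, Qwk is quadratic weighted kappa and Mse is mean squared error. A minimal sketch of how these metrics are typically computed with scikit-learn; the score arrays are hypothetical placeholders, not the actual evaluation data:

```python
# Sketch only: illustrates the Qwk and Mse metrics reported above.
# The score arrays are hypothetical placeholders, not the real data.
from sklearn.metrics import cohen_kappa_score, mean_squared_error

y_true = [0, 1, 2, 2, 3]  # hypothetical gold scores
y_pred = [0, 1, 1, 2, 3]  # hypothetical (rounded) model predictions

qwk = cohen_kappa_score(y_true, y_pred, weights="quadratic")  # Qwk
mse = mean_squared_error(y_true, y_pred)                      # Mse
print(f"Qwk: {qwk:.4f}, Mse: {mse:.4f}")
```

Note that `cohen_kappa_score` expects discrete labels; since this model's head is a regression (see config.json below), continuous predictions would typically be rounded to the nearest score before computing kappa.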
## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (an equivalent `TrainingArguments` sketch follows the list):
- learning_rate: 2e-05
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
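
A minimal sketch of a `TrainingArguments` configuration matching the list above; the output directory is a hypothetical placeholder, and anything not listed in the card is left at its default:

```python
# Sketch only: reproduces the listed hyperparameters with transformers'
# TrainingArguments. output_dir is a hypothetical placeholder.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="arabert_cross_organization_task6_fold5",  # hypothetical
    learning_rate=2e-5,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=10,
)
```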
### Training results

| Training Loss | Epoch | Step | Validation Loss | Qwk | Mse |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|
| No log | 0.125 | 2 | 1.3569 | 0.2073 | 1.3566 |
| No log | 0.25 | 4 | 0.8917 | 0.3804 | 0.8926 |
| No log | 0.375 | 6 | 1.3940 | 0.5002 | 1.3954 |
| No log | 0.5 | 8 | 1.1581 | 0.5869 | 1.1599 |
| No log | 0.625 | 10 | 0.8359 | 0.5005 | 0.8373 |
| No log | 0.75 | 12 | 0.8352 | 0.6240 | 0.8368 |
| No log | 0.875 | 14 | 0.7817 | 0.6808 | 0.7831 |
| No log | 1.0 | 16 | 0.6321 | 0.7284 | 0.6335 |
| No log | 1.125 | 18 | 0.5497 | 0.7013 | 0.5509 |
| No log | 1.25 | 20 | 0.6330 | 0.7733 | 0.6344 |
| No log | 1.375 | 22 | 0.9312 | 0.7286 | 0.9326 |
| No log | 1.5 | 24 | 0.9410 | 0.7373 | 0.9424 |
| No log | 1.625 | 26 | 0.6704 | 0.7704 | 0.6717 |
| No log | 1.75 | 28 | 0.5140 | 0.6654 | 0.5150 |
| No log | 1.875 | 30 | 0.5258 | 0.6207 | 0.5266 |
| No log | 2.0 | 32 | 0.4993 | 0.7208 | 0.5003 |
| No log | 2.125 | 34 | 0.5995 | 0.7661 | 0.6008 |
| No log | 2.25 | 36 | 0.6850 | 0.7821 | 0.6865 |
| No log | 2.375 | 38 | 0.6445 | 0.7839 | 0.6460 |
| No log | 2.5 | 40 | 0.5426 | 0.7571 | 0.5438 |
| No log | 2.625 | 42 | 0.5374 | 0.7584 | 0.5385 |
| No log | 2.75 | 44 | 0.5401 | 0.7508 | 0.5413 |
| No log | 2.875 | 46 | 0.5560 | 0.7716 | 0.5572 |
| No log | 3.0 | 48 | 0.5460 | 0.7794 | 0.5472 |
| No log | 3.125 | 50 | 0.5399 | 0.7800 | 0.5410 |
| No log | 3.25 | 52 | 0.4966 | 0.7520 | 0.4976 |
| No log | 3.375 | 54 | 0.4783 | 0.7484 | 0.4792 |
| No log | 3.5 | 56 | 0.5055 | 0.7654 | 0.5064 |
| No log | 3.625 | 58 | 0.4947 | 0.7569 | 0.4955 |
| No log | 3.75 | 60 | 0.5387 | 0.7681 | 0.5397 |
| No log | 3.875 | 62 | 0.6614 | 0.8077 | 0.6627 |
| No log | 4.0 | 64 | 0.6356 | 0.8243 | 0.6369 |
| No log | 4.125 | 66 | 0.4951 | 0.7545 | 0.4959 |
| No log | 4.25 | 68 | 0.4581 | 0.7123 | 0.4588 |
| No log | 4.375 | 70 | 0.4776 | 0.7450 | 0.4784 |
| No log | 4.5 | 72 | 0.5531 | 0.7823 | 0.5543 |
| No log | 4.625 | 74 | 0.5792 | 0.8103 | 0.5805 |
| No log | 4.75 | 76 | 0.5337 | 0.7801 | 0.5349 |
| No log | 4.875 | 78 | 0.4762 | 0.7597 | 0.4771 |
| No log | 5.0 | 80 | 0.4679 | 0.7390 | 0.4687 |
| No log | 5.125 | 82 | 0.4753 | 0.7488 | 0.4762 |
| No log | 5.25 | 84 | 0.5131 | 0.7689 | 0.5143 |
| No log | 5.375 | 86 | 0.5442 | 0.7925 | 0.5455 |
| No log | 5.5 | 88 | 0.5074 | 0.7624 | 0.5086 |
| No log | 5.625 | 90 | 0.4586 | 0.7435 | 0.4596 |
| No log | 5.75 | 92 | 0.4498 | 0.7269 | 0.4507 |
| No log | 5.875 | 94 | 0.4604 | 0.7354 | 0.4614 |
| No log | 6.0 | 96 | 0.5055 | 0.7753 | 0.5068 |
| No log | 6.125 | 98 | 0.5761 | 0.7991 | 0.5776 |
| No log | 6.25 | 100 | 0.5566 | 0.7942 | 0.5580 |
| No log | 6.375 | 102 | 0.5097 | 0.7509 | 0.5109 |
| No log | 6.5 | 104 | 0.4777 | 0.7454 | 0.4787 |
| No log | 6.625 | 106 | 0.4691 | 0.7225 | 0.4700 |
| No log | 6.75 | 108 | 0.4712 | 0.7283 | 0.4720 |
| No log | 6.875 | 110 | 0.4817 | 0.7509 | 0.4827 |
| No log | 7.0 | 112 | 0.4772 | 0.7454 | 0.4781 |
| No log | 7.125 | 114 | 0.4790 | 0.7490 | 0.4799 |
| No log | 7.25 | 116 | 0.5003 | 0.7688 | 0.5014 |
| No log | 7.375 | 118 | 0.5353 | 0.7753 | 0.5366 |
| No log | 7.5 | 120 | 0.5284 | 0.7670 | 0.5297 |
| No log | 7.625 | 122 | 0.5075 | 0.7556 | 0.5086 |
| No log | 7.75 | 124 | 0.4824 | 0.7527 | 0.4834 |
| No log | 7.875 | 126 | 0.4782 | 0.7527 | 0.4792 |
| No log | 8.0 | 128 | 0.4745 | 0.7554 | 0.4755 |
| No log | 8.125 | 130 | 0.4803 | 0.7523 | 0.4813 |
| No log | 8.25 | 132 | 0.4946 | 0.7614 | 0.4957 |
| No log | 8.375 | 134 | 0.4938 | 0.7558 | 0.4950 |
| No log | 8.5 | 136 | 0.4888 | 0.7558 | 0.4900 |
| No log | 8.625 | 138 | 0.4775 | 0.7507 | 0.4786 |
| No log | 8.75 | 140 | 0.4714 | 0.7474 | 0.4724 |
| No log | 8.875 | 142 | 0.4668 | 0.7410 | 0.4677 |
| No log | 9.0 | 144 | 0.4672 | 0.7382 | 0.4681 |
| No log | 9.125 | 146 | 0.4689 | 0.7433 | 0.4698 |
| No log | 9.25 | 148 | 0.4738 | 0.7571 | 0.4748 |
| No log | 9.375 | 150 | 0.4814 | 0.7511 | 0.4825 |
| No log | 9.5 | 152 | 0.4866 | 0.7567 | 0.4877 |
| No log | 9.625 | 154 | 0.4900 | 0.7585 | 0.4911 |
| No log | 9.75 | 156 | 0.4909 | 0.7585 | 0.4921 |
| No log | 9.875 | 158 | 0.4930 | 0.7585 | 0.4942 |
| No log | 10.0 | 160 | 0.4946 | 0.7585 | 0.4958 |
### Framework versions

- Transformers 4.44.0
- PyTorch 2.4.0
- Datasets 2.21.0
- Tokenizers 0.19.1
config.json ADDED
@@ -0,0 +1,32 @@
{
  "_name_or_path": "aubmindlab/bert-base-arabertv02",
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "problem_type": "regression",
  "torch_dtype": "float32",
  "transformers_version": "4.44.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 64000
}
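
The config declares a `BertForSequenceClassification` head with a single label and `"problem_type": "regression"`, so the model emits one scalar score per input. A minimal inference sketch under that assumption; the hub repo id is inferred from the committer and model name, not stated in the card:

```python
# Sketch only: single-score regression inference per the config above.
# The repo id is an assumption inferred from the commit, not confirmed.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "salbatarni/arabert_cross_organization_task6_fold5"  # assumed
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)

inputs = tokenizer("مثال على نص عربي", return_tensors="pt")
with torch.no_grad():
    score = model(**inputs).logits.squeeze().item()  # one regression score
print(score)
```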
model.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d1e45b74db0dd36b3faaa74d3062a35ced3fc196091e022fa37f3217e7770b10
size 540799996
training_args.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:52f5639e3d26eb461fa410647780d77e8aae6f8eb052cd90b6d8ac508fd998d6
size 5240