Base checkpoint: `microsoft/deberta-v3-base`.
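A minimal loading sketch, assuming a token-classification head (suggested by the per-token precision/recall/F1/accuracy reported below); the label count is a placeholder, not taken from this card:

```python
from transformers import AutoTokenizer, AutoModelForTokenClassification

checkpoint = 'microsoft/deberta-v3-base'
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

# num_labels=9 is a placeholder; replace it with the size of the actual label set.
model = AutoModelForTokenClassification.from_pretrained(checkpoint, num_labels=9)
```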

The fine-tune was run with the following `TrainingArguments`:

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir='ECO_DEBERTA',
    evaluation_strategy="epoch",
    learning_rate=2e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    num_train_epochs=32,
    weight_decay=0.01,
    save_strategy="epoch",
    load_best_model_at_end=True,
    push_to_hub=True
)
```
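A sketch of how these arguments would typically be wired into a `Trainer`, assuming a standard token-classification setup; the dataset variables and the `compute_metrics` function are placeholders for whatever the original training script defined:

```python
from transformers import Trainer, DataCollatorForTokenClassification

trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=tokenized_datasets["train"],       # placeholder dataset splits
    eval_dataset=tokenized_datasets["validation"],
    tokenizer=tokenizer,
    data_collator=DataCollatorForTokenClassification(tokenizer),
    compute_metrics=compute_metrics,                 # see the sketch after the results table
)
trainer.train()
```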

| Epoch | Training Loss | Validation Loss | Precision | Recall | F1 | Accuracy |
|------:|--------------:|----------------:|----------:|-------:|---:|---------:|
| 1 | No log | 0.079086 | 0.264007 | 0.159103 | 0.198550 | 0.982315 |
| 2 | 0.153400 | 0.055790 | 0.402147 | 0.354133 | 0.376616 | 0.985904 |
| 3 | 0.153400 | 0.055938 | 0.377627 | 0.436791 | 0.405060 | 0.985536 |
| 4 | 0.037400 | 0.059241 | 0.424993 | 0.426256 | 0.425624 | 0.986040 |
| 5 | 0.037400 | 0.066712 | 0.436903 | 0.457320 | 0.446879 | 0.986067 |
| 6 | 0.021100 | 0.064148 | 0.422239 | 0.465694 | 0.442903 | 0.986155 |
| 7 | 0.021100 | 0.069515 | 0.460089 | 0.474878 | 0.467367 | 0.986865 |
| 8 | 0.012900 | 0.073564 | 0.458955 | 0.465154 | 0.462034 | 0.986700 |
| 9 | 0.012900 | 0.081422 | 0.452289 | 0.472447 | 0.462148 | 0.986066 |
| 10 | 0.008500 | 0.082762 | 0.452456 | 0.467855 | 0.460027 | 0.986476 |
| 11 | 0.008500 | 0.085812 | 0.458534 | 0.462993 | 0.460753 | 0.986490 |
| 12 | 0.005900 | 0.086245 | 0.470666 | 0.481091 | 0.475822 | 0.986883 |
| 13 | 0.005900 | 0.089477 | 0.479507 | 0.483522 | 0.481506 | 0.986921 |
| 14 | 0.004300 | 0.093831 | 0.474394 | 0.465424 | 0.469866 | 0.986814 |
| 15 | 0.004300 | 0.096122 | 0.487333 | 0.483252 | 0.485284 | 0.987021 |
| 16 | 0.003300 | 0.096951 | 0.492196 | 0.494057 | 0.493125 | 0.987023 |
| 17 | 0.003300 | 0.093057 | 0.480755 | 0.509454 | 0.494689 | 0.987118 |
| 18 | 0.002700 | 0.099559 | 0.507381 | 0.501351 | 0.504348 | 0.987200 |
| 19 | 0.002700 | 0.102917 | 0.498771 | 0.493247 | 0.495993 | 0.986986 |
| 20 | 0.002200 | 0.099864 | 0.503277 | 0.497839 | 0.500543 | 0.987309 |
| 21 | 0.002200 | 0.101206 | 0.500547 | 0.494327 | 0.497418 | 0.987205 |
| 22 | 0.001900 | 0.103037 | 0.490170 | 0.491626 | 0.490897 | 0.987013 |
| 23 | 0.001900 | 0.103360 | 0.493261 | 0.494327 | 0.493794 | 0.987143 |
| 24 | 0.001600 | 0.107981 | 0.505051 | 0.499730 | 0.502376 | 0.987058 |
| 25 | 0.001600 | 0.108147 | 0.511440 | 0.495138 | 0.503157 | 0.987289 |
| 26 | 0.001400 | 0.111687 | 0.507705 | 0.498379 | 0.502999 | 0.987246 |
| 27 | 0.001400 | 0.111873 | 0.502892 | 0.493247 | 0.498023 | 0.986916 |
| 28 | 0.001200 | 0.111417 | 0.506169 | 0.498649 | 0.502381 | 0.987219 |
| 29 | 0.001200 | 0.111508 | 0.509287 | 0.496218 | 0.502668 | 0.987453 |
| 30 | 0.001100 | 0.112689 | 0.514325 | 0.499460 | 0.506784 | 0.987268 |
| 31 | 0.001100 | 0.113233 | 0.508647 | 0.500540 | 0.504561 | 0.987196 |
| 32 | 0.001000 | 0.113873 | 0.510779 | 0.499190 | 0.504918 | 0.987244 |
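The precision, recall, F1, and accuracy columns above are the kind of output produced by a `seqeval`-based `compute_metrics` hook. The sketch below shows how such values could be computed; it is an assumption, not code taken from this card, and the label list is a placeholder:

```python
import numpy as np
import evaluate

seqeval = evaluate.load("seqeval")
label_list = ["O", "B-ENT", "I-ENT"]  # placeholder: the model's actual id-to-label list

def compute_metrics(eval_preds):
    logits, labels = eval_preds
    predictions = np.argmax(logits, axis=-1)

    # Drop the -100 positions used to mask special tokens and subword pieces.
    true_labels = [
        [label_list[l] for l in row if l != -100]
        for row in labels
    ]
    true_preds = [
        [label_list[p] for p, l in zip(p_row, l_row) if l != -100]
        for p_row, l_row in zip(predictions, labels)
    ]

    results = seqeval.compute(predictions=true_preds, references=true_labels)
    return {
        "precision": results["overall_precision"],
        "recall": results["overall_recall"],
        "f1": results["overall_f1"],
        "accuracy": results["overall_accuracy"],
    }
```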



![image/png](https://cdn-uploads.huggingface.co/production/uploads/6634a6ddbcf56d1302dc1e82/gui5WLtcHzTV3kNoJwZ9v.png)