{
    "model": "openlm-research/open_llama_3b",
    "base_model": "",
    "revision": "main",
    "private": false,
    "precision": "float16",
    "params": 3.0,
    "architectures": "LlamaForCausalLM",
    "weight_type": "Original",
    "status": "FINISHED",
    "submitted_time": "2024-02-11T13:34:13Z",
    "model_type": "🟢 : pretrained",
    "source": "script",
    "job_id": 235,
    "job_start_time": "2024-02-17T09-33-21.785739",
    "eval_version": "1.1.0",
    "result_metrics": {
        "enem_challenge": 0.1966410076976907,
        "bluex": 0.22253129346314326,
        "oab_exams": 0.2305239179954442,
        "assin2_rte": 0.35297899277093914,
        "assin2_sts": 0.05530368768431126,
        "faquad_nli": 0.4396551724137931,
        "hatebr_offensive": 0.38836470532454603,
        "portuguese_hate_speech": 0.4576046634870164,
        "tweetsentbr": 0.3732164088082346
    },
    "result_metrics_average": 0.3018688721827909,
    "result_metrics_npm": -0.055465157217862306
}