results/openai/gpt-4-turbo-preview/results_2024-03-21 16:23:34.000307.json
{
"config": {
"model": "GPT4Model",
"model_args": null,
"batch_size": 1,
"batch_sizes": [],
"device": null,
"use_cache": null,
"limit": 50,
"bootstrap_iters": 100000,
"gen_kwargs": null,
"model_dtype": "bfloat16",
"model_name": "gpt-4-turbo-preview",
"model_sha": "main"
},
"results": {
"product_response_subjective_score": {
"product_response_subjective_score": 3.18
},
"product_id_precision_score": {
"product_id_precision_score": 0.0
},
"earnings_response_subjective_score": {
"earnings_response_subjective_score": 2.42
},
"earnings_precision_score": {
"earnings_precision_score": 0.06
},
"icd11_response_subjective_score": {
"icd11_response_subjective_score": 2.32
},
"icd11_precision_score": {
"icd11_precision_score": 0.24
},
"mmlu_flan_n_shot_generative_global_facts": {
"exact_match,strict-match": 0.5,
"exact_match_stderr,strict-match": 0.07142857142857142,
"exact_match,flexible-extract": 0.56,
"exact_match_stderr,flexible-extract": 0.07091242083423345,
"alias": "mmlu_flan_n_shot_generative_global_facts"
},
"truthfulqa_gen": {
"bleu_max,none": 17.412843813330753,
"bleu_max_stderr,none": 3.5107529042869725,
"bleu_acc,none": 0.5,
"bleu_acc_stderr,none": 0.07142857142857142,
"bleu_diff,none": 2.6232948753008287,
"bleu_diff_stderr,none": 1.5697390102595539,
"rouge1_max,none": 36.53959310576969,
"rouge1_max_stderr,none": 3.576250657485817,
"rouge1_acc,none": 0.58,
"rouge1_acc_stderr,none": 0.07050835816716035,
"rouge1_diff,none": 3.2546797467837028,
"rouge1_diff_stderr,none": 1.624713032868973,
"rouge2_max,none": 25.96841315911483,
"rouge2_max_stderr,none": 3.8193443097505115,
"rouge2_acc,none": 0.5,
"rouge2_acc_stderr,none": 0.07142857142857142,
"rouge2_diff,none": 2.129828243762446,
"rouge2_diff_stderr,none": 1.8201313792918536,
"rougeL_max,none": 33.61539686833257,
"rougeL_max_stderr,none": 3.6324740440785726,
"rougeL_acc,none": 0.58,
"rougeL_acc_stderr,none": 0.07050835816716035,
"rougeL_diff,none": 2.793595307223977,
"rougeL_diff_stderr,none": 1.5676667673822502,
"alias": "truthfulqa_gen"
}
}
}
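
A minimal sketch of how these results could be read programmatically (assuming the file above is saved locally as results.json; the filename is illustrative, and the key names mirror the JSON structure shown):

import json

# Load the evaluation results file (path is an assumption for this sketch).
with open("results.json") as f:
    data = json.load(f)

# Basic run metadata from the "config" block.
print("model:", data["config"]["model_name"])
print("limit:", data["config"]["limit"])

# Each task under "results" nests its metrics in a dict; "alias" is a label,
# everything else is a numeric score.
for task, metrics in data["results"].items():
    for metric, value in metrics.items():
        if metric != "alias":
            print(f"{task} -> {metric}: {value}")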