results/earnings-ft-llama2-7b-chat-hf/results_2024-03-22 00:42:42.400834.json
Commit 2b43d14 by ayushi0430: update results

{
    "config": {
        "model": "LaminiModel",
        "model_args": null,
        "batch_size": 1,
        "batch_sizes": [],
        "device": null,
        "use_cache": null,
        "limit": 100,
        "bootstrap_iters": 100000,
        "gen_kwargs": null,
        "model_dtype": "bfloat16",
        "model_name": "earnings-ft-llama2-7b-chat-hf",
        "model_sha": "main"
    },
    "results": {
        "product_response_subjective_score": {
            "product_response_subjective_score": 3.1176470588235294
        },
        "product_id_precision_score": {
            "product_id_precision_score": 0.0
        },
        "earnings_response_subjective_score": {
            "earnings_response_subjective_score": 2.742857142857143
        },
        "earnings_precision_score": {
            "earnings_precision_score": 0.0
        },
        "icd11_response_subjective_score": {
            "icd11_response_subjective_score": 2.336842105263158
        },
        "icd11_precision_score": {
            "icd11_precision_score": 0.16842105263157894
        },
        "mmlu_flan_n_shot_generative_global_facts": {
            "exact_match,strict-match": 0.41,
            "exact_match_stderr,strict-match": 0.049431107042371025,
            "exact_match,flexible-extract": 0.42,
            "exact_match_stderr,flexible-extract": 0.04960449637488583,
            "alias": "mmlu_flan_n_shot_generative_global_facts"
        },
        "truthfulqa_gen": {
            "bleu_max,none": 12.194776475473436,
            "bleu_max_stderr,none": 1.2701523429047132,
            "bleu_acc,none": 0.45,
            "bleu_acc_stderr,none": 0.049999999999999996,
            "bleu_diff,none": -0.9337244318464067,
            "bleu_diff_stderr,none": 0.9745288208534106,
            "rouge1_max,none": 32.472453943146135,
            "rouge1_max_stderr,none": 1.7844603640648615,
            "rouge1_acc,none": 0.45,
            "rouge1_acc_stderr,none": 0.049999999999999996,
            "rouge1_diff,none": -1.5556741150691888,
            "rouge1_diff_stderr,none": 1.520284343035022,
            "rouge2_max,none": 22.486325099302917,
            "rouge2_max_stderr,none": 1.8556650316966448,
            "rouge2_acc,none": 0.43,
            "rouge2_acc_stderr,none": 0.049756985195624284,
            "rouge2_diff,none": -2.08816397304618,
            "rouge2_diff_stderr,none": 1.588498359536718,
            "rougeL_max,none": 29.906684059868695,
            "rougeL_max_stderr,none": 1.8004049872376318,
            "rougeL_acc,none": 0.43,
            "rougeL_acc_stderr,none": 0.049756985195624284,
            "rougeL_diff,none": -1.6994111526576103,
            "rougeL_diff_stderr,none": 1.474902521555414,
            "alias": "truthfulqa_gen"
        }
    }
}
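
The record splits into a "config" block describing the run (the layout resembles lm-evaluation-harness output, e.g. the "limit" and "bootstrap_iters" fields) and a "results" block keyed by task, where each task maps metric names to scores, "*_stderr*" keys carry bootstrap standard errors, and "alias" is a display label. A minimal sketch of reading the file back, assuming it sits at the path shown in the header; the path and the skip-stderr convention are assumptions for illustration, not part of the file:

```python
import json

# Path as shown in the header above; adjust if the file lives elsewhere.
path = "results/earnings-ft-llama2-7b-chat-hf/results_2024-03-22 00:42:42.400834.json"

with open(path) as f:
    record = json.load(f)

cfg = record["config"]
print(f"model={cfg['model_name']} sha={cfg['model_sha']} dtype={cfg['model_dtype']}")

# Each task maps metric names to scores; "*_stderr*" keys hold bootstrap
# standard errors and "alias" is a label, so both are skipped here.
for task, metrics in record["results"].items():
    for name, value in metrics.items():
        if name == "alias" or "stderr" in name:
            continue
        print(f"{task:45s} {name:30s} {value:.4f}")
```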
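If the file is hosted in a Hugging Face dataset repo, as the breadcrumb suggests, it can be fetched with huggingface_hub and pinned to the commit shown above. The repo id and filename below are guesses reconstructed from the breadcrumb; this page does not name the hosting repo, so substitute the real values before running:

```python
import json
from huggingface_hub import hf_hub_download

# repo_id and filename are hypothetical, inferred from the breadcrumb above.
local_path = hf_hub_download(
    repo_id="ayushi0430/results",  # hypothetical repo id
    repo_type="dataset",
    filename="earnings-ft-llama2-7b-chat-hf/results_2024-03-22 00:42:42.400834.json",
    revision="2b43d14",  # the commit shown in the header
)

with open(local_path) as f:
    record = json.load(f)
print(sorted(record["results"]))
```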