{
"results": {
"polemo2_in2": {
"exact_match,score-first": 0.03185595567867036,
"exact_match_stderr,score-first": 0.006540300095172154,
"alias": "polemo2_in2"
}
},
"configs": {
"polemo2_in2": {
"task": "polemo2_in2",
"group": [
"polemo2"
],
"dataset_path": "allegro/klej-polemo2-in",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
"doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "exact_match",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "generate_until",
"generation_kwargs": {
"until": [
".",
","
],
"do_sample": false,
"temperature": 0.0,
"max_gen_toks": 50
},
"repeats": 1,
"filter_list": [
{
"name": "score-first",
"filter": [
{
"function": "regex",
"regex_pattern": "(\\b[ABCD]\\b)"
},
{
"function": "take_first"
}
]
}
],
"should_decontaminate": true,
"doc_to_decontamination_query": "{{sentence}}",
"metadata": {
"version": 1.0
}
}
},
"versions": {
"polemo2_in2": 1.0
},
"n-shot": {
"polemo2_in2": 0
},
"config": {
"model": "hf",
"model_args": "pretrained=Azurro/APT3-1B-Base",
"batch_size": "16",
"batch_sizes": [],
"device": "cuda:0",
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null
},
"git_hash": null
}