djstrong commited on
Commit
ae845e4
·
1 Parent(s): 0a4b6dc
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. lm-evaluation-harness/temp/results.json +67 -0
  2. polish_benchmarks-out/results_APT3-1B-Base-0_belebele_pol_Latn/results.json +72 -0
  3. polish_benchmarks-out/results_APT3-1B-Base-5_belebele_pol_Latn/results.json +72 -0
  4. polish_benchmarks-out/results_trurl-2-13b-academic_8bits-0_belebele_pol_Latn/results.json +72 -0
  5. polish_benchmarks-out/results_trurl-2-13b-academic_8bits-5_belebele_pol_Latn/results.json +72 -0
  6. polish_benchmarks-out/results_trurl-2-7b-0_belebele_pol_Latn/results.json +72 -0
  7. polish_benchmarks-out/results_trurl-2-7b-5_belebele_pol_Latn/results.json +72 -0
  8. polish_benchmarks-out/results_zephyr-7b-alpha-0_belebele_pol_Latn/results.json +72 -0
  9. polish_benchmarks-out/results_zephyr-7b-alpha-5_belebele_pol_Latn/results.json +74 -0
  10. polish_benchmarks-out/results_zephyr-7b-beta-0_belebele_pol_Latn/results.json +74 -0
  11. polish_benchmarks-out/results_zephyr-7b-beta-5_belebele_pol_Latn/results.json +74 -0
  12. polish_benchmarks-out10/results_APT3-1B-Instruct-e1-0_polish/results.json +747 -0
  13. polish_benchmarks-out10/results_APT3-1B-Instruct-e1-5_polish/results.json +747 -0
  14. polish_benchmarks-out10/results_APT3-1B-Instruct-e2-0_polish/results.json +747 -0
  15. polish_benchmarks-out10/results_APT3-1B-Instruct-e2-5_polish/results.json +747 -0
  16. polish_benchmarks-out10/results_Mistral-7B-v0.1-5_polish_generate/results.json +422 -0
  17. polish_benchmarks-out10/results_Mistral-7B-v0.1-5_polish_mc/results.json +357 -0
  18. polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp200-5_polish_generate/results.json +422 -0
  19. polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp200-5_polish_mc/results.json +357 -0
  20. polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp400-5_polish_generate/results.json +422 -0
  21. polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp400-5_polish_mc/results.json +357 -0
  22. polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp600-5_polish_generate/results.json +422 -0
  23. polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp600-5_polish_mc/results.json +357 -0
  24. polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2000-0_polish/results.json +747 -0
  25. polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2000-5_polish/results.json +747 -0
  26. polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2200-0_polish/results.json +747 -0
  27. polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2200-5_polish/results.json +747 -0
  28. polish_benchmarks-out13/results_Mistral-7B-v0.1-0_polish/results.json +747 -0
  29. polish_benchmarks-out13/results_Mistral-7B-v0.1-5_polish/results.json +747 -0
  30. polish_benchmarks-out14/results_Mistral-7B-v0.1-0_polish/results.json +747 -0
  31. polish_benchmarks-out15/results_Mistral-7B-Instruct-v0.1-0_polish/results.json +747 -0
  32. polish_benchmarks-out15/results_Mistral-7B-Instruct-v0.1-5_polish/results.json +747 -0
  33. polish_benchmarks-out15/results_phi-2-0_polish/results.json +747 -0
  34. polish_benchmarks-out15/results_phi-2-5_polish/results.json +747 -0
  35. polish_benchmarks-out15/results_plt5-large-0_polish/results.json +747 -0
  36. polish_benchmarks-out15/results_plt5-large-5_polish/results.json +747 -0
  37. polish_benchmarks-out15/results_polish-gpt2-xl-0_polish/results.json +747 -0
  38. polish_benchmarks-out15/results_polish-gpt2-xl-5_polish/results.json +747 -0
  39. polish_benchmarks-out16/results_Mistral-7B-v0.1-0_polish_generate/results.json +422 -0
  40. polish_benchmarks-out16/results_Mistral-7B-v0.1-0_polish_mc/results.json +357 -0
  41. polish_benchmarks-out16/results_Mistral-7B-v0.1-5_polish_generate/results.json +422 -0
  42. polish_benchmarks-out16/results_Mistral-7B-v0.1-5_polish_mc/results.json +357 -0
  43. polish_benchmarks-out17/results_zephyr-speakleash-007-pl-8192-32-16-0.05-0_polish/results.json +747 -0
  44. polish_benchmarks-out17/results_zephyr-speakleash-007-pl-8192-32-16-0.05-5_polish_generate/results.json +422 -0
  45. polish_benchmarks-out17/results_zephyr-speakleash-007-pl-8192-32-16-0.05-5_polish_mc/results.json +357 -0
  46. polish_benchmarks-out2/results_APT3-1B-Base-0_polish/results.json +911 -0
  47. polish_benchmarks-out2/results_APT3-1B-Base-5_polish/results.json +911 -0
  48. polish_benchmarks-out2/results_trurl-2-7b-0_polish/results.json +911 -0
  49. polish_benchmarks-out21/results_mt5-xl-0_polish_generate/results.json +422 -0
  50. polish_benchmarks-out21/results_mt5-xl-0_polish_mc/results.json +357 -0
lm-evaluation-harness/temp/results.json ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "polish_cbd_multiple_choice": {
4
+ "acc,none": 0.693,
5
+ "acc_stderr,none": 0.014593284892852627,
6
+ "acc_norm,none": 0.693,
7
+ "acc_norm_stderr,none": 0.014593284892852627,
8
+ "alias": "polish_cbd_multiple_choice"
9
+ }
10
+ },
11
+ "configs": {
12
+ "polish_cbd_multiple_choice": {
13
+ "task": "polish_cbd_multiple_choice",
14
+ "dataset_path": "ptaszynski/PolishCyberbullyingDataset",
15
+ "training_split": "train",
16
+ "test_split": "test",
17
+ "doc_to_text": "Wypowiedź: \"{{TEXT}}\"\nDo podanej wypowiedzi przyporządkuj jedną, najlepiej pasującą kategorię z podanych: nieszkodliwa, szyderstwo, obelga, insynuacja, groźba, molestowanie.\nKategoria:",
18
+ "doc_to_target": "{{{'szyderstwo': 1, 'obelga': 2, 'insynuacja': 3, 'grozba': 4, 'molestowanie': 5}.get(CATEGORIES, 0)}}",
19
+ "doc_to_choice": [
20
+ "nieszkodliwa",
21
+ "szyderstwo",
22
+ "obelga",
23
+ "insynuacja",
24
+ "groźba",
25
+ "molestowanie"
26
+ ],
27
+ "description": "",
28
+ "target_delimiter": " ",
29
+ "fewshot_delimiter": "\n\n",
30
+ "num_fewshot": 0,
31
+ "metric_list": [
32
+ {
33
+ "metric": "acc",
34
+ "aggregation": "mean",
35
+ "higher_is_better": true
36
+ },
37
+ {
38
+ "metric": "acc_norm",
39
+ "aggregation": "mean",
40
+ "higher_is_better": true
41
+ }
42
+ ],
43
+ "output_type": "multiple_choice",
44
+ "repeats": 1,
45
+ "should_decontaminate": true,
46
+ "doc_to_decontamination_query": "{{TEXT}}"
47
+ }
48
+ },
49
+ "versions": {
50
+ "polish_cbd_multiple_choice": "Yaml"
51
+ },
52
+ "n-shot": {
53
+ "polish_cbd_multiple_choice": 0
54
+ },
55
+ "config": {
56
+ "model": "hf",
57
+ "model_args": "pretrained=Azurro/APT3-1B-Base",
58
+ "batch_size": "16",
59
+ "batch_sizes": [],
60
+ "device": "cuda:0",
61
+ "use_cache": null,
62
+ "limit": null,
63
+ "bootstrap_iters": 100000,
64
+ "gen_kwargs": null
65
+ },
66
+ "git_hash": "532aa2e6"
67
+ }
polish_benchmarks-out/results_APT3-1B-Base-0_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.22111111111111112,
5
+ "acc_stderr,none": 0.013840863699859518,
6
+ "acc_norm,none": 0.22111111111111112,
7
+ "acc_norm_stderr,none": 0.013840863699859518,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 0,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 0
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=Azurro/APT3-1B-Base",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_APT3-1B-Base-5_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.25666666666666665,
5
+ "acc_stderr,none": 0.014567891342380032,
6
+ "acc_norm,none": 0.25666666666666665,
7
+ "acc_norm_stderr,none": 0.014567891342380032,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 5,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 5
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=Azurro/APT3-1B-Base",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_trurl-2-13b-academic_8bits-0_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.5033333333333333,
5
+ "acc_stderr,none": 0.01667556306870599,
6
+ "acc_norm,none": 0.5033333333333333,
7
+ "acc_norm_stderr,none": 0.01667556306870599,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 0,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 0
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=Voicelab/trurl-2-13b-academic",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_trurl-2-13b-academic_8bits-5_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.6044444444444445,
5
+ "acc_stderr,none": 0.01630805222350177,
6
+ "acc_norm,none": 0.6044444444444445,
7
+ "acc_norm_stderr,none": 0.01630805222350177,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 5,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 5
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=Voicelab/trurl-2-13b-academic",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_trurl-2-7b-0_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.39444444444444443,
5
+ "acc_stderr,none": 0.016300092627924964,
6
+ "acc_norm,none": 0.39444444444444443,
7
+ "acc_norm_stderr,none": 0.016300092627924964,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 0,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 0
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=Voicelab/trurl-2-7b",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_trurl-2-7b-5_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.48,
5
+ "acc_stderr,none": 0.016662587561635363,
6
+ "acc_norm,none": 0.48,
7
+ "acc_norm_stderr,none": 0.016662587561635363,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 5,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 5
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=Voicelab/trurl-2-7b",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_zephyr-7b-alpha-0_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.43555555555555553,
5
+ "acc_stderr,none": 0.016536840376359642,
6
+ "acc_norm,none": 0.43555555555555553,
7
+ "acc_norm_stderr,none": 0.016536840376359642,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 0,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 0
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=HuggingFaceH4/zephyr-7b-alpha",
63
+ "batch_size": "16",
64
+ "batch_sizes": [],
65
+ "device": "cuda:0",
66
+ "use_cache": null,
67
+ "limit": null,
68
+ "bootstrap_iters": 100000,
69
+ "gen_kwargs": null
70
+ },
71
+ "git_hash": null
72
+ }
polish_benchmarks-out/results_zephyr-7b-alpha-5_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.6511111111111111,
5
+ "acc_stderr,none": 0.015896125275520714,
6
+ "acc_norm,none": 0.6511111111111111,
7
+ "acc_norm_stderr,none": 0.015896125275520714,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 5,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 5
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=HuggingFaceH4/zephyr-7b-alpha",
63
+ "batch_size": "auto",
64
+ "batch_sizes": [
65
+ 8
66
+ ],
67
+ "device": "cuda:0",
68
+ "use_cache": null,
69
+ "limit": null,
70
+ "bootstrap_iters": 100000,
71
+ "gen_kwargs": null
72
+ },
73
+ "git_hash": null
74
+ }
polish_benchmarks-out/results_zephyr-7b-beta-0_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.4222222222222222,
5
+ "acc_stderr,none": 0.016472939933046615,
6
+ "acc_norm,none": 0.4222222222222222,
7
+ "acc_norm_stderr,none": 0.016472939933046615,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 0,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 0
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=HuggingFaceH4/zephyr-7b-beta",
63
+ "batch_size": "auto",
64
+ "batch_sizes": [
65
+ 32
66
+ ],
67
+ "device": "cuda:0",
68
+ "use_cache": null,
69
+ "limit": null,
70
+ "bootstrap_iters": 100000,
71
+ "gen_kwargs": null
72
+ },
73
+ "git_hash": null
74
+ }
polish_benchmarks-out/results_zephyr-7b-beta-5_belebele_pol_Latn/results.json ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "belebele_pol_Latn": {
4
+ "acc,none": 0.6333333333333333,
5
+ "acc_stderr,none": 0.01607207840831028,
6
+ "acc_norm,none": 0.6333333333333333,
7
+ "acc_norm_stderr,none": 0.01607207840831028,
8
+ "alias": "belebele_pol_Latn"
9
+ }
10
+ },
11
+ "configs": {
12
+ "belebele_pol_Latn": {
13
+ "task": "belebele_pol_Latn",
14
+ "group": "belebele",
15
+ "dataset_path": "facebook/belebele",
16
+ "test_split": "pol_Latn",
17
+ "fewshot_split": "pol_Latn",
18
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
19
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
20
+ "doc_to_choice": [
21
+ "A",
22
+ "B",
23
+ "C",
24
+ "D"
25
+ ],
26
+ "description": "",
27
+ "target_delimiter": " ",
28
+ "fewshot_delimiter": "\n\n",
29
+ "fewshot_config": {
30
+ "sampler": "first_n"
31
+ },
32
+ "num_fewshot": 5,
33
+ "metric_list": [
34
+ {
35
+ "metric": "acc",
36
+ "aggregation": "mean",
37
+ "higher_is_better": true
38
+ },
39
+ {
40
+ "metric": "acc_norm",
41
+ "aggregation": "mean",
42
+ "higher_is_better": true
43
+ }
44
+ ],
45
+ "output_type": "multiple_choice",
46
+ "repeats": 1,
47
+ "should_decontaminate": true,
48
+ "doc_to_decontamination_query": "{{question}}",
49
+ "metadata": {
50
+ "version": 0.0
51
+ }
52
+ }
53
+ },
54
+ "versions": {
55
+ "belebele_pol_Latn": 0.0
56
+ },
57
+ "n-shot": {
58
+ "belebele_pol_Latn": 5
59
+ },
60
+ "config": {
61
+ "model": "hf",
62
+ "model_args": "pretrained=HuggingFaceH4/zephyr-7b-beta",
63
+ "batch_size": "auto",
64
+ "batch_sizes": [
65
+ 8
66
+ ],
67
+ "device": "cuda:0",
68
+ "use_cache": null,
69
+ "limit": null,
70
+ "bootstrap_iters": 100000,
71
+ "gen_kwargs": null
72
+ },
73
+ "git_hash": null
74
+ }
polish_benchmarks-out10/results_APT3-1B-Instruct-e1-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.30533563147712284,
5
+ "acc_stderr,none": 0.005139466962313564,
6
+ "acc_norm,none": 0.23045011951041258,
7
+ "acc_norm_stderr,none": 0.011131236790323795,
8
+ "exact_match,score-first": 0.06251491695427411,
9
+ "exact_match_stderr,score-first": 0.04026723037877803,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.22666666666666666,
14
+ "acc_stderr,none": 0.013963598349030465,
15
+ "acc_norm,none": 0.22666666666666666,
16
+ "acc_norm_stderr,none": 0.013963598349030465,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.07894736842105263,
21
+ "exact_match_stderr,score-first": 0.01004253653910762,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.26869806094182824,
26
+ "acc_stderr,none": 0.016508704592015123,
27
+ "acc_norm,none": 0.22853185595567868,
28
+ "acc_norm_stderr,none": 0.01563740699730465,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.004048582995951417,
33
+ "exact_match_stderr,score-first": 0.0028598755883223618,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.34210526315789475,
38
+ "acc_stderr,none": 0.021366560862202868,
39
+ "acc_norm,none": 0.3724696356275304,
40
+ "acc_norm_stderr,none": 0.02177405671825141,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.287740164684355,
45
+ "acc_stderr,none": 0.00684745312851687,
46
+ "acc_norm,none": 0.13083257090576395,
47
+ "acc_norm_stderr,none": 0.005100576389030051,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.05695333943275389,
52
+ "exact_match_stderr,score-first": 0.003505385488315323,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.056666666666666664,
57
+ "exact_match_stderr,score-first": 0.00771110430808905,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.5306122448979592,
62
+ "acc_stderr,none": 0.015565316454386663,
63
+ "acc_norm,none": 0.5306122448979592,
64
+ "acc_norm_stderr,none": 0.015565316454386663,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.0,
69
+ "exact_match_stderr,score-first": 0.0,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.422,
74
+ "acc_stderr,none": 0.015625625112620667,
75
+ "acc_norm,none": 0.422,
76
+ "acc_norm_stderr,none": 0.015625625112620667,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.0,
81
+ "exact_match_stderr,score-first": 0.0,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.3821892393320965,
86
+ "acc_stderr,none": 0.014806728396999735,
87
+ "acc_norm,none": 0.3821892393320965,
88
+ "acc_norm_stderr,none": 0.014806728396999735,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.19573283858998144,
93
+ "exact_match_stderr,score-first": 0.01208994051053487,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.30533563147712284,
100
+ "acc_stderr,none": 0.005139466962313564,
101
+ "acc_norm,none": 0.23045011951041258,
102
+ "acc_norm_stderr,none": 0.011131236790323795,
103
+ "exact_match,score-first": 0.06251491695427411,
104
+ "exact_match_stderr,score-first": 0.04026723037877803,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=models/hf_v7_e1",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out10/results_APT3-1B-Instruct-e1-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.2995695333959389,
5
+ "acc_stderr,none": 0.02793102270530222,
6
+ "acc_norm,none": 0.26181843547121303,
7
+ "acc_norm_stderr,none": 0.0331192226110557,
8
+ "exact_match,score-first": 0.2855402402613798,
9
+ "exact_match_stderr,score-first": 0.17014706195076215,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.22,
14
+ "acc_stderr,none": 0.013815887744596803,
15
+ "acc_norm,none": 0.22,
16
+ "acc_norm_stderr,none": 0.013815887744596803,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.2299168975069252,
21
+ "exact_match_stderr,score-first": 0.015670635531400696,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.31301939058171746,
26
+ "acc_stderr,none": 0.017269916107497767,
27
+ "acc_norm,none": 0.2146814404432133,
28
+ "acc_norm_stderr,none": 0.015291586600683917,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.3562753036437247,
33
+ "exact_match_stderr,score-first": 0.02156847728252059,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.33805668016194335,
38
+ "acc_stderr,none": 0.021305008084780325,
39
+ "acc_norm,none": 0.3299595141700405,
40
+ "acc_norm_stderr,none": 0.02117665642255094,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.1983074107959744,
45
+ "acc_stderr,none": 0.006030912164611869,
46
+ "acc_norm,none": 0.13746569075937787,
47
+ "acc_norm_stderr,none": 0.005208287454350561,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.12900274473924978,
52
+ "exact_match_stderr,score-first": 0.0050701109751773,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.27,
57
+ "exact_match_stderr,score-first": 0.014806876915962114,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8289601554907677,
62
+ "acc_stderr,none": 0.011744077740056324,
63
+ "acc_norm,none": 0.8289601554907677,
64
+ "acc_norm_stderr,none": 0.011744077740056324,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.8036929057337221,
69
+ "exact_match_stderr,score-first": 0.012388436737248482,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.359,
74
+ "acc_stderr,none": 0.015177264224798596,
75
+ "acc_norm,none": 0.359,
76
+ "acc_norm_stderr,none": 0.015177264224798596,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.421,
81
+ "exact_match_stderr,score-first": 0.015620595475301317,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6771799628942486,
86
+ "acc_stderr,none": 0.014247033304657502,
87
+ "acc_norm,none": 0.6771799628942486,
88
+ "acc_norm_stderr,none": 0.014247033304657502,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.6781076066790352,
93
+ "exact_match_stderr,score-first": 0.01423628957934827,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.2995695333959389,
100
+ "acc_stderr,none": 0.02793102270530222,
101
+ "acc_norm,none": 0.26181843547121303,
102
+ "acc_norm_stderr,none": 0.0331192226110557,
103
+ "exact_match,score-first": 0.2855402402613798,
104
+ "exact_match_stderr,score-first": 0.17014706195076215,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=models/hf_v7_e1",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out10/results_APT3-1B-Instruct-e2-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.3764177338567992,
5
+ "acc_stderr,none": 0.008624681811571087,
6
+ "acc_norm,none": 0.2600649135917133,
7
+ "acc_norm_stderr,none": 0.0179062234015382,
8
+ "exact_match,score-first": 0.09057769906782864,
9
+ "exact_match_stderr,score-first": 0.043599173145033025,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.22555555555555556,
14
+ "acc_stderr,none": 0.013939334910458142,
15
+ "acc_norm,none": 0.22555555555555556,
16
+ "acc_norm_stderr,none": 0.013939334910458142,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.09695290858725762,
21
+ "exact_match_stderr,score-first": 0.011019654745613438,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.30055401662049863,
26
+ "acc_stderr,none": 0.01707539370960794,
27
+ "acc_norm,none": 0.23407202216066483,
28
+ "acc_norm_stderr,none": 0.015768888706671383,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.05668016194331984,
33
+ "exact_match_stderr,score-first": 0.01041409572782948,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.37044534412955465,
38
+ "acc_stderr,none": 0.02174980315792073,
39
+ "acc_norm,none": 0.37044534412955465,
40
+ "acc_norm_stderr,none": 0.021749803157920727,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.3986733760292772,
45
+ "acc_stderr,none": 0.007405824738251191,
46
+ "acc_norm,none": 0.16102470265324795,
47
+ "acc_norm_stderr,none": 0.0055594321792952865,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.10567246111619397,
52
+ "exact_match_stderr,score-first": 0.0046498485737524525,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.07,
57
+ "exact_match_stderr,score-first": 0.008509629433967584,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.6929057337220602,
62
+ "acc_stderr,none": 0.014387203089566317,
63
+ "acc_norm,none": 0.6929057337220602,
64
+ "acc_norm_stderr,none": 0.014387203089566317,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.0029154518950437317,
69
+ "exact_match_stderr,score-first": 0.0016815987492253832,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.393,
74
+ "acc_stderr,none": 0.015452824654081496,
75
+ "acc_norm,none": 0.393,
76
+ "acc_norm_stderr,none": 0.015452824654081496,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.002,
81
+ "exact_match_stderr,score-first": 0.0014135055705578159,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.4935064935064935,
86
+ "acc_stderr,none": 0.015234405984314282,
87
+ "acc_norm,none": 0.4935064935064935,
88
+ "acc_norm_stderr,none": 0.015234405984314282,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.22448979591836735,
93
+ "exact_match_stderr,score-first": 0.012714058612202518,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.3764177338567992,
100
+ "acc_stderr,none": 0.008624681811571087,
101
+ "acc_norm,none": 0.2600649135917133,
102
+ "acc_norm_stderr,none": 0.0179062234015382,
103
+ "exact_match,score-first": 0.09057769906782864,
104
+ "exact_match_stderr,score-first": 0.043599173145033025,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=models/hf_v7_e2",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out10/results_APT3-1B-Instruct-e2-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.31024878977878106,
5
+ "acc_stderr,none": 0.027980216827585686,
6
+ "acc_norm,none": 0.26879152578135684,
7
+ "acc_norm_stderr,none": 0.034246705086353024,
8
+ "exact_match,score-first": 0.30242251545043747,
9
+ "exact_match_stderr,score-first": 0.15362428559916144,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.2788888888888889,
14
+ "acc_stderr,none": 0.014956736888683079,
15
+ "acc_norm,none": 0.2788888888888889,
16
+ "acc_norm_stderr,none": 0.014956736888683079,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.2880886426592798,
21
+ "exact_match_stderr,score-first": 0.016865856350741566,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.29916897506925205,
26
+ "acc_stderr,none": 0.01705286304111782,
27
+ "acc_norm,none": 0.2077562326869806,
28
+ "acc_norm_stderr,none": 0.01510910759802611,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.3340080971659919,
33
+ "exact_match_stderr,score-first": 0.02124171181612737,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.38461538461538464,
38
+ "acc_stderr,none": 0.02191104968830336,
39
+ "acc_norm,none": 0.340080971659919,
40
+ "acc_norm_stderr,none": 0.02133600145678249,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.18275388838060383,
45
+ "acc_stderr,none": 0.00584546872481264,
46
+ "acc_norm,none": 0.11665141811527904,
47
+ "acc_norm_stderr,none": 0.0048553513926815365,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.13060384263494967,
52
+ "exact_match_stderr,score-first": 0.0050967863903210765,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.2722222222222222,
57
+ "exact_match_stderr,score-first": 0.01484503879443395,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8241010689990281,
62
+ "acc_stderr,none": 0.011874772046703725,
63
+ "acc_norm,none": 0.8241010689990281,
64
+ "acc_norm_stderr,none": 0.011874772046703725,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.7570456754130224,
69
+ "exact_match_stderr,score-first": 0.01337601666202105,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.434,
74
+ "acc_stderr,none": 0.015680876566375054,
75
+ "acc_norm,none": 0.434,
76
+ "acc_norm_stderr,none": 0.015680876566375054,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.372,
81
+ "exact_match_stderr,score-first": 0.015292149942040577,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6836734693877551,
86
+ "acc_stderr,none": 0.01417047215182454,
87
+ "acc_norm,none": 0.6836734693877551,
88
+ "acc_norm_stderr,none": 0.01417047215182454,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.6808905380333952,
93
+ "exact_match_stderr,score-first": 0.014203672130694803,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.31024878977878106,
100
+ "acc_stderr,none": 0.027980216827585686,
101
+ "acc_norm,none": 0.26879152578135684,
102
+ "acc_norm_stderr,none": 0.034246705086353024,
103
+ "exact_match,score-first": 0.30242251545043747,
104
+ "exact_match_stderr,score-first": 0.15362428559916144,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=models/hf_v7_e2",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out10/results_Mistral-7B-v0.1-5_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.6642001042209484,
5
+ "exact_match_stderr,score-first": 0.11767931063786438,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.6966759002770083,
10
+ "exact_match_stderr,score-first": 0.017119899860129042,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.6336032388663968,
15
+ "exact_match_stderr,score-first": 0.021700065645408235,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.7179780420860018,
20
+ "exact_match_stderr,score-first": 0.006806230292926995,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.7211111111111111,
25
+ "exact_match_stderr,score-first": 0.014956736888683079,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.8328474246841594,
30
+ "exact_match_stderr,score-first": 0.011637044694546985,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.538,
35
+ "exact_match_stderr,score-first": 0.015773547629015106,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.3469387755102041,
40
+ "exact_match_stderr,score-first": 0.014504258061361417,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.6642001042209484,
47
+ "exact_match_stderr,score-first": 0.11767931063786438,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 5,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 5,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 5,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 5,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 5,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 5,
402
+ "polemo2_out": 5,
403
+ "polish_8tags_regex": 5,
404
+ "polish_belebele_regex": 5,
405
+ "polish_dyk_regex": 5,
406
+ "polish_generate": 5,
407
+ "polish_ppc_regex": 5,
408
+ "polish_psc_regex": 5
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1",
413
+ "batch_size": "1",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
polish_benchmarks-out10/results_Mistral-7B-v0.1-5_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.718186555497655,
5
+ "acc_stderr,none": 0.04357151021998618,
6
+ "acc_norm,none": 0.7141219385096405,
7
+ "acc_norm_stderr,none": 0.04372608290602946,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.6855555555555556,
12
+ "acc_stderr,none": 0.015485079682471438,
13
+ "acc_norm,none": 0.6855555555555556,
14
+ "acc_norm_stderr,none": 0.015485079682471438,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.7132963988919667,
19
+ "acc_stderr,none": 0.01684162361588182,
20
+ "acc_norm,none": 0.7174515235457064,
21
+ "acc_norm_stderr,none": 0.016767763260316662,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.6902834008097166,
26
+ "acc_stderr,none": 0.02082439995830139,
27
+ "acc_norm,none": 0.7145748987854251,
28
+ "acc_norm_stderr,none": 0.020339791674885366,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.72483989021043,
33
+ "acc_stderr,none": 0.0067549692558673035,
34
+ "acc_norm,none": 0.7124885635864593,
35
+ "acc_norm_stderr,none": 0.006845829961519696,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.8299319727891157,
40
+ "acc_stderr,none": 0.011717528745193611,
41
+ "acc_norm,none": 0.8299319727891157,
42
+ "acc_norm_stderr,none": 0.011717528745193611,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.624,
47
+ "acc_stderr,none": 0.015325105508898132,
48
+ "acc_norm,none": 0.624,
49
+ "acc_norm_stderr,none": 0.015325105508898132,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.7152133580705009,
54
+ "acc_stderr,none": 0.013752123513176048,
55
+ "acc_norm,none": 0.7152133580705009,
56
+ "acc_norm_stderr,none": 0.013752123513176048,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.718186555497655,
63
+ "acc_stderr,none": 0.04357151021998618,
64
+ "acc_norm,none": 0.7141219385096405,
65
+ "acc_norm_stderr,none": 0.04372608290602946,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 5,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 5,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 5,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 5,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 5,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 5,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 5,
337
+ "polemo2_in_multiple_choice": 5,
338
+ "polemo2_out_multiple_choice": 5,
339
+ "polish_8tags_multiple_choice": 5,
340
+ "polish_dyk_multiple_choice": 5,
341
+ "polish_mc": 5,
342
+ "polish_ppc_multiple_choice": 5,
343
+ "polish_psc_multiple_choice": 5
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1",
348
+ "batch_size": "1",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp200-5_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.6314747264200105,
5
+ "exact_match_stderr,score-first": 0.0707242079346716,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.7880886426592798,
10
+ "exact_match_stderr,score-first": 0.015219382031425949,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.6538461538461539,
15
+ "exact_match_stderr,score-first": 0.021426385174275118,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.6790942360475755,
20
+ "exact_match_stderr,score-first": 0.007060953928205099,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.5944444444444444,
25
+ "exact_match_stderr,score-first": 0.016375741266194007,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.5179786200194364,
30
+ "exact_match_stderr,score-first": 0.015584486987127225,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.479,
35
+ "exact_match_stderr,score-first": 0.015805341148131296,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.6038961038961039,
40
+ "exact_match_stderr,score-first": 0.014903141306250456,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.6314747264200105,
47
+ "exact_match_stderr,score-first": 0.0707242079346716,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 5,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 5,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 5,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 5,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 5,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 5,
402
+ "polemo2_out": 5,
403
+ "polish_8tags_regex": 5,
404
+ "polish_belebele_regex": 5,
405
+ "polish_dyk_regex": 5,
406
+ "polish_generate": 5,
407
+ "polish_ppc_regex": 5,
408
+ "polish_psc_regex": 5
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-200/adapter_model",
413
+ "batch_size": "1",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp200-5_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.6312662845231891,
5
+ "acc_stderr,none": 0.10517768892126916,
6
+ "acc_norm,none": 0.6244919228764981,
7
+ "acc_norm_stderr,none": 0.10116067524418394,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.5466666666666666,
12
+ "acc_stderr,none": 0.016603141822512064,
13
+ "acc_norm,none": 0.5466666666666666,
14
+ "acc_norm_stderr,none": 0.016603141822512064,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.7894736842105263,
19
+ "acc_stderr,none": 0.015182888122718783,
20
+ "acc_norm,none": 0.7617728531855956,
21
+ "acc_norm_stderr,none": 0.015865024177194348,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.6842105263157895,
26
+ "acc_stderr,none": 0.02093486866820736,
27
+ "acc_norm,none": 0.7327935222672065,
28
+ "acc_norm_stderr,none": 0.019929244821849688,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.7335315645013724,
33
+ "acc_stderr,none": 0.006687162643383958,
34
+ "acc_norm,none": 0.7177493138151876,
35
+ "acc_norm_stderr,none": 0.006807905096202593,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.4402332361516035,
40
+ "acc_stderr,none": 0.015482761027859615,
41
+ "acc_norm,none": 0.4402332361516035,
42
+ "acc_norm_stderr,none": 0.015482761027859615,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.461,
47
+ "acc_stderr,none": 0.015771104201283186,
48
+ "acc_norm,none": 0.461,
49
+ "acc_norm_stderr,none": 0.015771104201283186,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.4972170686456401,
54
+ "acc_stderr,none": 0.015235454889651216,
55
+ "acc_norm,none": 0.4972170686456401,
56
+ "acc_norm_stderr,none": 0.015235454889651216,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.6312662845231891,
63
+ "acc_stderr,none": 0.10517768892126916,
64
+ "acc_norm,none": 0.6244919228764981,
65
+ "acc_norm_stderr,none": 0.10116067524418394,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 5,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 5,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 5,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 5,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 5,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 5,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 5,
337
+ "polemo2_in_multiple_choice": 5,
338
+ "polemo2_out_multiple_choice": 5,
339
+ "polish_8tags_multiple_choice": 5,
340
+ "polish_dyk_multiple_choice": 5,
341
+ "polish_mc": 5,
342
+ "polish_ppc_multiple_choice": 5,
343
+ "polish_psc_multiple_choice": 5
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-200/adapter_model",
348
+ "batch_size": "1",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
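Each results.json added in this commit follows the same lm-evaluation-harness layout: per-task scores under "results", the aggregated group score under "groups", and the full task configuration under "configs". A minimal sketch, assuming Python 3 and a local checkout of this repository (the path below points at the file ending above and is otherwise illustrative), for pulling the accuracies out of one of these files:

import json

# 5-shot multiple-choice results for the checkpoint-200 LoRA adapter (the file above).
path = "polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp200-5_polish_mc/results.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Per-task metrics sit under "results"; the aggregate over all Polish tasks under "groups".
for task, metrics in sorted(data["results"].items()):
    if "acc,none" in metrics:
        print(f'{task}: acc={metrics["acc,none"]:.4f}')
print("polish_mc group acc:", data["groups"]["polish_mc"]["acc,none"])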
polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp400-5_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.6364773319437207,
5
+ "exact_match_stderr,score-first": 0.06182259997601216,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.7631578947368421,
10
+ "exact_match_stderr,score-first": 0.01583321192224942,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.6437246963562753,
15
+ "exact_match_stderr,score-first": 0.02156847728252059,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.6612534309240622,
20
+ "exact_match_stderr,score-first": 0.007158648172303392,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.6055555555555555,
25
+ "exact_match_stderr,score-first": 0.016300092627924964,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.6073858114674441,
30
+ "exact_match_stderr,score-first": 0.015230661256517303,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.455,
35
+ "exact_match_stderr,score-first": 0.01575510149834709,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.6697588126159555,
40
+ "exact_match_stderr,score-first": 0.014330686165019315,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.6364773319437207,
47
+ "exact_match_stderr,score-first": 0.06182259997601216,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 5,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 5,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 5,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 5,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 5,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 5,
402
+ "polemo2_out": 5,
403
+ "polish_8tags_regex": 5,
404
+ "polish_belebele_regex": 5,
405
+ "polish_dyk_regex": 5,
406
+ "polish_generate": 5,
407
+ "polish_ppc_regex": 5,
408
+ "polish_psc_regex": 5
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-400/adapter_model",
413
+ "batch_size": "1",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
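The generate-style configs above score answers through the "score-first" filter: the model's free-form completion is matched against the regex "(\b[ABCD]\b)" (A-H for the 8tags task) and only the first standalone letter is kept before the exact_match comparison. A rough standalone approximation of that filter, for illustration only (the harness applies it internally as part of filter_list):

import re

# Same pattern as the "score-first" filter in the configs above (A-D variant).
pattern = re.compile(r"(\b[ABCD]\b)")

def score_first(generation: str):
    """Return the first standalone A/B/C/D letter in a generated answer, or None."""
    match = pattern.search(generation)
    return match.group(1) if match else None

print(score_first("C - mają podobne znaczenie"))  # -> C
print(score_first("Prawidłowa odpowiedź: B."))    # -> B
print(score_first("nie wiem"))                    # -> None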
polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp400-5_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.6686816050026055,
5
+ "acc_stderr,none": 0.0642513149668469,
6
+ "acc_norm,none": 0.6594059405940594,
7
+ "acc_norm_stderr,none": 0.060466983153738094,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.5255555555555556,
12
+ "acc_stderr,none": 0.016654137753608793,
13
+ "acc_norm,none": 0.5255555555555556,
14
+ "acc_norm_stderr,none": 0.016654137753608793,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.8213296398891967,
19
+ "acc_stderr,none": 0.014266497739018441,
20
+ "acc_norm,none": 0.7908587257617729,
21
+ "acc_norm_stderr,none": 0.015146130619525858,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.6558704453441295,
26
+ "acc_stderr,none": 0.02139668816045485,
27
+ "acc_norm,none": 0.6821862348178138,
28
+ "acc_norm_stderr,none": 0.020970769565086225,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.7047118023787741,
33
+ "acc_stderr,none": 0.006899830368155602,
34
+ "acc_norm,none": 0.6864135407136323,
35
+ "acc_norm_stderr,none": 0.007017479758060964,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.7482993197278912,
40
+ "acc_stderr,none": 0.013535780793703692,
41
+ "acc_norm,none": 0.7482993197278912,
42
+ "acc_norm_stderr,none": 0.013535780793703692,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.524,
47
+ "acc_stderr,none": 0.015801065586651755,
48
+ "acc_norm,none": 0.524,
49
+ "acc_norm_stderr,none": 0.015801065586651755,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.6038961038961039,
54
+ "acc_stderr,none": 0.01490314130625046,
55
+ "acc_norm,none": 0.6038961038961039,
56
+ "acc_norm_stderr,none": 0.01490314130625046,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.6686816050026055,
63
+ "acc_stderr,none": 0.0642513149668469,
64
+ "acc_norm,none": 0.6594059405940594,
65
+ "acc_norm_stderr,none": 0.060466983153738094,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 5,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 5,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 5,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 5,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 5,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 5,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 5,
337
+ "polemo2_in_multiple_choice": 5,
338
+ "polemo2_out_multiple_choice": 5,
339
+ "polish_8tags_multiple_choice": 5,
340
+ "polish_dyk_multiple_choice": 5,
341
+ "polish_mc": 5,
342
+ "polish_ppc_multiple_choice": 5,
343
+ "polish_psc_multiple_choice": 5
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-400/adapter_model",
348
+ "batch_size": "1",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp600-5_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.5759249609171443,
5
+ "exact_match_stderr,score-first": 0.10324718697560364,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.7617728531855956,
10
+ "exact_match_stderr,score-first": 0.01586502417719433,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.6619433198380567,
15
+ "exact_match_stderr,score-first": 0.02130500808478033,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.6338060384263495,
20
+ "exact_match_stderr,score-first": 0.007286909441339264,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.5633333333333334,
25
+ "exact_match_stderr,score-first": 0.016541614656655053,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.28765792031098153,
30
+ "exact_match_stderr,score-first": 0.014118413935558783,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.458,
35
+ "exact_match_stderr,score-first": 0.015763390640483706,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.5723562152133581,
40
+ "exact_match_stderr,score-first": 0.015075316273392174,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.5759249609171443,
47
+ "exact_match_stderr,score-first": 0.10324718697560364,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 5,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 5,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 5,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 5,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 5,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 5,
402
+ "polemo2_out": 5,
403
+ "polish_8tags_regex": 5,
404
+ "polish_belebele_regex": 5,
405
+ "polish_dyk_regex": 5,
406
+ "polish_generate": 5,
407
+ "polish_ppc_regex": 5,
408
+ "polish_psc_regex": 5
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-600/adapter_model",
413
+ "batch_size": "1",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
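With the checkpoint-200, -400 and -600 generate runs above in place, the group-level exact_match can be compared across LoRA checkpoints directly from the files. A short sketch, assuming the polish_benchmarks-out10 directory from this commit is available locally:

import json
from pathlib import Path

root = Path("polish_benchmarks-out10")
# One results.json per LoRA checkpoint, as added in this commit.
for results_file in sorted(root.glob("results_Mistral-7B-v0.1_lora_chp*-5_polish_generate/results.json")):
    data = json.loads(results_file.read_text(encoding="utf-8"))
    score = data["groups"]["polish_generate"]["exact_match,score-first"]
    print(f"{results_file.parent.name}: exact_match={score:.4f}")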
polish_benchmarks-out10/results_Mistral-7B-v0.1_lora_chp600-5_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.6551328817092236,
5
+ "acc_stderr,none": 0.06798975937561243,
6
+ "acc_norm,none": 0.6501302761855133,
7
+ "acc_norm_stderr,none": 0.06581469456865614,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.53,
12
+ "acc_stderr,none": 0.01664588990480914,
13
+ "acc_norm,none": 0.53,
14
+ "acc_norm_stderr,none": 0.01664588990480914,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.778393351800554,
19
+ "acc_stderr,none": 0.01546761377296944,
20
+ "acc_norm,none": 0.7645429362880887,
21
+ "acc_norm_stderr,none": 0.015801167237739118,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.6659919028340081,
26
+ "acc_stderr,none": 0.021241711816127367,
27
+ "acc_norm,none": 0.6862348178137652,
28
+ "acc_norm_stderr,none": 0.02089850837742652,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.7053979871912168,
33
+ "acc_stderr,none": 0.0068951633560859794,
34
+ "acc_norm,none": 0.6944190301921317,
35
+ "acc_norm_stderr,none": 0.0069676055457865435,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.7006802721088435,
40
+ "acc_stderr,none": 0.014283382238037845,
41
+ "acc_norm,none": 0.7006802721088435,
42
+ "acc_norm_stderr,none": 0.014283382238037845,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.479,
47
+ "acc_stderr,none": 0.015805341148131296,
48
+ "acc_norm,none": 0.479,
49
+ "acc_norm_stderr,none": 0.015805341148131296,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.588126159554731,
54
+ "acc_stderr,none": 0.014997176459791971,
55
+ "acc_norm,none": 0.588126159554731,
56
+ "acc_norm_stderr,none": 0.014997176459791971,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.6551328817092236,
63
+ "acc_stderr,none": 0.06798975937561243,
64
+ "acc_norm,none": 0.6501302761855133,
65
+ "acc_norm_stderr,none": 0.06581469456865614,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 5,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 5,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 5,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 5,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 5,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 5,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 5,
337
+ "polemo2_in_multiple_choice": 5,
338
+ "polemo2_out_multiple_choice": 5,
339
+ "polish_8tags_multiple_choice": 5,
340
+ "polish_dyk_multiple_choice": 5,
341
+ "polish_mc": 5,
342
+ "polish_ppc_multiple_choice": 5,
343
+ "polish_psc_multiple_choice": 5
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-600/adapter_model",
348
+ "batch_size": "1",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2000-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.5413241163615681,
5
+ "acc_stderr,none": 0.01084163176793659,
6
+ "acc_norm,none": 0.530481610196644,
7
+ "acc_norm_stderr,none": 0.008601325987021305,
8
+ "exact_match,score-first": 0.35592888097277775,
9
+ "exact_match_stderr,score-first": 0.08551675449050326,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.3022222222222222,
14
+ "acc_stderr,none": 0.01531587871597198,
15
+ "acc_norm,none": 0.3022222222222222,
16
+ "acc_norm_stderr,none": 0.01531587871597198,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.3268698060941828,
21
+ "exact_match_stderr,score-first": 0.017469050736806633,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.649584487534626,
26
+ "acc_stderr,none": 0.017768135276145475,
27
+ "acc_norm,none": 0.4903047091412742,
28
+ "acc_norm_stderr,none": 0.018617472045761484,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.4716599190283401,
33
+ "exact_match_stderr,score-first": 0.022482665840761362,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.3441295546558704,
38
+ "acc_stderr,none": 0.02139668816045485,
39
+ "acc_norm,none": 0.48785425101214575,
40
+ "acc_norm_stderr,none": 0.022512222543699473,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.6788655077767612,
45
+ "acc_stderr,none": 0.007062280219959377,
46
+ "acc_norm,none": 0.6710887465690759,
47
+ "acc_norm_stderr,none": 0.0071062247886994456,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.3369167429094236,
52
+ "exact_match_stderr,score-first": 0.007149158481943187,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.3622222222222222,
57
+ "exact_match_stderr,score-first": 0.01603032734626063,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.3556851311953353,
62
+ "acc_stderr,none": 0.014930878960690992,
63
+ "acc_norm,none": 0.3556851311953353,
64
+ "acc_norm_stderr,none": 0.014930878960690992,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.18658892128279883,
69
+ "exact_match_stderr,score-first": 0.012150703160439388,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.396,
74
+ "acc_stderr,none": 0.01547331326585941,
75
+ "acc_norm,none": 0.396,
76
+ "acc_norm_stderr,none": 0.01547331326585941,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.362,
81
+ "exact_match_stderr,score-first": 0.015204840912919501,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6567717996289425,
86
+ "acc_stderr,none": 0.014467412965736809,
87
+ "acc_norm,none": 0.6567717996289425,
88
+ "acc_norm_stderr,none": 0.014467412965736809,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.6549165120593692,
93
+ "exact_match_stderr,score-first": 0.014485957542249024,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.5413241163615681,
100
+ "acc_stderr,none": 0.01084163176793659,
101
+ "acc_norm,none": 0.530481610196644,
102
+ "acc_norm_stderr,none": 0.008601325987021305,
103
+ "exact_match,score-first": 0.35592888097277775,
104
+ "exact_match_stderr,score-first": 0.08551675449050326,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2000/adapter_model",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
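
Note on the *_regex tasks recorded in the file above: each one post-processes the model generation with the same "score-first" filter (a regex capture followed by take_first) before exact_match is computed. The following is a minimal Python sketch of that step, assuming the filter simply keeps the first standalone answer letter matched by the configured pattern; it is inferred from the filter_list entries above, not taken from the harness source.

    import re

    # Sketch of the "score-first" filter from the task configs above (assumption:
    # apply the configured regex, then keep the first capture group).
    SCORE_FIRST = re.compile(r"(\b[ABCD]\b)")

    def score_first(generation: str) -> str:
        """Return the first standalone A/B/C/D letter in the generation, or "" if none."""
        match = SCORE_FIRST.search(generation)
        return match.group(1) if match else ""

    # The filtered letter is then compared against doc_to_target via exact_match.
    assert score_first("Prawidłowa odpowiedź: B - mają podobne znaczenie") == "B"

The polish_8tags_regex task uses the same logic with the wider [ABCDEFGH] character class.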
polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2000-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.6244787172748504,
5
+ "acc_stderr,none": 0.007957885214651855,
6
+ "acc_norm,none": 0.6190519895959236,
7
+ "acc_norm_stderr,none": 0.007105848761469819,
8
+ "exact_match,score-first": 0.6531831123790377,
9
+ "exact_match_stderr,score-first": 0.10636173828524938,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.43,
14
+ "acc_stderr,none": 0.016511700775931788,
15
+ "acc_norm,none": 0.43,
16
+ "acc_norm_stderr,none": 0.016511700775931788,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.7950138504155124,
21
+ "exact_match_stderr,score-first": 0.01503425713972699,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.7548476454293629,
26
+ "acc_stderr,none": 0.016020647115740538,
27
+ "acc_norm,none": 0.6814404432132964,
28
+ "acc_norm_stderr,none": 0.017351684380922532,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.6295546558704453,
33
+ "exact_match_stderr,score-first": 0.02174980315792073,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.6497975708502024,
38
+ "acc_stderr,none": 0.021484495459914727,
39
+ "acc_norm,none": 0.645748987854251,
40
+ "acc_norm_stderr,none": 0.021540905590766254,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.6749771271729186,
45
+ "acc_stderr,none": 0.00708453070961662,
46
+ "acc_norm,none": 0.6784080512351327,
47
+ "acc_norm_stderr,none": 0.007064926974077679,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.5720494053064958,
52
+ "exact_match_stderr,score-first": 0.007483817410724092,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.46444444444444444,
57
+ "exact_match_stderr,score-first": 0.016633716862839924,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.7978620019436345,
62
+ "acc_stderr,none": 0.012525391634647394,
63
+ "acc_norm,none": 0.7978620019436345,
64
+ "acc_norm_stderr,none": 0.012525391634647394,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.5393586005830904,
69
+ "exact_match_stderr,score-first": 0.015546181377068011,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.445,
74
+ "acc_stderr,none": 0.015723301886760938,
75
+ "acc_norm,none": 0.445,
76
+ "acc_norm_stderr,none": 0.015723301886760938,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.391,
81
+ "exact_match_stderr,score-first": 0.015438826294681782,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6743970315398887,
86
+ "acc_stderr,none": 0.014278880242336946,
87
+ "acc_norm,none": 0.6743970315398887,
88
+ "acc_norm_stderr,none": 0.014278880242336946,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.6419294990723562,
93
+ "exact_match_stderr,score-first": 0.014608986476832899,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.6244787172748504,
100
+ "acc_stderr,none": 0.007957885214651855,
101
+ "acc_norm,none": 0.6190519895959236,
102
+ "acc_norm_stderr,none": 0.007105848761469819,
103
+ "exact_match,score-first": 0.6531831123790377,
104
+ "exact_match_stderr,score-first": 0.10636173828524938,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2000/adapter_model",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
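
The config block above records the evaluated model as the Mistral-7B-v0.1 base checkpoint with the checkpoint-2000 LoRA adapter attached through PEFT. Below is a hedged Python sketch of how that model_args pair is typically realised with transformers and peft; the adapter path is copied verbatim from the config and assumed to be available locally, and this is not the harness's own loading code.

    from transformers import AutoModelForCausalLM, AutoTokenizer
    from peft import PeftModel

    BASE = "mistralai/Mistral-7B-v0.1"
    ADAPTER = "lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2000/adapter_model"

    tokenizer = AutoTokenizer.from_pretrained(BASE)
    # device "cuda:0" mirrors the device field in the config above
    base_model = AutoModelForCausalLM.from_pretrained(BASE, device_map="cuda:0")
    model = PeftModel.from_pretrained(base_model, ADAPTER)  # apply the LoRA weights on top of the base model
    model.eval()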
polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2200-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.4721071717287744,
5
+ "acc_stderr,none": 0.010755498956589112,
6
+ "acc_norm,none": 0.4566671808932055,
7
+ "acc_norm_stderr,none": 0.0068765892337264035,
8
+ "exact_match,score-first": 0.5154242863020202,
9
+ "exact_match_stderr,score-first": 0.16365606748645156,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.30333333333333334,
14
+ "acc_stderr,none": 0.015331785641933972,
15
+ "acc_norm,none": 0.30333333333333334,
16
+ "acc_norm_stderr,none": 0.015331785641933972,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.6426592797783933,
21
+ "exact_match_stderr,score-first": 0.01784695026310191,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.2880886426592798,
26
+ "acc_stderr,none": 0.016865856350741566,
27
+ "acc_norm,none": 0.23822714681440443,
28
+ "acc_norm_stderr,none": 0.01586502417719433,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.6174089068825911,
33
+ "exact_match_stderr,score-first": 0.021889226400747818,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.02631578947368421,
38
+ "acc_stderr,none": 0.007209311746493937,
39
+ "acc_norm,none": 0.2854251012145749,
40
+ "acc_norm_stderr,none": 0.02033979167488537,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.620768526989936,
45
+ "acc_stderr,none": 0.007338826630914385,
46
+ "acc_norm,none": 0.5681610247026533,
47
+ "acc_norm_stderr,none": 0.00749214603961998,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.4714089661482159,
52
+ "exact_match_stderr,score-first": 0.007550373089263302,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.3622222222222222,
57
+ "exact_match_stderr,score-first": 0.016030327346260632,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.3673469387755102,
62
+ "acc_stderr,none": 0.015035728051630332,
63
+ "acc_norm,none": 0.3673469387755102,
64
+ "acc_norm_stderr,none": 0.015035728051630332,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.1749271137026239,
69
+ "exact_match_stderr,score-first": 0.011848903583973948,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.475,
74
+ "acc_stderr,none": 0.015799513429996016,
75
+ "acc_norm,none": 0.475,
76
+ "acc_norm_stderr,none": 0.015799513429996016,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.0,
81
+ "exact_match_stderr,score-first": 0.0,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6948051948051948,
86
+ "acc_stderr,none": 0.014031763148103638,
87
+ "acc_norm,none": 0.6948051948051948,
88
+ "acc_norm_stderr,none": 0.014031763148103638,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.6929499072356216,
93
+ "exact_match_stderr,score-first": 0.014055544850266423,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.4721071717287744,
100
+ "acc_stderr,none": 0.010755498956589112,
101
+ "acc_norm,none": 0.4566671808932055,
102
+ "acc_norm_stderr,none": 0.0068765892337264035,
103
+ "exact_match,score-first": 0.5154242863020202,
104
+ "exact_match_stderr,score-first": 0.16365606748645156,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2200/adapter_model",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
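Note: each results.json added in this commit follows the same lm-evaluation-harness layout, with top-level "results", "groups", "configs", "versions", "n-shot" and "config" keys. The snippet below is a minimal sketch (not part of the commit) for tabulating the per-task scores of one such file; the path points at the 5-shot file added just below, and the metric keys ("acc,none", "exact_match,score-first") are taken verbatim from these files.

import json

path = "polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2200-5_polish/results.json"
with open(path) as f:
    data = json.load(f)

# One score per task: accuracy for the multiple-choice tasks,
# exact match (after the "score-first" filter) for the generative ones.
for task, metrics in sorted(data["results"].items()):
    shots = data["n-shot"].get(task, "?")
    score = metrics.get("acc,none", metrics.get("exact_match,score-first"))
    print(f"{task:35s} {shots}-shot  {score:.4f}")

Sorting by task name gives a stable ordering when comparing the different checkpoints in this commit side by side.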
polish_benchmarks-out11/results_Mistral-7B-v0.1_lora_chp2200-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.6053589419074686,
5
+ "acc_stderr,none": 0.00858516332911257,
6
+ "acc_norm,none": 0.5988944841086185,
7
+ "acc_norm_stderr,none": 0.007903150488458166,
8
+ "exact_match,score-first": 0.639276470277073,
9
+ "exact_match_stderr,score-first": 0.12203751850722994,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.39666666666666667,
14
+ "acc_stderr,none": 0.016315923768946142,
15
+ "acc_norm,none": 0.39666666666666667,
16
+ "acc_norm_stderr,none": 0.016315923768946142,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.7839335180055401,
21
+ "exact_match_stderr,score-first": 0.015327300950997175,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.760387811634349,
26
+ "acc_stderr,none": 0.0158966053979867,
27
+ "acc_norm,none": 0.7146814404432132,
28
+ "acc_norm_stderr,none": 0.016817197752968662,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.611336032388664,
33
+ "exact_match_stderr,score-first": 0.021953495375745476,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.5668016194331984,
38
+ "acc_stderr,none": 0.022316983574495623,
39
+ "acc_norm,none": 0.5506072874493927,
40
+ "acc_norm_stderr,none": 0.02240322448797156,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.6477584629460201,
45
+ "acc_stderr,none": 0.007224976326359538,
46
+ "acc_norm,none": 0.6447849954254345,
47
+ "acc_norm_stderr,none": 0.0072387355212667984,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.5953796889295517,
52
+ "exact_match_stderr,score-first": 0.0074238714645552826,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.43555555555555553,
57
+ "exact_match_stderr,score-first": 0.016536840376359645,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8163265306122449,
62
+ "acc_stderr,none": 0.01207698739519081,
63
+ "acc_norm,none": 0.8163265306122449,
64
+ "acc_norm_stderr,none": 0.01207698739519081,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.34110787172011664,
69
+ "exact_match_stderr,score-first": 0.014786196488153141,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.49,
74
+ "acc_stderr,none": 0.0158161357527732,
75
+ "acc_norm,none": 0.49,
76
+ "acc_norm_stderr,none": 0.0158161357527732,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.417,
81
+ "exact_match_stderr,score-first": 0.015599819048769616,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6818181818181818,
86
+ "acc_stderr,none": 0.014192670397052843,
87
+ "acc_norm,none": 0.6818181818181818,
88
+ "acc_norm_stderr,none": 0.014192670397052843,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.6048237476808905,
93
+ "exact_match_stderr,score-first": 0.014897108638787992,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.6053589419074686,
100
+ "acc_stderr,none": 0.00858516332911257,
101
+ "acc_norm,none": 0.5988944841086185,
102
+ "acc_norm_stderr,none": 0.007903150488458166,
103
+ "exact_match,score-first": 0.639276470277073,
104
+ "exact_match_stderr,score-first": 0.12203751850722994,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2200/adapter_model",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
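The "config" block above records how this run was launched: the hf backend loading mistralai/Mistral-7B-v0.1 with a PEFT LoRA adapter (checkpoint-2200), batch size 1 on cuda:0, 5-shot for every task in the "polish" group. As a hedged sketch only, a comparable run could presumably be reproduced through the lm-evaluation-harness Python API; lm_eval.simple_evaluate and its argument names are assumptions about the harness version used here and may differ.

import json
import lm_eval

# Assumed API: lm_eval.simple_evaluate mirrors the settings recorded
# in the "config" block above (model, model_args, batch size, device).
results = lm_eval.simple_evaluate(
    model="hf",
    model_args=(
        "pretrained=mistralai/Mistral-7B-v0.1,"
        "peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2200/adapter_model"
    ),
    tasks=["polish"],   # task group whose members are listed in "configs" above
    num_fewshot=5,      # this file is the 5-shot variant
    batch_size=1,
    device="cuda:0",
)

with open("results.json", "w") as f:
    json.dump(results, f, ensure_ascii=False, indent=2, default=str)

The dump mirrors the structure of the files in this commit, although field ordering and a few bookkeeping keys (e.g. git_hash) depend on the harness version.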
polish_benchmarks-out13/results_Mistral-7B-v0.1-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.4343824081976938,
5
+ "acc_stderr,none": 0.009962623712715304,
6
+ "acc_norm,none": 0.4525485670942092,
7
+ "acc_norm_stderr,none": 0.005643694169180114,
8
+ "exact_match,score-first": 0.3294044653144906,
9
+ "exact_match_stderr,score-first": 0.10927504635268076,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.3888888888888889,
14
+ "acc_stderr,none": 0.016258969321955842,
15
+ "acc_norm,none": 0.3888888888888889,
16
+ "acc_norm_stderr,none": 0.016258969321955842,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.2880886426592798,
21
+ "exact_match_stderr,score-first": 0.016865856350741566,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.6842105263157895,
26
+ "acc_stderr,none": 0.017311155919553595,
27
+ "acc_norm,none": 0.5415512465373962,
28
+ "acc_norm_stderr,none": 0.018556563224686202,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.3117408906882591,
33
+ "exact_match_stderr,score-first": 0.020861686290680668,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.6174089068825911,
38
+ "acc_stderr,none": 0.021889226400747818,
39
+ "acc_norm,none": 0.5020242914979757,
40
+ "acc_norm_stderr,none": 0.022518682901144425,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.3321134492223239,
45
+ "acc_stderr,none": 0.007123676308111656,
46
+ "acc_norm,none": 0.41285452881976215,
47
+ "acc_norm_stderr,none": 0.00744699360559585,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.4556267154620311,
52
+ "exact_match_stderr,score-first": 0.007532906710515956,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.5155555555555555,
57
+ "exact_match_stderr,score-first": 0.016667861366977586,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.5957240038872692,
62
+ "acc_stderr,none": 0.015306114946219423,
63
+ "acc_norm,none": 0.5957240038872692,
64
+ "acc_norm_stderr,none": 0.015306114946219423,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.021379980563654033,
69
+ "exact_match_stderr,score-first": 0.004511429173283774,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.412,
74
+ "acc_stderr,none": 0.015572363292015093,
75
+ "acc_norm,none": 0.412,
76
+ "acc_norm_stderr,none": 0.015572363292015093,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.396,
81
+ "exact_match_stderr,score-first": 0.01547331326585941,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.686456400742115,
86
+ "acc_stderr,none": 0.014136685538524662,
87
+ "acc_norm,none": 0.686456400742115,
88
+ "acc_norm_stderr,none": 0.014136685538524662,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.10575139146567718,
93
+ "exact_match_stderr,score-first": 0.009370533769636572,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.4343824081976938,
100
+ "acc_stderr,none": 0.009962623712715304,
101
+ "acc_norm,none": 0.4525485670942092,
102
+ "acc_norm_stderr,none": 0.005643694169180114,
103
+ "exact_match,score-first": 0.3294044653144906,
104
+ "exact_match_stderr,score-first": 0.10927504635268076,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_4bit=True",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
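The "config" block above records how this run was launched (hf model, Mistral-7B-v0.1 loaded in 4-bit, batch size 1, cuda:0, 0-shot per the "n-shot" block). As a rough sketch only — the `lm_eval` entry point, flag names, and output path below are assumptions about the lm-evaluation-harness CLI, not taken from this commit — the run could be reproduced along these lines:

```python
import subprocess

# Hypothetical reproduction of the run whose config is shown above.
# All flag names and the output directory are assumed, not recorded in this commit.
subprocess.run(
    [
        "lm_eval",
        "--model", "hf",
        "--model_args", "pretrained=mistralai/Mistral-7B-v0.1,load_in_4bit=True",
        "--tasks", "polish",
        "--num_fewshot", "0",
        "--batch_size", "1",
        "--device", "cuda:0",
        "--output_path", "polish_benchmarks-out13",  # assumed output location
    ],
    check=True,
)
```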
polish_benchmarks-out13/results_Mistral-7B-v0.1-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.681789464586842,
5
+ "acc_stderr,none": 0.0021807667148682464,
6
+ "acc_norm,none": 0.6829617521438363,
7
+ "acc_norm_stderr,none": 0.0022133515507959355,
8
+ "exact_match,score-first": 0.6745926465621607,
9
+ "exact_match_stderr,score-first": 0.07702933015086402,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.63,
14
+ "acc_stderr,none": 0.01610242521754215,
15
+ "acc_norm,none": 0.63,
16
+ "acc_norm_stderr,none": 0.01610242521754215,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.7063711911357341,
21
+ "exact_match_stderr,score-first": 0.016960872466519703,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.7077562326869806,
26
+ "acc_stderr,none": 0.016937404043351943,
27
+ "acc_norm,none": 0.7146814404432132,
28
+ "acc_norm_stderr,none": 0.016817197752968666,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.6255060728744939,
33
+ "exact_match_stderr,score-first": 0.021797901981018202,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.6619433198380567,
38
+ "acc_stderr,none": 0.021305008084780325,
39
+ "acc_norm,none": 0.6882591093117408,
40
+ "acc_norm_stderr,none": 0.02086168629068067,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.6880146386093321,
45
+ "acc_stderr,none": 0.007007700697209871,
46
+ "acc_norm,none": 0.6861848124428179,
47
+ "acc_norm_stderr,none": 0.007018868833479085,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.6882433668801464,
52
+ "exact_match_stderr,score-first": 0.007006295741006132,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.6855555555555556,
57
+ "exact_match_stderr,score-first": 0.015485079682471441,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8347910592808552,
62
+ "acc_stderr,none": 0.011582681398996698,
63
+ "acc_norm,none": 0.8347910592808552,
64
+ "acc_norm_stderr,none": 0.011582681398996698,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.8182701652089407,
69
+ "exact_match_stderr,score-first": 0.012027210635031366,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.594,
74
+ "acc_stderr,none": 0.015537226438634604,
75
+ "acc_norm,none": 0.594,
76
+ "acc_norm_stderr,none": 0.015537226438634604,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.534,
81
+ "exact_match_stderr,score-first": 0.015782683329937625,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.7115027829313544,
86
+ "acc_stderr,none": 0.013805472143575878,
87
+ "acc_norm,none": 0.7115027829313544,
88
+ "acc_norm_stderr,none": 0.013805472143575878,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.42393320964749537,
93
+ "exact_match_stderr,score-first": 0.015058346547608882,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.681789464586842,
100
+ "acc_stderr,none": 0.0021807667148682464,
101
+ "acc_norm,none": 0.6829617521438363,
102
+ "acc_norm_stderr,none": 0.0022133515507959355,
103
+ "exact_match,score-first": 0.6745926465621607,
104
+ "exact_match_stderr,score-first": 0.07702933015086402,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_4bit=True",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
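Every results.json added in this commit shares the structure shown above ("results", "groups", "configs", "versions", "n-shot", "config"). A minimal sketch, assuming only that structure, for pulling the aggregate "polish" scores out of the 0-shot and 5-shot runs from this directory (paths taken from the files added in this commit):

```python
import json

def polish_scores(path: str) -> dict:
    # Return the aggregate metrics recorded under the "polish" group of a results.json.
    with open(path) as f:
        data = json.load(f)
    return data["results"]["polish"]

for path in (
    "polish_benchmarks-out13/results_Mistral-7B-v0.1-0_polish/results.json",
    "polish_benchmarks-out13/results_Mistral-7B-v0.1-5_polish/results.json",
):
    scores = polish_scores(path)
    print(path, scores["acc,none"], scores["exact_match,score-first"])
```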
polish_benchmarks-out14/results_Mistral-7B-v0.1-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.36735172525836796,
5
+ "acc_stderr,none": 0.0038509677725416733,
6
+ "acc_norm,none": 0.4663147839437283,
7
+ "acc_norm_stderr,none": 0.0038461601230822184,
8
+ "exact_match,score-first": 0.3178566795480225,
9
+ "exact_match_stderr,score-first": 0.11809034149086765,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.3933333333333333,
14
+ "acc_stderr,none": 0.016292044853162806,
15
+ "acc_norm,none": 0.3933333333333333,
16
+ "acc_norm_stderr,none": 0.016292044853162806,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.19806094182825484,
21
+ "exact_match_stderr,score-first": 0.014842343237320114,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.6426592797783933,
26
+ "acc_stderr,none": 0.017846950263101943,
27
+ "acc_norm,none": 0.5581717451523546,
28
+ "acc_norm_stderr,none": 0.018494518739140306,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.20242914979757085,
33
+ "exact_match_stderr,score-first": 0.018096629806809615,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.5101214574898786,
38
+ "acc_stderr,none": 0.022514253141506547,
39
+ "acc_norm,none": 0.631578947368421,
40
+ "acc_norm_stderr,none": 0.021725139932578092,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.27950594693504116,
45
+ "acc_stderr,none": 0.006787663803150213,
46
+ "acc_norm,none": 0.4961116193961574,
47
+ "acc_norm_stderr,none": 0.007562518813342989,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.5205855443732845,
52
+ "exact_match_stderr,score-first": 0.0075563351383856545,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.5488888888888889,
57
+ "exact_match_stderr,score-first": 0.016596027123632052,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.3304178814382896,
62
+ "acc_stderr,none": 0.014670237249523797,
63
+ "acc_norm,none": 0.3304178814382896,
64
+ "acc_norm_stderr,none": 0.014670237249523797,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.06316812439261418,
69
+ "exact_match_stderr,score-first": 0.007587230238881371,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.394,
74
+ "acc_stderr,none": 0.015459721957493379,
75
+ "acc_norm,none": 0.394,
76
+ "acc_norm_stderr,none": 0.015459721957493379,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.378,
81
+ "exact_match_stderr,score-first": 0.015341165254026642,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.4202226345083488,
86
+ "acc_stderr,none": 0.015040507729236048,
87
+ "acc_norm,none": 0.4202226345083488,
88
+ "acc_norm_stderr,none": 0.015040507729236048,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.33302411873840443,
93
+ "exact_match_stderr,score-first": 0.014361012392990771,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.36735172525836796,
100
+ "acc_stderr,none": 0.0038509677725416733,
101
+ "acc_norm,none": 0.4663147839437283,
102
+ "acc_norm_stderr,none": 0.0038461601230822184,
103
+ "exact_match,score-first": 0.3178566795480225,
104
+ "exact_match_stderr,score-first": 0.11809034149086765,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_8bit=True",
738
+ "batch_size": "32",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
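Every generate_until task in these configs post-processes the model output with the same "score-first" filter: a regex that captures the first standalone answer letter, followed by take_first. A small illustration of the equivalent logic (my own sketch, not code from the harness; the harness substitutes its own fallback value when nothing matches):

```python
import re

def score_first(generation: str, letters: str = "ABCD") -> str:
    # Mirror the "score-first" filter: regex (\b[ABCD]\b), then take the first match.
    # Returns "" when no standalone answer letter is found (illustrative choice only).
    match = re.search(rf"\b[{letters}]\b", generation)
    return match.group(0) if match else ""

print(score_first("Prawidłowa odpowiedź: C - tak, sugerowana odpowiedź jest poprawna"))  # -> C
print(score_first("G", letters="ABCDEFGH"))  # the 8tags variant allows letters A-H -> G
```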
polish_benchmarks-out15/results_Mistral-7B-Instruct-v0.1-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.49398389283315836,
5
+ "acc_stderr,none": 0.012858915217951268,
6
+ "acc_norm,none": 0.46690518138156606,
7
+ "acc_norm_stderr,none": 0.009764315476783709,
8
+ "exact_match,score-first": 0.49718012068426903,
9
+ "exact_match_stderr,score-first": 0.05661357060141459,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.3888888888888889,
14
+ "acc_stderr,none": 0.016258969321955846,
15
+ "acc_norm,none": 0.3888888888888889,
16
+ "acc_norm_stderr,none": 0.016258969321955846,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.49445983379501385,
21
+ "exact_match_stderr,score-first": 0.0186198299495111,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.6426592797783933,
26
+ "acc_stderr,none": 0.017846950263101905,
27
+ "acc_norm,none": 0.5484764542936288,
28
+ "acc_norm_stderr,none": 0.018533249102656933,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.5060728744939271,
33
+ "exact_match_stderr,score-first": 0.02251720641142702,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.6295546558704453,
38
+ "acc_stderr,none": 0.02174980315792073,
39
+ "acc_norm,none": 0.6153846153846154,
40
+ "acc_norm_stderr,none": 0.021911049688303364,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.576395242451967,
45
+ "acc_stderr,none": 0.007473950290939037,
46
+ "acc_norm,none": 0.5384263494967978,
47
+ "acc_norm_stderr,none": 0.007540380361885667,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.5548947849954254,
52
+ "exact_match_stderr,score-first": 0.007517029588350812,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.43333333333333335,
57
+ "exact_match_stderr,score-first": 0.016527038403575993,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.16909620991253643,
62
+ "acc_stderr,none": 0.011690840877120627,
63
+ "acc_norm,none": 0.16909620991253643,
64
+ "acc_norm_stderr,none": 0.011690840877120627,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.47230320699708456,
69
+ "exact_match_stderr,score-first": 0.015570627580366102,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.411,
74
+ "acc_stderr,none": 0.015566673418599271,
75
+ "acc_norm,none": 0.411,
76
+ "acc_norm_stderr,none": 0.015566673418599271,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.517,
81
+ "exact_match_stderr,score-first": 0.015810153729833427,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.3042671614100185,
86
+ "acc_stderr,none": 0.014019771683156095,
87
+ "acc_norm,none": 0.3042671614100185,
88
+ "acc_norm_stderr,none": 0.014019771683156095,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.2894248608534323,
93
+ "exact_match_stderr,score-first": 0.013818632569955733,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.49398389283315836,
100
+ "acc_stderr,none": 0.012858915217951268,
101
+ "acc_norm,none": 0.46690518138156606,
102
+ "acc_norm_stderr,none": 0.009764315476783709,
103
+ "exact_match,score-first": 0.49718012068426903,
104
+ "exact_match_stderr,score-first": 0.05661357060141459,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-Instruct-v0.1",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_Mistral-7B-Instruct-v0.1-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.6462052618292959,
5
+ "acc_stderr,none": 0.0015615769015835921,
6
+ "acc_norm,none": 0.6389874814590853,
7
+ "acc_norm_stderr,none": 0.001474434059209013,
8
+ "exact_match,score-first": 0.6367861534741802,
9
+ "exact_match_stderr,score-first": 0.06797895137677959,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.56,
14
+ "acc_stderr,none": 0.016555431546228898,
15
+ "acc_norm,none": 0.56,
16
+ "acc_norm_stderr,none": 0.016555431546228898,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.6371191135734072,
21
+ "exact_match_stderr,score-first": 0.017907078257599084,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.6454293628808865,
26
+ "acc_stderr,none": 0.017815914141239345,
27
+ "acc_norm,none": 0.6121883656509696,
28
+ "acc_norm_stderr,none": 0.018146184424594067,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.631578947368421,
33
+ "exact_match_stderr,score-first": 0.021725139932578096,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.6396761133603239,
38
+ "acc_stderr,none": 0.021622361995133323,
39
+ "acc_norm,none": 0.631578947368421,
40
+ "acc_norm_stderr,none": 0.021725139932578092,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.6825251601097896,
45
+ "acc_stderr,none": 0.0070408255818903516,
46
+ "acc_norm,none": 0.67451967063129,
47
+ "acc_norm_stderr,none": 0.007087111733661532,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.6690301921317475,
52
+ "exact_match_stderr,score-first": 0.007117486371656686,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.5166666666666667,
57
+ "exact_match_stderr,score-first": 0.016666666666666774,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.7482993197278912,
62
+ "acc_stderr,none": 0.01353578079370368,
63
+ "acc_norm,none": 0.7482993197278912,
64
+ "acc_norm_stderr,none": 0.01353578079370368,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.7978620019436345,
69
+ "exact_match_stderr,score-first": 0.012525391634647394,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.609,
74
+ "acc_stderr,none": 0.015438826294681787,
75
+ "acc_norm,none": 0.609,
76
+ "acc_norm_stderr,none": 0.015438826294681787,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.637,
81
+ "exact_match_stderr,score-first": 0.015213890444671288,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.5974025974025974,
86
+ "acc_stderr,none": 0.014943804796035598,
87
+ "acc_norm,none": 0.5974025974025974,
88
+ "acc_norm_stderr,none": 0.014943804796035598,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.43042671614100186,
93
+ "exact_match_stderr,score-first": 0.015087474995477622,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.6462052618292959,
100
+ "acc_stderr,none": 0.0015615769015835921,
101
+ "acc_norm,none": 0.6389874814590853,
102
+ "acc_norm_stderr,none": 0.001474434059209013,
103
+ "exact_match,score-first": 0.6367861534741802,
104
+ "exact_match_stderr,score-first": 0.06797895137677959,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=mistralai/Mistral-7B-Instruct-v0.1",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_phi-2-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.27007257007836166,
5
+ "acc_stderr,none": 0.03582238048710471,
6
+ "acc_norm,none": 0.285109793494696,
7
+ "acc_norm_stderr,none": 0.03337929811581892,
8
+ "exact_match,score-first": 0.11447466303954626,
9
+ "exact_match_stderr,score-first": 0.0650463530525679,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.2788888888888889,
14
+ "acc_stderr,none": 0.014956736888683079,
15
+ "acc_norm,none": 0.2788888888888889,
16
+ "acc_norm_stderr,none": 0.014956736888683079,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.16204986149584488,
21
+ "exact_match_stderr,score-first": 0.013723528490778158,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.16204986149584488,
26
+ "acc_stderr,none": 0.013723528490778158,
27
+ "acc_norm,none": 0.16066481994459833,
28
+ "acc_norm_stderr,none": 0.01367604360268079,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.0020242914979757085,
33
+ "exact_match_stderr,score-first": 0.0020242914979757094,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.0020242914979757085,
38
+ "acc_stderr,none": 0.0020242914979757094,
39
+ "acc_norm,none": 0.01417004048582996,
40
+ "acc_norm_stderr,none": 0.0053230812117738635,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.16628545288197621,
45
+ "acc_stderr,none": 0.005631776551377185,
46
+ "acc_norm,none": 0.19762122598353157,
47
+ "acc_norm_stderr,none": 0.006023044988006115,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.12580054894784995,
52
+ "exact_match_stderr,score-first": 0.005015983850697009,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.23444444444444446,
57
+ "exact_match_stderr,score-first": 0.01412955496811077,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8299319727891157,
62
+ "acc_stderr,none": 0.011717528745193611,
63
+ "acc_norm,none": 0.8299319727891157,
64
+ "acc_norm_stderr,none": 0.011717528745193611,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.0,
69
+ "exact_match_stderr,score-first": 0.0,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.419,
74
+ "acc_stderr,none": 0.015610338967577795,
75
+ "acc_norm,none": 0.419,
76
+ "acc_norm_stderr,none": 0.015610338967577795,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.0,
81
+ "exact_match_stderr,score-first": 0.0,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6957328385899815,
86
+ "acc_stderr,none": 0.014019771683156095,
87
+ "acc_norm,none": 0.6957328385899815,
88
+ "acc_norm_stderr,none": 0.014019771683156095,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.0,
93
+ "exact_match_stderr,score-first": 0.0,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.27007257007836166,
100
+ "acc_stderr,none": 0.03582238048710471,
101
+ "acc_norm,none": 0.285109793494696,
102
+ "acc_norm_stderr,none": 0.03337929811581892,
103
+ "exact_match,score-first": 0.11447466303954626,
104
+ "exact_match_stderr,score-first": 0.0650463530525679,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=microsoft/phi-2",
738
+ "batch_size": "8",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_phi-2-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.3691258686580846,
5
+ "acc_stderr,none": 0.018175209512165027,
6
+ "acc_norm,none": 0.3731501947501108,
7
+ "acc_norm_stderr,none": 0.017798235118430115,
8
+ "exact_match,score-first": 0.31161270116954054,
9
+ "exact_match_stderr,score-first": 0.08765716963110161,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.3188888888888889,
14
+ "acc_stderr,none": 0.015543500457982978,
15
+ "acc_norm,none": 0.3188888888888889,
16
+ "acc_norm_stderr,none": 0.015543500457982978,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.3047091412742382,
21
+ "exact_match_stderr,score-first": 0.017141876783613008,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.25761772853185594,
26
+ "acc_stderr,none": 0.01628673526099632,
27
+ "acc_norm,none": 0.25069252077562326,
28
+ "acc_norm_stderr,none": 0.016141098584878066,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.23684210526315788,
33
+ "exact_match_stderr,score-first": 0.019147549344973702,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.26720647773279355,
38
+ "acc_stderr,none": 0.01992924482184967,
39
+ "acc_norm,none": 0.2894736842105263,
40
+ "acc_norm_stderr,none": 0.02042542813065894,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.3266239707227813,
45
+ "acc_stderr,none": 0.0070935306571683265,
46
+ "acc_norm,none": 0.33417200365965233,
47
+ "acc_norm_stderr,none": 0.007134698974088815,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.2584629460201281,
52
+ "exact_match_stderr,score-first": 0.00662178644480435,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.30444444444444446,
57
+ "exact_match_stderr,score-first": 0.01534758660383228,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8172983479105929,
62
+ "acc_stderr,none": 0.012052162826921252,
63
+ "acc_norm,none": 0.8172983479105929,
64
+ "acc_norm_stderr,none": 0.012052162826921252,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.6530612244897959,
69
+ "exact_match_stderr,score-first": 0.014845909626683668,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.361,
74
+ "acc_stderr,none": 0.015195720118175129,
75
+ "acc_norm,none": 0.361,
76
+ "acc_norm_stderr,none": 0.015195720118175129,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.294,
81
+ "exact_match_stderr,score-first": 0.014414290540008206,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6595547309833024,
86
+ "acc_stderr,none": 0.014439136432948205,
87
+ "acc_norm,none": 0.6595547309833024,
88
+ "acc_norm_stderr,none": 0.014439136432948205,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.3942486085343228,
93
+ "exact_match_stderr,score-first": 0.014891019870970914,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.3691258686580846,
100
+ "acc_stderr,none": 0.018175209512165027,
101
+ "acc_norm,none": 0.3731501947501108,
102
+ "acc_norm_stderr,none": 0.017798235118430115,
103
+ "exact_match,score-first": 0.31161270116954054,
104
+ "exact_match_stderr,score-first": 0.08765716963110161,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=microsoft/phi-2",
738
+ "batch_size": "8",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_plt5-large-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.27715148073174195,
5
+ "acc_stderr,none": 0.03330027976782747,
6
+ "acc_norm,none": 0.2563417704118359,
7
+ "acc_norm_stderr,none": 0.03668364767449961,
8
+ "exact_match,score-first": 0.0010576844399036573,
9
+ "exact_match_stderr,score-first": 0.0012246701124591967,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.2788888888888889,
14
+ "acc_stderr,none": 0.014956736888683087,
15
+ "acc_norm,none": 0.2788888888888889,
16
+ "acc_norm_stderr,none": 0.014956736888683087,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.0,
21
+ "exact_match_stderr,score-first": 0.0,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.203601108033241,
26
+ "acc_stderr,none": 0.01499642568791465,
27
+ "acc_norm,none": 0.14958448753462603,
28
+ "acc_norm_stderr,none": 0.013282848932398793,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.0020242914979757085,
33
+ "exact_match_stderr,score-first": 0.0020242914979757155,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.36639676113360325,
38
+ "acc_stderr,none": 0.021700065645408242,
39
+ "acc_norm,none": 0.32793522267206476,
40
+ "acc_norm_stderr,none": 0.021143463996780142,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.13472095150960658,
45
+ "acc_stderr,none": 0.005164226257415815,
46
+ "acc_norm,none": 0.10498627630375114,
47
+ "acc_norm_stderr,none": 0.004636504762297018,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.0029734675205855443,
52
+ "exact_match_stderr,score-first": 0.0008235586900281045,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.0,
57
+ "exact_match_stderr,score-first": 0.0,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8309037900874635,
62
+ "acc_stderr,none": 0.011690840877120627,
63
+ "acc_norm,none": 0.8309037900874635,
64
+ "acc_norm_stderr,none": 0.011690840877120627,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.0,
69
+ "exact_match_stderr,score-first": 0.0,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.397,
74
+ "acc_stderr,none": 0.015480007449307984,
75
+ "acc_norm,none": 0.397,
76
+ "acc_norm_stderr,none": 0.015480007449307984,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.001,
81
+ "exact_match_stderr,score-first": 0.0010000000000000015,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6957328385899815,
86
+ "acc_stderr,none": 0.014019771683156095,
87
+ "acc_norm,none": 0.6957328385899815,
88
+ "acc_norm_stderr,none": 0.014019771683156095,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.0,
93
+ "exact_match_stderr,score-first": 0.0,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.27715148073174195,
100
+ "acc_stderr,none": 0.03330027976782747,
101
+ "acc_norm,none": 0.2563417704118359,
102
+ "acc_norm_stderr,none": 0.03668364767449961,
103
+ "exact_match,score-first": 0.0010576844399036573,
104
+ "exact_match_stderr,score-first": 0.0012246701124591967,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=allegro/plt5-large",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_plt5-large-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.27199468499631047,
5
+ "acc_stderr,none": 0.033784906809331355,
6
+ "acc_norm,none": 0.25512605070649047,
7
+ "acc_norm_stderr,none": 0.036788778161595106,
8
+ "exact_match,score-first": 0.0007225007392369538,
9
+ "exact_match_stderr,score-first": 0.0011188492270730265,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.2733333333333333,
14
+ "acc_stderr,none": 0.01486394440941748,
15
+ "acc_norm,none": 0.2733333333333333,
16
+ "acc_norm_stderr,none": 0.01486394440941748,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.0,
21
+ "exact_match_stderr,score-first": 0.0,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.18282548476454294,
26
+ "acc_stderr,none": 0.014394883071700351,
27
+ "acc_norm,none": 0.14958448753462603,
28
+ "acc_norm_stderr,none": 0.013282848932398793,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.0,
33
+ "exact_match_stderr,score-first": 0.0,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.32793522267206476,
38
+ "acc_stderr,none": 0.021143463996780142,
39
+ "acc_norm,none": 0.32793522267206476,
40
+ "acc_norm_stderr,none": 0.021143463996780142,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.13472095150960658,
45
+ "acc_stderr,none": 0.005164226257415815,
46
+ "acc_norm,none": 0.10498627630375114,
47
+ "acc_norm_stderr,none": 0.004636504762297021,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.0013723696248856359,
52
+ "exact_match_stderr,score-first": 0.0005599470156852725,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.0,
57
+ "exact_match_stderr,score-first": 0.0,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.8309037900874635,
62
+ "acc_stderr,none": 0.011690840877120627,
63
+ "acc_norm,none": 0.8309037900874635,
64
+ "acc_norm_stderr,none": 0.011690840877120627,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.001943634596695821,
69
+ "exact_match_stderr,score-first": 0.0013736885791548168,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.396,
74
+ "acc_stderr,none": 0.01547331326585941,
75
+ "acc_norm,none": 0.396,
76
+ "acc_norm_stderr,none": 0.01547331326585941,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.004,
81
+ "exact_match_stderr,score-first": 0.0019969947390987295,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.6957328385899815,
86
+ "acc_stderr,none": 0.014019771683156095,
87
+ "acc_norm,none": 0.6957328385899815,
88
+ "acc_norm_stderr,none": 0.014019771683156095,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.0,
93
+ "exact_match_stderr,score-first": 0.0,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.27199468499631047,
100
+ "acc_stderr,none": 0.033784906809331355,
101
+ "acc_norm,none": 0.25512605070649047,
102
+ "acc_norm_stderr,none": 0.036788778161595106,
103
+ "exact_match,score-first": 0.0007225007392369538,
104
+ "exact_match_stderr,score-first": 0.0011188492270730265,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=allegro/plt5-large",
738
+ "batch_size": "1",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_polish-gpt2-xl-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.23191934194661715,
5
+ "acc_stderr,none": 0.017886442846475455,
6
+ "acc_norm,none": 0.2609556062133738,
7
+ "acc_norm_stderr,none": 0.014228167702518467,
8
+ "exact_match,score-first": 0.0017391614971304186,
9
+ "exact_match_stderr,score-first": 0.002440262666720261,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.21666666666666667,
14
+ "acc_stderr,none": 0.013740087830700175,
15
+ "acc_norm,none": 0.21666666666666667,
16
+ "acc_norm_stderr,none": 0.013740087830700175,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.0,
21
+ "exact_match_stderr,score-first": 0.0,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.16204986149584488,
26
+ "acc_stderr,none": 0.013723528490778158,
27
+ "acc_norm,none": 0.16066481994459833,
28
+ "acc_norm_stderr,none": 0.013676043602680779,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.0,
33
+ "exact_match_stderr,score-first": 0.0,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.004048582995951417,
38
+ "acc_stderr,none": 0.0028598755883223574,
39
+ "acc_norm,none": 0.08704453441295547,
40
+ "acc_norm_stderr,none": 0.012696151326390646,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.17108874656907594,
45
+ "acc_stderr,none": 0.005696057135228034,
46
+ "acc_norm,none": 0.22461116193961575,
47
+ "acc_norm_stderr,none": 0.00631226228845873,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.0034309240622140895,
52
+ "exact_match_stderr,score-first": 0.0008844409715419932,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.01,
57
+ "exact_match_stderr,score-first": 0.0033184688965809087,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.6598639455782312,
62
+ "acc_stderr,none": 0.014776003225079232,
63
+ "acc_norm,none": 0.6598639455782312,
64
+ "acc_norm_stderr,none": 0.014776003225079232,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.0,
69
+ "exact_match_stderr,score-first": 0.0,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.419,
74
+ "acc_stderr,none": 0.015610338967577794,
75
+ "acc_norm,none": 0.419,
76
+ "acc_norm_stderr,none": 0.015610338967577794,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.0,
81
+ "exact_match_stderr,score-first": 0.0,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.41836734693877553,
86
+ "acc_stderr,none": 0.015031261449258846,
87
+ "acc_norm,none": 0.41836734693877553,
88
+ "acc_norm_stderr,none": 0.015031261449258846,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.0037105751391465678,
93
+ "exact_match_stderr,score-first": 0.001852701801669208,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.23191934194661715,
100
+ "acc_stderr,none": 0.017886442846475455,
101
+ "acc_norm,none": 0.2609556062133738,
102
+ "acc_norm_stderr,none": 0.014228167702518467,
103
+ "exact_match,score-first": 0.0017391614971304186,
104
+ "exact_match_stderr,score-first": 0.002440262666720261,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=sdadas/polish-gpt2-xl",
738
+ "batch_size": "8",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out15/results_polish-gpt2-xl-5_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.25193855283943917,
5
+ "acc_stderr,none": 0.02184871540454155,
6
+ "acc_norm,none": 0.24514543827432092,
7
+ "acc_norm_stderr,none": 0.022718727658377173,
8
+ "exact_match,score-first": 0.047392714266956536,
9
+ "exact_match_stderr,score-first": 0.023745830856935577,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.22777777777777777,
14
+ "acc_stderr,none": 0.013987721523687937,
15
+ "acc_norm,none": 0.22777777777777777,
16
+ "acc_norm_stderr,none": 0.013987721523687937,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.037396121883656507,
21
+ "exact_match_stderr,score-first": 0.007065932013355643,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.23684210526315788,
26
+ "acc_stderr,none": 0.01583321192224944,
27
+ "acc_norm,none": 0.2299168975069252,
28
+ "acc_norm_stderr,none": 0.0156706355314007,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.032388663967611336,
33
+ "exact_match_stderr,score-first": 0.007973032439158353,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.19230769230769232,
38
+ "acc_stderr,none": 0.017749978932478032,
39
+ "acc_norm,none": 0.22064777327935223,
40
+ "acc_norm_stderr,none": 0.018676401648930148,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.1537053979871912,
45
+ "acc_stderr,none": 0.005455253568185525,
46
+ "acc_norm,none": 0.1372369624885636,
47
+ "acc_norm_stderr,none": 0.0052046425832142406,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.045516925892040254,
52
+ "exact_match_stderr,score-first": 0.0031526820496949807,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.11777777777777777,
57
+ "exact_match_stderr,score-first": 0.010750801136276045,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.7619047619047619,
62
+ "acc_stderr,none": 0.013284008396468271,
63
+ "acc_norm,none": 0.7619047619047619,
64
+ "acc_norm_stderr,none": 0.013284008396468271,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.007774538386783284,
69
+ "exact_match_stderr,score-first": 0.0027393399577880506,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.398,
74
+ "acc_stderr,none": 0.015486634102858925,
75
+ "acc_norm,none": 0.398,
76
+ "acc_norm_stderr,none": 0.015486634102858925,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.09,
81
+ "exact_match_stderr,score-first": 0.009054390204866437,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.47866419294990725,
86
+ "acc_stderr,none": 0.015221813482588829,
87
+ "acc_norm,none": 0.47866419294990725,
88
+ "acc_norm_stderr,none": 0.015221813482588829,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.08070500927643785,
93
+ "exact_match_stderr,score-first": 0.008299842306683255,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.25193855283943917,
100
+ "acc_stderr,none": 0.02184871540454155,
101
+ "acc_norm,none": 0.24514543827432092,
102
+ "acc_norm_stderr,none": 0.022718727658377173,
103
+ "exact_match,score-first": 0.047392714266956536,
104
+ "exact_match_stderr,score-first": 0.023745830856935577,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 5,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 5,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 5,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 5,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 5,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 5,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 5,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 5,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 5,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 5,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 5,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 5,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 5,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 5,
720
+ "polemo2_in": 5,
721
+ "polemo2_in_multiple_choice": 5,
722
+ "polemo2_out": 5,
723
+ "polemo2_out_multiple_choice": 5,
724
+ "polish": 5,
725
+ "polish_8tags_multiple_choice": 5,
726
+ "polish_8tags_regex": 5,
727
+ "polish_belebele_regex": 5,
728
+ "polish_dyk_multiple_choice": 5,
729
+ "polish_dyk_regex": 5,
730
+ "polish_ppc_multiple_choice": 5,
731
+ "polish_ppc_regex": 5,
732
+ "polish_psc_multiple_choice": 5,
733
+ "polish_psc_regex": 5
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=sdadas/polish-gpt2-xl",
738
+ "batch_size": "8",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out16/results_Mistral-7B-v0.1-0_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.4016675351745701,
5
+ "exact_match_stderr,score-first": 0.11168933768467093,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.20221606648199447,
10
+ "exact_match_stderr,score-first": 0.014958320686196893,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.20242914979757085,
15
+ "exact_match_stderr,score-first": 0.018096629806809615,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.5233302836230558,
20
+ "exact_match_stderr,score-first": 0.007554510181529444,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.5522222222222222,
25
+ "exact_match_stderr,score-first": 0.01658472834066539,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.06997084548104957,
30
+ "exact_match_stderr,score-first": 0.007956284148707688,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.376,
35
+ "exact_match_stderr,score-first": 0.015325105508898125,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.34786641929499074,
40
+ "exact_match_stderr,score-first": 0.014513317088383356,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.4016675351745701,
47
+ "exact_match_stderr,score-first": 0.11168933768467093,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 0,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 0,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 0,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 0,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 0,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 0,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 0,
402
+ "polemo2_out": 0,
403
+ "polish_8tags_regex": 0,
404
+ "polish_belebele_regex": 0,
405
+ "polish_dyk_regex": 0,
406
+ "polish_generate": 0,
407
+ "polish_ppc_regex": 0,
408
+ "polish_psc_regex": 0
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_8bit=True",
413
+ "batch_size": "4",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
polish_benchmarks-out16/results_Mistral-7B-v0.1-0_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.36227201667535175,
5
+ "acc_stderr,none": 0.04717067548681556,
6
+ "acc_norm,none": 0.4584679520583637,
7
+ "acc_norm_stderr,none": 0.05699426545309856,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.3844444444444444,
12
+ "acc_stderr,none": 0.016224472156351456,
13
+ "acc_norm,none": 0.3844444444444444,
14
+ "acc_norm_stderr,none": 0.016224472156351456,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.6398891966759003,
19
+ "acc_stderr,none": 0.017877337200271817,
20
+ "acc_norm,none": 0.5512465373961218,
21
+ "acc_norm_stderr,none": 0.018522909771808996,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.4979757085020243,
26
+ "acc_stderr,none": 0.022518682901144425,
27
+ "acc_norm,none": 0.6153846153846154,
28
+ "acc_norm_stderr,none": 0.021911049688303364,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.27973467520585543,
33
+ "acc_stderr,none": 0.0067893625784577,
34
+ "acc_norm,none": 0.4922232387923147,
35
+ "acc_norm_stderr,none": 0.00756183269153676,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.34207968901846453,
40
+ "acc_stderr,none": 0.014796320639090105,
41
+ "acc_norm,none": 0.34207968901846453,
42
+ "acc_norm_stderr,none": 0.014796320639090105,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.399,
47
+ "acc_stderr,none": 0.015493193313162908,
48
+ "acc_norm,none": 0.399,
49
+ "acc_norm_stderr,none": 0.015493193313162908,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.4155844155844156,
54
+ "acc_stderr,none": 0.015016982312043996,
55
+ "acc_norm,none": 0.4155844155844156,
56
+ "acc_norm_stderr,none": 0.015016982312043996,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.36227201667535175,
63
+ "acc_stderr,none": 0.04717067548681556,
64
+ "acc_norm,none": 0.4584679520583637,
65
+ "acc_norm_stderr,none": 0.05699426545309856,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 0,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 0,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 0,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 0,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 0,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 0,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 0,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 0,
337
+ "polemo2_in_multiple_choice": 0,
338
+ "polemo2_out_multiple_choice": 0,
339
+ "polish_8tags_multiple_choice": 0,
340
+ "polish_dyk_multiple_choice": 0,
341
+ "polish_mc": 0,
342
+ "polish_ppc_multiple_choice": 0,
343
+ "polish_psc_multiple_choice": 0
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_8bit=True",
348
+ "batch_size": "4",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
polish_benchmarks-out16/results_Mistral-7B-v0.1-5_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.6606565919749869,
5
+ "exact_match_stderr,score-first": 0.11832049608428108,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.6883656509695291,
10
+ "exact_match_stderr,score-first": 0.017249027853964156,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.631578947368421,
15
+ "exact_match_stderr,score-first": 0.021725139932578096,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.7166056724611162,
20
+ "exact_match_stderr,score-first": 0.006816246600645744,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.7088888888888889,
25
+ "exact_match_stderr,score-first": 0.015150906906440088,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.8318756073858115,
30
+ "exact_match_stderr,score-first": 0.01166401318259015,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.535,
35
+ "exact_match_stderr,score-first": 0.01578049505003016,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.34137291280148424,
40
+ "exact_match_stderr,score-first": 0.014448623392094184,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.6606565919749869,
47
+ "exact_match_stderr,score-first": 0.11832049608428108,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 5,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 5,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 5,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 5,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 5,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 5,
402
+ "polemo2_out": 5,
403
+ "polish_8tags_regex": 5,
404
+ "polish_belebele_regex": 5,
405
+ "polish_dyk_regex": 5,
406
+ "polish_generate": 5,
407
+ "polish_ppc_regex": 5,
408
+ "polish_psc_regex": 5
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_8bit=True",
413
+ "batch_size": "4",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
polish_benchmarks-out16/results_Mistral-7B-v0.1-5_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.7136008337675873,
5
+ "acc_stderr,none": 0.04667902460946271,
6
+ "acc_norm,none": 0.7102657634184472,
7
+ "acc_norm_stderr,none": 0.04676619055581406,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.6766666666666666,
12
+ "acc_stderr,none": 0.015600294087844734,
13
+ "acc_norm,none": 0.6766666666666666,
14
+ "acc_norm_stderr,none": 0.015600294087844734,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.7188365650969529,
19
+ "acc_stderr,none": 0.01674275290990633,
20
+ "acc_norm,none": 0.7202216066481995,
21
+ "acc_norm_stderr,none": 0.016717545989724802,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.6862348178137652,
26
+ "acc_stderr,none": 0.020898508377426517,
27
+ "acc_norm,none": 0.7024291497975709,
28
+ "acc_norm_stderr,none": 0.02059078937593709,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.7188929551692589,
33
+ "acc_stderr,none": 0.006799509351827805,
34
+ "acc_norm,none": 0.7095150960658737,
35
+ "acc_norm_stderr,none": 0.006866765250193195,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.8357628765792031,
40
+ "acc_stderr,none": 0.011555284585816245,
41
+ "acc_norm,none": 0.8357628765792031,
42
+ "acc_norm_stderr,none": 0.011555284585816245,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.615,
47
+ "acc_stderr,none": 0.015395194445410808,
48
+ "acc_norm,none": 0.615,
49
+ "acc_norm_stderr,none": 0.015395194445410808,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.7068645640074211,
54
+ "acc_stderr,none": 0.013870573387180219,
55
+ "acc_norm,none": 0.7068645640074211,
56
+ "acc_norm_stderr,none": 0.013870573387180219,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.7136008337675873,
63
+ "acc_stderr,none": 0.04667902460946271,
64
+ "acc_norm,none": 0.7102657634184472,
65
+ "acc_norm_stderr,none": 0.04676619055581406,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 5,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 5,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 5,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 5,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 5,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 5,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 5,
337
+ "polemo2_in_multiple_choice": 5,
338
+ "polemo2_out_multiple_choice": 5,
339
+ "polish_8tags_multiple_choice": 5,
340
+ "polish_dyk_multiple_choice": 5,
341
+ "polish_mc": 5,
342
+ "polish_ppc_multiple_choice": 5,
343
+ "polish_psc_multiple_choice": 5
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=mistralai/Mistral-7B-v0.1,load_in_8bit=True",
348
+ "batch_size": "4",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
polish_benchmarks-out17/results_zephyr-speakleash-007-pl-8192-32-16-0.05-0_polish/results.json ADDED
@@ -0,0 +1,747 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.5150426314064328,
5
+ "acc_stderr,none": 0.02084925895237145,
6
+ "acc_norm,none": 0.51034687546702,
7
+ "acc_norm_stderr,none": 0.01981008430231029,
8
+ "exact_match,score-first": 0.20640208219447245,
9
+ "exact_match_stderr,score-first": 0.16605823463539698,
10
+ "alias": "polish"
11
+ },
12
+ "belebele_pol_Latn": {
13
+ "acc,none": 0.3188888888888889,
14
+ "acc_stderr,none": 0.015543500457982978,
15
+ "acc_norm,none": 0.3188888888888889,
16
+ "acc_norm_stderr,none": 0.015543500457982978,
17
+ "alias": " - belebele_pol_Latn"
18
+ },
19
+ "polemo2_in": {
20
+ "exact_match,score-first": 0.0,
21
+ "exact_match_stderr,score-first": 0.0,
22
+ "alias": " - polemo2_in"
23
+ },
24
+ "polemo2_in_multiple_choice": {
25
+ "acc,none": 0.8088642659279779,
26
+ "acc_stderr,none": 0.014643373399304915,
27
+ "acc_norm,none": 0.721606648199446,
28
+ "acc_norm_stderr,none": 0.01669214160924836,
29
+ "alias": " - polemo2_in_multiple_choice"
30
+ },
31
+ "polemo2_out": {
32
+ "exact_match,score-first": 0.0,
33
+ "exact_match_stderr,score-first": 0.0,
34
+ "alias": " - polemo2_out"
35
+ },
36
+ "polemo2_out_multiple_choice": {
37
+ "acc,none": 0.5890688259109311,
38
+ "acc_stderr,none": 0.02215869118042188,
39
+ "acc_norm,none": 0.7064777327935222,
40
+ "acc_norm_stderr,none": 0.020509085992218148,
41
+ "alias": " - polemo2_out_multiple_choice"
42
+ },
43
+ "polish_8tags_multiple_choice": {
44
+ "acc,none": 0.6301463860933212,
45
+ "acc_stderr,none": 0.007302057610105687,
46
+ "acc_norm,none": 0.62419945105215,
47
+ "acc_norm_stderr,none": 0.007325714564602613,
48
+ "alias": " - polish_8tags_multiple_choice"
49
+ },
50
+ "polish_8tags_regex": {
51
+ "exact_match,score-first": 0.4846752058554437,
52
+ "exact_match_stderr,score-first": 0.007559194460000764,
53
+ "alias": " - polish_8tags_regex"
54
+ },
55
+ "polish_belebele_regex": {
56
+ "exact_match,score-first": 0.3511111111111111,
57
+ "exact_match_stderr,score-first": 0.015919433516195364,
58
+ "alias": " - polish_belebele_regex"
59
+ },
60
+ "polish_dyk_multiple_choice": {
61
+ "acc,none": 0.17103984450923226,
62
+ "acc_stderr,none": 0.011744077740056324,
63
+ "acc_norm,none": 0.17103984450923226,
64
+ "acc_norm_stderr,none": 0.011744077740056324,
65
+ "alias": " - polish_dyk_multiple_choice"
66
+ },
67
+ "polish_dyk_regex": {
68
+ "exact_match,score-first": 0.0,
69
+ "exact_match_stderr,score-first": 0.0,
70
+ "alias": " - polish_dyk_regex"
71
+ },
72
+ "polish_ppc_multiple_choice": {
73
+ "acc,none": 0.365,
74
+ "acc_stderr,none": 0.015231776226264914,
75
+ "acc_norm,none": 0.365,
76
+ "acc_norm_stderr,none": 0.015231776226264914,
77
+ "alias": " - polish_ppc_multiple_choice"
78
+ },
79
+ "polish_ppc_regex": {
80
+ "exact_match,score-first": 0.404,
81
+ "exact_match_stderr,score-first": 0.015524980677122583,
82
+ "alias": " - polish_ppc_regex"
83
+ },
84
+ "polish_psc_multiple_choice": {
85
+ "acc,none": 0.3042671614100185,
86
+ "acc_stderr,none": 0.014019771683156095,
87
+ "acc_norm,none": 0.3042671614100185,
88
+ "acc_norm_stderr,none": 0.014019771683156095,
89
+ "alias": " - polish_psc_multiple_choice"
90
+ },
91
+ "polish_psc_regex": {
92
+ "exact_match,score-first": 0.5111317254174397,
93
+ "exact_match_stderr,score-first": 0.015231914544654893,
94
+ "alias": " - polish_psc_regex"
95
+ }
96
+ },
97
+ "groups": {
98
+ "polish": {
99
+ "acc,none": 0.5150426314064328,
100
+ "acc_stderr,none": 0.02084925895237145,
101
+ "acc_norm,none": 0.51034687546702,
102
+ "acc_norm_stderr,none": 0.01981008430231029,
103
+ "exact_match,score-first": 0.20640208219447245,
104
+ "exact_match_stderr,score-first": 0.16605823463539698,
105
+ "alias": "polish"
106
+ }
107
+ },
108
+ "configs": {
109
+ "belebele_pol_Latn": {
110
+ "task": "belebele_pol_Latn",
111
+ "group": "belebele",
112
+ "dataset_path": "facebook/belebele",
113
+ "test_split": "pol_Latn",
114
+ "fewshot_split": "pol_Latn",
115
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
116
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
117
+ "doc_to_choice": [
118
+ "A",
119
+ "B",
120
+ "C",
121
+ "D"
122
+ ],
123
+ "description": "",
124
+ "target_delimiter": " ",
125
+ "fewshot_delimiter": "\n\n",
126
+ "fewshot_config": {
127
+ "sampler": "first_n"
128
+ },
129
+ "num_fewshot": 0,
130
+ "metric_list": [
131
+ {
132
+ "metric": "acc",
133
+ "aggregation": "mean",
134
+ "higher_is_better": true
135
+ },
136
+ {
137
+ "metric": "acc_norm",
138
+ "aggregation": "mean",
139
+ "higher_is_better": true
140
+ }
141
+ ],
142
+ "output_type": "multiple_choice",
143
+ "repeats": 1,
144
+ "should_decontaminate": true,
145
+ "doc_to_decontamination_query": "{{question}}",
146
+ "metadata": {
147
+ "version": 0.0
148
+ }
149
+ },
150
+ "polemo2_in": {
151
+ "task": "polemo2_in",
152
+ "group": [
153
+ "polemo2"
154
+ ],
155
+ "dataset_path": "allegro/klej-polemo2-in",
156
+ "training_split": "train",
157
+ "validation_split": "validation",
158
+ "test_split": "test",
159
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
160
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
161
+ "description": "",
162
+ "target_delimiter": " ",
163
+ "fewshot_delimiter": "\n\n",
164
+ "num_fewshot": 0,
165
+ "metric_list": [
166
+ {
167
+ "metric": "exact_match",
168
+ "aggregation": "mean",
169
+ "higher_is_better": true
170
+ }
171
+ ],
172
+ "output_type": "generate_until",
173
+ "generation_kwargs": {
174
+ "until": [
175
+ ".",
176
+ ","
177
+ ],
178
+ "do_sample": false,
179
+ "temperature": 0.0,
180
+ "max_gen_toks": 50
181
+ },
182
+ "repeats": 1,
183
+ "filter_list": [
184
+ {
185
+ "name": "score-first",
186
+ "filter": [
187
+ {
188
+ "function": "regex",
189
+ "regex_pattern": "(\\b[ABCD]\\b)"
190
+ },
191
+ {
192
+ "function": "take_first"
193
+ }
194
+ ]
195
+ }
196
+ ],
197
+ "should_decontaminate": true,
198
+ "doc_to_decontamination_query": "{{sentence}}",
199
+ "metadata": {
200
+ "version": 1.0
201
+ }
202
+ },
203
+ "polemo2_in_multiple_choice": {
204
+ "task": "polemo2_in_multiple_choice",
205
+ "group": [
206
+ "polemo2_mc"
207
+ ],
208
+ "dataset_path": "allegro/klej-polemo2-in",
209
+ "training_split": "train",
210
+ "validation_split": "validation",
211
+ "test_split": "test",
212
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
213
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
214
+ "doc_to_choice": [
215
+ "Neutralny",
216
+ "Negatywny",
217
+ "Pozytywny",
218
+ "Niejednoznaczny"
219
+ ],
220
+ "description": "",
221
+ "target_delimiter": " ",
222
+ "fewshot_delimiter": "\n\n",
223
+ "num_fewshot": 0,
224
+ "metric_list": [
225
+ {
226
+ "metric": "acc",
227
+ "aggregation": "mean",
228
+ "higher_is_better": true
229
+ },
230
+ {
231
+ "metric": "acc_norm",
232
+ "aggregation": "mean",
233
+ "higher_is_better": true
234
+ }
235
+ ],
236
+ "output_type": "multiple_choice",
237
+ "repeats": 1,
238
+ "should_decontaminate": true,
239
+ "doc_to_decontamination_query": "{{sentence}}"
240
+ },
241
+ "polemo2_out": {
242
+ "task": "polemo2_out",
243
+ "group": [
244
+ "polemo2"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-out",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
251
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
252
+ "description": "",
253
+ "target_delimiter": " ",
254
+ "fewshot_delimiter": "\n\n",
255
+ "num_fewshot": 0,
256
+ "metric_list": [
257
+ {
258
+ "metric": "exact_match",
259
+ "aggregation": "mean",
260
+ "higher_is_better": true
261
+ }
262
+ ],
263
+ "output_type": "generate_until",
264
+ "generation_kwargs": {
265
+ "until": [
266
+ ".",
267
+ ","
268
+ ],
269
+ "do_sample": false,
270
+ "temperature": 0.0,
271
+ "max_gen_toks": 50
272
+ },
273
+ "repeats": 1,
274
+ "filter_list": [
275
+ {
276
+ "name": "score-first",
277
+ "filter": [
278
+ {
279
+ "function": "regex",
280
+ "regex_pattern": "(\\b[ABCD]\\b)"
281
+ },
282
+ {
283
+ "function": "take_first"
284
+ }
285
+ ]
286
+ }
287
+ ],
288
+ "should_decontaminate": true,
289
+ "doc_to_decontamination_query": "{{sentence}}",
290
+ "metadata": {
291
+ "version": 1.0
292
+ }
293
+ },
294
+ "polemo2_out_multiple_choice": {
295
+ "task": "polemo2_out_multiple_choice",
296
+ "group": [
297
+ "polemo2_mc"
298
+ ],
299
+ "dataset_path": "allegro/klej-polemo2-out",
300
+ "training_split": "train",
301
+ "validation_split": "validation",
302
+ "test_split": "test",
303
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
304
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
305
+ "doc_to_choice": [
306
+ "Neutralny",
307
+ "Negatywny",
308
+ "Pozytywny",
309
+ "Niejednoznaczny"
310
+ ],
311
+ "description": "",
312
+ "target_delimiter": " ",
313
+ "fewshot_delimiter": "\n\n",
314
+ "num_fewshot": 0,
315
+ "metric_list": [
316
+ {
317
+ "metric": "acc",
318
+ "aggregation": "mean",
319
+ "higher_is_better": true
320
+ },
321
+ {
322
+ "metric": "acc_norm",
323
+ "aggregation": "mean",
324
+ "higher_is_better": true
325
+ }
326
+ ],
327
+ "output_type": "multiple_choice",
328
+ "repeats": 1,
329
+ "should_decontaminate": true,
330
+ "doc_to_decontamination_query": "{{sentence}}"
331
+ },
332
+ "polish_8tags_multiple_choice": {
333
+ "task": "polish_8tags_multiple_choice",
334
+ "dataset_path": "djstrong/8tags",
335
+ "training_split": "train",
336
+ "test_split": "test",
337
+ "fewshot_split": "train",
338
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
339
+ "doc_to_target": "{{label|int}}",
340
+ "doc_to_choice": [
341
+ "Film",
342
+ "Historia",
343
+ "Jedzenie",
344
+ "Medycyna",
345
+ "Motoryzacja",
346
+ "Praca",
347
+ "Sport",
348
+ "Technologie"
349
+ ],
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "acc",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ },
360
+ {
361
+ "metric": "acc_norm",
362
+ "aggregation": "mean",
363
+ "higher_is_better": true
364
+ }
365
+ ],
366
+ "output_type": "multiple_choice",
367
+ "repeats": 1,
368
+ "should_decontaminate": true,
369
+ "doc_to_decontamination_query": "{{sentence}}"
370
+ },
371
+ "polish_8tags_regex": {
372
+ "task": "polish_8tags_regex",
373
+ "dataset_path": "sdadas/8tags",
374
+ "training_split": "train",
375
+ "validation_split": "validation",
376
+ "test_split": "test",
377
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
378
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
379
+ "description": "",
380
+ "target_delimiter": " ",
381
+ "fewshot_delimiter": "\n\n",
382
+ "num_fewshot": 0,
383
+ "metric_list": [
384
+ {
385
+ "metric": "exact_match",
386
+ "aggregation": "mean",
387
+ "higher_is_better": true
388
+ }
389
+ ],
390
+ "output_type": "generate_until",
391
+ "generation_kwargs": {
392
+ "until": [
393
+ ".",
394
+ ","
395
+ ],
396
+ "do_sample": false,
397
+ "temperature": 0.0,
398
+ "max_gen_toks": 50
399
+ },
400
+ "repeats": 1,
401
+ "filter_list": [
402
+ {
403
+ "name": "score-first",
404
+ "filter": [
405
+ {
406
+ "function": "regex",
407
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
408
+ },
409
+ {
410
+ "function": "take_first"
411
+ }
412
+ ]
413
+ }
414
+ ],
415
+ "should_decontaminate": true,
416
+ "doc_to_decontamination_query": "{{sentence}}"
417
+ },
418
+ "polish_belebele_regex": {
419
+ "task": "polish_belebele_regex",
420
+ "dataset_path": "facebook/belebele",
421
+ "test_split": "pol_Latn",
422
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
423
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
424
+ "description": "",
425
+ "target_delimiter": " ",
426
+ "fewshot_delimiter": "\n\n",
427
+ "num_fewshot": 0,
428
+ "metric_list": [
429
+ {
430
+ "metric": "exact_match",
431
+ "aggregation": "mean",
432
+ "higher_is_better": true
433
+ }
434
+ ],
435
+ "output_type": "generate_until",
436
+ "generation_kwargs": {
437
+ "until": [
438
+ ".",
439
+ ","
440
+ ],
441
+ "do_sample": false,
442
+ "temperature": 0.0,
443
+ "max_gen_toks": 50
444
+ },
445
+ "repeats": 1,
446
+ "filter_list": [
447
+ {
448
+ "name": "score-first",
449
+ "filter": [
450
+ {
451
+ "function": "regex",
452
+ "regex_pattern": "(\\b[ABCD]\\b)"
453
+ },
454
+ {
455
+ "function": "take_first"
456
+ }
457
+ ]
458
+ }
459
+ ],
460
+ "should_decontaminate": true,
461
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
462
+ },
463
+ "polish_dyk_multiple_choice": {
464
+ "task": "polish_dyk_multiple_choice",
465
+ "dataset_path": "allegro/klej-dyk",
466
+ "training_split": "train",
467
+ "test_split": "test",
468
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
469
+ "doc_to_target": "{{target|int}}",
470
+ "doc_to_choice": [
471
+ "Nie",
472
+ "Tak"
473
+ ],
474
+ "description": "",
475
+ "target_delimiter": " ",
476
+ "fewshot_delimiter": "\n\n",
477
+ "num_fewshot": 0,
478
+ "metric_list": [
479
+ {
480
+ "metric": "acc",
481
+ "aggregation": "mean",
482
+ "higher_is_better": true
483
+ },
484
+ {
485
+ "metric": "acc_norm",
486
+ "aggregation": "mean",
487
+ "higher_is_better": true
488
+ }
489
+ ],
490
+ "output_type": "multiple_choice",
491
+ "repeats": 1,
492
+ "should_decontaminate": true,
493
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
494
+ },
495
+ "polish_dyk_regex": {
496
+ "task": "polish_dyk_regex",
497
+ "dataset_path": "allegro/klej-dyk",
498
+ "training_split": "train",
499
+ "test_split": "test",
500
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
501
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
502
+ "description": "",
503
+ "target_delimiter": " ",
504
+ "fewshot_delimiter": "\n\n",
505
+ "num_fewshot": 0,
506
+ "metric_list": [
507
+ {
508
+ "metric": "exact_match",
509
+ "aggregation": "mean",
510
+ "higher_is_better": true
511
+ }
512
+ ],
513
+ "output_type": "generate_until",
514
+ "generation_kwargs": {
515
+ "until": [
516
+ ".",
517
+ ","
518
+ ],
519
+ "do_sample": false,
520
+ "temperature": 0.0,
521
+ "max_gen_toks": 50
522
+ },
523
+ "repeats": 1,
524
+ "filter_list": [
525
+ {
526
+ "name": "score-first",
527
+ "filter": [
528
+ {
529
+ "function": "regex",
530
+ "regex_pattern": "(\\b[ABCD]\\b)"
531
+ },
532
+ {
533
+ "function": "take_first"
534
+ }
535
+ ]
536
+ }
537
+ ],
538
+ "should_decontaminate": true,
539
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
540
+ },
541
+ "polish_ppc_multiple_choice": {
542
+ "task": "polish_ppc_multiple_choice",
543
+ "dataset_path": "djstrong/ppc",
544
+ "training_split": "train",
545
+ "validation_split": "validation",
546
+ "test_split": "test",
547
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
548
+ "doc_to_target": "{{label|int - 1}}",
549
+ "doc_to_choice": [
550
+ "A",
551
+ "B",
552
+ "C"
553
+ ],
554
+ "description": "",
555
+ "target_delimiter": " ",
556
+ "fewshot_delimiter": "\n\n",
557
+ "num_fewshot": 0,
558
+ "metric_list": [
559
+ {
560
+ "metric": "acc",
561
+ "aggregation": "mean",
562
+ "higher_is_better": true
563
+ },
564
+ {
565
+ "metric": "acc_norm",
566
+ "aggregation": "mean",
567
+ "higher_is_better": true
568
+ }
569
+ ],
570
+ "output_type": "multiple_choice",
571
+ "repeats": 1,
572
+ "should_decontaminate": true,
573
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
574
+ },
575
+ "polish_ppc_regex": {
576
+ "task": "polish_ppc_regex",
577
+ "dataset_path": "sdadas/ppc",
578
+ "training_split": "train",
579
+ "validation_split": "validation",
580
+ "test_split": "test",
581
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
582
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
583
+ "description": "",
584
+ "target_delimiter": " ",
585
+ "fewshot_delimiter": "\n\n",
586
+ "num_fewshot": 0,
587
+ "metric_list": [
588
+ {
589
+ "metric": "exact_match",
590
+ "aggregation": "mean",
591
+ "higher_is_better": true
592
+ }
593
+ ],
594
+ "output_type": "generate_until",
595
+ "generation_kwargs": {
596
+ "until": [
597
+ ".",
598
+ ","
599
+ ],
600
+ "do_sample": false,
601
+ "temperature": 0.0,
602
+ "max_gen_toks": 50
603
+ },
604
+ "repeats": 1,
605
+ "filter_list": [
606
+ {
607
+ "name": "score-first",
608
+ "filter": [
609
+ {
610
+ "function": "regex",
611
+ "regex_pattern": "(\\b[ABCD]\\b)"
612
+ },
613
+ {
614
+ "function": "take_first"
615
+ }
616
+ ]
617
+ }
618
+ ],
619
+ "should_decontaminate": true,
620
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
621
+ },
622
+ "polish_psc_multiple_choice": {
623
+ "task": "polish_psc_multiple_choice",
624
+ "dataset_path": "allegro/klej-psc",
625
+ "training_split": "train",
626
+ "test_split": "test",
627
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
628
+ "doc_to_target": "{{label|int}}",
629
+ "doc_to_choice": [
630
+ "Nie",
631
+ "Tak"
632
+ ],
633
+ "description": "",
634
+ "target_delimiter": " ",
635
+ "fewshot_delimiter": "\n\n",
636
+ "num_fewshot": 0,
637
+ "metric_list": [
638
+ {
639
+ "metric": "acc",
640
+ "aggregation": "mean",
641
+ "higher_is_better": true
642
+ },
643
+ {
644
+ "metric": "acc_norm",
645
+ "aggregation": "mean",
646
+ "higher_is_better": true
647
+ }
648
+ ],
649
+ "output_type": "multiple_choice",
650
+ "repeats": 1,
651
+ "should_decontaminate": true,
652
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
653
+ },
654
+ "polish_psc_regex": {
655
+ "task": "polish_psc_regex",
656
+ "dataset_path": "allegro/klej-psc",
657
+ "training_split": "train",
658
+ "test_split": "test",
659
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
660
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
661
+ "description": "",
662
+ "target_delimiter": " ",
663
+ "fewshot_delimiter": "\n\n",
664
+ "num_fewshot": 0,
665
+ "metric_list": [
666
+ {
667
+ "metric": "exact_match",
668
+ "aggregation": "mean",
669
+ "higher_is_better": true
670
+ }
671
+ ],
672
+ "output_type": "generate_until",
673
+ "generation_kwargs": {
674
+ "until": [
675
+ ".",
676
+ ","
677
+ ],
678
+ "do_sample": false,
679
+ "temperature": 0.0,
680
+ "max_gen_toks": 50
681
+ },
682
+ "repeats": 1,
683
+ "filter_list": [
684
+ {
685
+ "name": "score-first",
686
+ "filter": [
687
+ {
688
+ "function": "regex",
689
+ "regex_pattern": "(\\b[ABCD]\\b)"
690
+ },
691
+ {
692
+ "function": "take_first"
693
+ }
694
+ ]
695
+ }
696
+ ],
697
+ "should_decontaminate": true,
698
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
699
+ }
700
+ },
701
+ "versions": {
702
+ "belebele_pol_Latn": 0.0,
703
+ "polemo2_in": 1.0,
704
+ "polemo2_in_multiple_choice": "Yaml",
705
+ "polemo2_out": 1.0,
706
+ "polemo2_out_multiple_choice": "Yaml",
707
+ "polish": "N/A",
708
+ "polish_8tags_multiple_choice": "Yaml",
709
+ "polish_8tags_regex": "Yaml",
710
+ "polish_belebele_regex": "Yaml",
711
+ "polish_dyk_multiple_choice": "Yaml",
712
+ "polish_dyk_regex": "Yaml",
713
+ "polish_ppc_multiple_choice": "Yaml",
714
+ "polish_ppc_regex": "Yaml",
715
+ "polish_psc_multiple_choice": "Yaml",
716
+ "polish_psc_regex": "Yaml"
717
+ },
718
+ "n-shot": {
719
+ "belebele_pol_Latn": 0,
720
+ "polemo2_in": 0,
721
+ "polemo2_in_multiple_choice": 0,
722
+ "polemo2_out": 0,
723
+ "polemo2_out_multiple_choice": 0,
724
+ "polish": 0,
725
+ "polish_8tags_multiple_choice": 0,
726
+ "polish_8tags_regex": 0,
727
+ "polish_belebele_regex": 0,
728
+ "polish_dyk_multiple_choice": 0,
729
+ "polish_dyk_regex": 0,
730
+ "polish_ppc_multiple_choice": 0,
731
+ "polish_ppc_regex": 0,
732
+ "polish_psc_multiple_choice": 0,
733
+ "polish_psc_regex": 0
734
+ },
735
+ "config": {
736
+ "model": "hf",
737
+ "model_args": "pretrained=Nondzu/zephyr-speakleash-007-pl-8192-32-16-0.05",
738
+ "batch_size": "8",
739
+ "batch_sizes": [],
740
+ "device": "cuda:0",
741
+ "use_cache": null,
742
+ "limit": null,
743
+ "bootstrap_iters": 100000,
744
+ "gen_kwargs": null
745
+ },
746
+ "git_hash": null
747
+ }
polish_benchmarks-out17/results_zephyr-speakleash-007-pl-8192-32-16-0.05-5_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.6150078165711308,
5
+ "exact_match_stderr,score-first": 0.1279880115837021,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.7936288088642659,
10
+ "exact_match_stderr,score-first": 0.015071817101529623,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.6821862348178138,
15
+ "exact_match_stderr,score-first": 0.02097076956508622,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.6626258005489478,
20
+ "exact_match_stderr,score-first": 0.007151542126842266,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.6377777777777778,
25
+ "exact_match_stderr,score-first": 0.01603032734626064,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.640427599611273,
30
+ "exact_match_stderr,score-first": 0.014966892427527188,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.626,
35
+ "exact_match_stderr,score-first": 0.01530876736900636,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.21799628942486085,
40
+ "exact_match_stderr,score-first": 0.01258117218020259,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.6150078165711308,
47
+ "exact_match_stderr,score-first": 0.1279880115837021,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 5,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 5,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 5,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 5,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 5,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 5,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 5,
402
+ "polemo2_out": 5,
403
+ "polish_8tags_regex": 5,
404
+ "polish_belebele_regex": 5,
405
+ "polish_dyk_regex": 5,
406
+ "polish_generate": 5,
407
+ "polish_ppc_regex": 5,
408
+ "polish_psc_regex": 5
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=Nondzu/zephyr-speakleash-007-pl-8192-32-16-0.05",
413
+ "batch_size": "4",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
polish_benchmarks-out17/results_zephyr-speakleash-007-pl-8192-32-16-0.05-5_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.7073475768629495,
5
+ "acc_stderr,none": 0.05030717028335053,
6
+ "acc_norm,none": 0.7093277748827514,
7
+ "acc_norm_stderr,none": 0.049301586343680995,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.54,
12
+ "acc_stderr,none": 0.016622485006352963,
13
+ "acc_norm,none": 0.54,
14
+ "acc_norm_stderr,none": 0.016622485006352963,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.832409972299169,
19
+ "acc_stderr,none": 0.013909934700735784,
20
+ "acc_norm,none": 0.8116343490304709,
21
+ "acc_norm_stderr,none": 0.0145617453584378,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.6761133603238867,
26
+ "acc_stderr,none": 0.021075739218382666,
27
+ "acc_norm,none": 0.7327935222672065,
28
+ "acc_norm_stderr,none": 0.019929244821849684,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.692131747483989,
33
+ "acc_stderr,none": 0.006982105958572425,
34
+ "acc_norm,none": 0.6935041171088746,
35
+ "acc_norm_stderr,none": 0.00697342992568039,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.7862001943634597,
40
+ "acc_stderr,none": 0.012787147332879332,
41
+ "acc_norm,none": 0.7862001943634597,
42
+ "acc_norm_stderr,none": 0.012787147332879332,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.687,
47
+ "acc_stderr,none": 0.014671272822977883,
48
+ "acc_norm,none": 0.687,
49
+ "acc_norm_stderr,none": 0.014671272822977883,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.7829313543599258,
54
+ "acc_stderr,none": 0.012561819232608731,
55
+ "acc_norm,none": 0.7829313543599258,
56
+ "acc_norm_stderr,none": 0.012561819232608731,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.7073475768629495,
63
+ "acc_stderr,none": 0.05030717028335053,
64
+ "acc_norm,none": 0.7093277748827514,
65
+ "acc_norm_stderr,none": 0.049301586343680995,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 5,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 5,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 5,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "djstrong/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 5,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 5,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "djstrong/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 5,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 5,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 5,
337
+ "polemo2_in_multiple_choice": 5,
338
+ "polemo2_out_multiple_choice": 5,
339
+ "polish_8tags_multiple_choice": 5,
340
+ "polish_dyk_multiple_choice": 5,
341
+ "polish_mc": 5,
342
+ "polish_ppc_multiple_choice": 5,
343
+ "polish_psc_multiple_choice": 5
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=Nondzu/zephyr-speakleash-007-pl-8192-32-16-0.05",
348
+ "batch_size": "4",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
polish_benchmarks-out2/results_APT3-1B-Base-0_polish/results.json ADDED
@@ -0,0 +1,911 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.2469516433240102,
5
+ "acc_stderr,none": 0.002521729293912833,
6
+ "acc_norm,none": 0.18576755323628613,
7
+ "acc_norm_stderr,none": 0.005694795296320046,
8
+ "f1,score-first": 0.045672385611171505,
9
+ "f1_stderr,score-first": 0.024089620479639893,
10
+ "accuracy,score-first": 0.045672385611171505,
11
+ "accuracy_stderr,score-first": 0.024089620479639893,
12
+ "alias": "polish"
13
+ },
14
+ "belebele_pol_Latn": {
15
+ "acc,none": 0.22111111111111112,
16
+ "acc_stderr,none": 0.013840863699859518,
17
+ "acc_norm,none": 0.22111111111111112,
18
+ "acc_norm_stderr,none": 0.013840863699859518,
19
+ "alias": " - belebele_pol_Latn"
20
+ },
21
+ "polemo2_in": {
22
+ "f1,score-first": 0.03185595567867036,
23
+ "f1_stderr,score-first": 0.006540300095172154,
24
+ "accuracy,score-first": 0.03185595567867036,
25
+ "accuracy_stderr,score-first": 0.006540300095172154,
26
+ "alias": " - polemo2_in"
27
+ },
28
+ "polemo2_in_multiple_choice": {
29
+ "acc,none": 0.3060941828254848,
30
+ "acc_stderr,none": 0.017163670550803943,
31
+ "acc_norm,none": 0.16204986149584488,
32
+ "acc_norm_stderr,none": 0.01372352849077817,
33
+ "alias": " - polemo2_in_multiple_choice"
34
+ },
35
+ "polemo2_out": {
36
+ "f1,score-first": 0.006072874493927126,
37
+ "f1_stderr,score-first": 0.003499056577953027,
38
+ "accuracy,score-first": 0.006072874493927126,
39
+ "accuracy_stderr,score-first": 0.003499056577953027,
40
+ "alias": " - polemo2_out"
41
+ },
42
+ "polemo2_out_multiple_choice": {
43
+ "acc,none": 0.31781376518218624,
44
+ "acc_stderr,none": 0.020970769565086228,
45
+ "acc_norm,none": 0.35020242914979755,
46
+ "acc_norm_stderr,none": 0.021484495459914724,
47
+ "alias": " - polemo2_out_multiple_choice"
48
+ },
49
+ "polish_8tags_multiple_choice": {
50
+ "acc,none": 0.21294602012808783,
51
+ "acc_stderr,none": 0.006192223457856202,
52
+ "acc_norm,none": 0.10612991765782251,
53
+ "acc_norm_stderr,none": 0.004658710368821291,
54
+ "alias": " - polish_8tags_multiple_choice"
55
+ },
56
+ "polish_8tags_regex": {
57
+ "f1,score-first": 0.08828911253430924,
58
+ "f1_stderr,score-first": 0.0042913283228207064,
59
+ "accuracy,score-first": 0.08828911253430924,
60
+ "accuracy_stderr,score-first": 0.0042913283228207064,
61
+ "alias": " - polish_8tags_regex"
62
+ },
63
+ "polish_belebele_regex": {
64
+ "f1,score-first": 0.06333333333333334,
65
+ "f1_stderr,score-first": 0.008123232055260209,
66
+ "accuracy,score-first": 0.06333333333333334,
67
+ "accuracy_stderr,score-first": 0.008123232055260209,
68
+ "alias": " - polish_belebele_regex"
69
+ },
70
+ "polish_dyk_multiple_choice": {
71
+ "acc,none": 0.2196307094266278,
72
+ "acc_stderr,none": 0.01291219452361766,
73
+ "acc_norm,none": 0.2196307094266278,
74
+ "acc_norm_stderr,none": 0.01291219452361766,
75
+ "alias": " - polish_dyk_multiple_choice"
76
+ },
77
+ "polish_dyk_regex": {
78
+ "f1,score-first": 0.0,
79
+ "f1_stderr,score-first": 0.0,
80
+ "accuracy,score-first": 0.0,
81
+ "accuracy_stderr,score-first": 0.0,
82
+ "alias": " - polish_dyk_regex"
83
+ },
84
+ "polish_ppc_multiple_choice": {
85
+ "acc,none": 0.419,
86
+ "acc_stderr,none": 0.015610338967577797,
87
+ "acc_norm,none": 0.419,
88
+ "acc_norm_stderr,none": 0.015610338967577797,
89
+ "alias": " - polish_ppc_multiple_choice"
90
+ },
91
+ "polish_ppc_regex": {
92
+ "f1,score-first": 0.003,
93
+ "f1_stderr,score-first": 0.0017303161543469228,
94
+ "accuracy,score-first": 0.003,
95
+ "accuracy_stderr,score-first": 0.0017303161543469228,
96
+ "alias": " - polish_ppc_regex"
97
+ },
98
+ "polish_psc_multiple_choice": {
99
+ "acc,none": 0.3079777365491651,
100
+ "acc_stderr,none": 0.014067335427669261,
101
+ "acc_norm,none": 0.3079777365491651,
102
+ "acc_norm_stderr,none": 0.014067335427669261,
103
+ "alias": " - polish_psc_multiple_choice"
104
+ },
105
+ "polish_psc_regex": {
106
+ "f1,score-first": 0.06771799628942486,
107
+ "f1_stderr,score-first": 0.0076562807170192275,
108
+ "accuracy,score-first": 0.06771799628942486,
109
+ "accuracy_stderr,score-first": 0.0076562807170192275,
110
+ "alias": " - polish_psc_regex"
111
+ }
112
+ },
113
+ "groups": {
114
+ "polish": {
115
+ "acc,none": 0.2469516433240102,
116
+ "acc_stderr,none": 0.002521729293912833,
117
+ "acc_norm,none": 0.18576755323628613,
118
+ "acc_norm_stderr,none": 0.005694795296320046,
119
+ "f1,score-first": 0.045672385611171505,
120
+ "f1_stderr,score-first": 0.024089620479639893,
121
+ "accuracy,score-first": 0.045672385611171505,
122
+ "accuracy_stderr,score-first": 0.024089620479639893,
123
+ "alias": "polish"
124
+ }
125
+ },
126
+ "configs": {
127
+ "belebele_pol_Latn": {
128
+ "task": "belebele_pol_Latn",
129
+ "group": "belebele",
130
+ "dataset_path": "facebook/belebele",
131
+ "test_split": "pol_Latn",
132
+ "fewshot_split": "pol_Latn",
133
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
134
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
135
+ "doc_to_choice": [
136
+ "A",
137
+ "B",
138
+ "C",
139
+ "D"
140
+ ],
141
+ "description": "",
142
+ "target_delimiter": " ",
143
+ "fewshot_delimiter": "\n\n",
144
+ "fewshot_config": {
145
+ "sampler": "first_n"
146
+ },
147
+ "num_fewshot": 0,
148
+ "metric_list": [
149
+ {
150
+ "metric": "acc",
151
+ "aggregation": "mean",
152
+ "higher_is_better": true
153
+ },
154
+ {
155
+ "metric": "acc_norm",
156
+ "aggregation": "mean",
157
+ "higher_is_better": true
158
+ }
159
+ ],
160
+ "output_type": "multiple_choice",
161
+ "repeats": 1,
162
+ "should_decontaminate": true,
163
+ "doc_to_decontamination_query": "{{question}}",
164
+ "metadata": {
165
+ "version": 0.0
166
+ }
167
+ },
168
+ "polemo2_in": {
169
+ "task": "polemo2_in",
170
+ "group": [
171
+ "polemo2"
172
+ ],
173
+ "dataset_path": "allegro/klej-polemo2-in",
174
+ "training_split": "train",
175
+ "validation_split": "validation",
176
+ "test_split": "test",
177
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
178
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
179
+ "description": "",
180
+ "target_delimiter": " ",
181
+ "fewshot_delimiter": "\n\n",
182
+ "num_fewshot": 0,
183
+ "metric_list": [
184
+ {
185
+ "metric": "f1",
186
+ "aggregation": "mean",
187
+ "higher_is_better": true,
188
+ "hf_evaluate": true,
189
+ "average": "micro"
190
+ },
191
+ {
192
+ "metric": "accuracy",
193
+ "aggregation": "mean",
194
+ "higher_is_better": true
195
+ }
196
+ ],
197
+ "output_type": "generate_until",
198
+ "generation_kwargs": {
199
+ "until": [
200
+ ".",
201
+ ","
202
+ ],
203
+ "do_sample": false,
204
+ "temperature": 0.0,
205
+ "max_gen_toks": 50
206
+ },
207
+ "repeats": 1,
208
+ "filter_list": [
209
+ {
210
+ "name": "score-first",
211
+ "filter": [
212
+ {
213
+ "function": "regex",
214
+ "regex_pattern": "(\\b[ABCD]\\b)"
215
+ },
216
+ {
217
+ "function": "take_first"
218
+ },
219
+ {
220
+ "function": "map",
221
+ "mapping_dict": {
222
+ "A": 0,
223
+ "B": 1,
224
+ "C": 2,
225
+ "D": 3
226
+ },
227
+ "default_value": -1
228
+ },
229
+ {
230
+ "function": "take_first"
231
+ }
232
+ ]
233
+ }
234
+ ],
235
+ "should_decontaminate": true,
236
+ "doc_to_decontamination_query": "{{sentence}}",
237
+ "metadata": {
238
+ "version": 1.0
239
+ }
240
+ },
241
+ "polemo2_in_multiple_choice": {
242
+ "task": "polemo2_in_multiple_choice",
243
+ "group": [
244
+ "polemo2_mc"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-in",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
251
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
252
+ "doc_to_choice": [
253
+ "Neutralny",
254
+ "Negatywny",
255
+ "Pozytywny",
256
+ "Niejednoznaczny"
257
+ ],
258
+ "description": "",
259
+ "target_delimiter": " ",
260
+ "fewshot_delimiter": "\n\n",
261
+ "num_fewshot": 0,
262
+ "metric_list": [
263
+ {
264
+ "metric": "acc",
265
+ "aggregation": "mean",
266
+ "higher_is_better": true
267
+ },
268
+ {
269
+ "metric": "acc_norm",
270
+ "aggregation": "mean",
271
+ "higher_is_better": true
272
+ }
273
+ ],
274
+ "output_type": "multiple_choice",
275
+ "repeats": 1,
276
+ "should_decontaminate": true,
277
+ "doc_to_decontamination_query": "{{sentence}}"
278
+ },
279
+ "polemo2_out": {
280
+ "task": "polemo2_out",
281
+ "group": [
282
+ "polemo2"
283
+ ],
284
+ "dataset_path": "allegro/klej-polemo2-out",
285
+ "training_split": "train",
286
+ "validation_split": "validation",
287
+ "test_split": "test",
288
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
289
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
290
+ "description": "",
291
+ "target_delimiter": " ",
292
+ "fewshot_delimiter": "\n\n",
293
+ "num_fewshot": 0,
294
+ "metric_list": [
295
+ {
296
+ "metric": "f1",
297
+ "aggregation": "mean",
298
+ "higher_is_better": true,
299
+ "hf_evaluate": true,
300
+ "average": "micro"
301
+ },
302
+ {
303
+ "metric": "accuracy",
304
+ "aggregation": "mean",
305
+ "higher_is_better": true
306
+ }
307
+ ],
308
+ "output_type": "generate_until",
309
+ "generation_kwargs": {
310
+ "until": [
311
+ ".",
312
+ ","
313
+ ],
314
+ "do_sample": false,
315
+ "temperature": 0.0,
316
+ "max_gen_toks": 50
317
+ },
318
+ "repeats": 1,
319
+ "filter_list": [
320
+ {
321
+ "name": "score-first",
322
+ "filter": [
323
+ {
324
+ "function": "regex",
325
+ "regex_pattern": "(\\b[ABCD]\\b)"
326
+ },
327
+ {
328
+ "function": "take_first"
329
+ },
330
+ {
331
+ "function": "map",
332
+ "mapping_dict": {
333
+ "A": 0,
334
+ "B": 1,
335
+ "C": 2,
336
+ "D": 3
337
+ },
338
+ "default_value": -1
339
+ },
340
+ {
341
+ "function": "take_first"
342
+ }
343
+ ]
344
+ }
345
+ ],
346
+ "should_decontaminate": true,
347
+ "doc_to_decontamination_query": "{{sentence}}",
348
+ "metadata": {
349
+ "version": 1.0
350
+ }
351
+ },
352
+ "polemo2_out_multiple_choice": {
353
+ "task": "polemo2_out_multiple_choice",
354
+ "group": [
355
+ "polemo2_mc"
356
+ ],
357
+ "dataset_path": "allegro/klej-polemo2-out",
358
+ "training_split": "train",
359
+ "validation_split": "validation",
360
+ "test_split": "test",
361
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
362
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
363
+ "doc_to_choice": [
364
+ "Neutralny",
365
+ "Negatywny",
366
+ "Pozytywny",
367
+ "Niejednoznaczny"
368
+ ],
369
+ "description": "",
370
+ "target_delimiter": " ",
371
+ "fewshot_delimiter": "\n\n",
372
+ "num_fewshot": 0,
373
+ "metric_list": [
374
+ {
375
+ "metric": "acc",
376
+ "aggregation": "mean",
377
+ "higher_is_better": true
378
+ },
379
+ {
380
+ "metric": "acc_norm",
381
+ "aggregation": "mean",
382
+ "higher_is_better": true
383
+ }
384
+ ],
385
+ "output_type": "multiple_choice",
386
+ "repeats": 1,
387
+ "should_decontaminate": true,
388
+ "doc_to_decontamination_query": "{{sentence}}"
389
+ },
390
+ "polish_8tags_multiple_choice": {
391
+ "task": "polish_8tags_multiple_choice",
392
+ "dataset_path": "djstrong/8tags",
393
+ "training_split": "train",
394
+ "test_split": "test",
395
+ "fewshot_split": "train",
396
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
397
+ "doc_to_target": "{{label|int}}",
398
+ "doc_to_choice": [
399
+ "Film",
400
+ "Historia",
401
+ "Jedzenie",
402
+ "Medycyna",
403
+ "Motoryzacja",
404
+ "Praca",
405
+ "Sport",
406
+ "Technologie"
407
+ ],
408
+ "description": "",
409
+ "target_delimiter": " ",
410
+ "fewshot_delimiter": "\n\n",
411
+ "num_fewshot": 0,
412
+ "metric_list": [
413
+ {
414
+ "metric": "acc",
415
+ "aggregation": "mean",
416
+ "higher_is_better": true
417
+ },
418
+ {
419
+ "metric": "acc_norm",
420
+ "aggregation": "mean",
421
+ "higher_is_better": true
422
+ }
423
+ ],
424
+ "output_type": "multiple_choice",
425
+ "repeats": 1,
426
+ "should_decontaminate": true,
427
+ "doc_to_decontamination_query": "{{sentence}}"
428
+ },
429
+ "polish_8tags_regex": {
430
+ "task": "polish_8tags_regex",
431
+ "dataset_path": "djstrong/8tags",
432
+ "training_split": "train",
433
+ "validation_split": "validation",
434
+ "test_split": "test",
435
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
436
+ "doc_to_target": "{{label|int}}",
437
+ "description": "",
438
+ "target_delimiter": " ",
439
+ "fewshot_delimiter": "\n\n",
440
+ "num_fewshot": 0,
441
+ "metric_list": [
442
+ {
443
+ "metric": "f1",
444
+ "aggregation": "mean",
445
+ "higher_is_better": true,
446
+ "hf_evaluate": true,
447
+ "average": "micro"
448
+ },
449
+ {
450
+ "metric": "accuracy",
451
+ "aggregation": "mean",
452
+ "higher_is_better": true
453
+ }
454
+ ],
455
+ "output_type": "generate_until",
456
+ "generation_kwargs": {
457
+ "until": [
458
+ ".",
459
+ ","
460
+ ],
461
+ "do_sample": false,
462
+ "temperature": 0.0,
463
+ "max_gen_toks": 50
464
+ },
465
+ "repeats": 1,
466
+ "filter_list": [
467
+ {
468
+ "name": "score-first",
469
+ "filter": [
470
+ {
471
+ "function": "regex",
472
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
473
+ },
474
+ {
475
+ "function": "take_first"
476
+ },
477
+ {
478
+ "function": "map",
479
+ "mapping_dict": {
480
+ "A": 0,
481
+ "B": 1,
482
+ "C": 2,
483
+ "D": 3,
484
+ "E": 4,
485
+ "F": 5,
486
+ "G": 6,
487
+ "H": 7
488
+ },
489
+ "default_value": -1
490
+ },
491
+ {
492
+ "function": "take_first"
493
+ }
494
+ ]
495
+ }
496
+ ],
497
+ "should_decontaminate": true,
498
+ "doc_to_decontamination_query": "{{sentence}}"
499
+ },
500
+ "polish_belebele_regex": {
501
+ "task": "polish_belebele_regex",
502
+ "dataset_path": "facebook/belebele",
503
+ "test_split": "pol_Latn",
504
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
505
+ "doc_to_target": "{{correct_answer_num|int - 1}}",
506
+ "description": "",
507
+ "target_delimiter": " ",
508
+ "fewshot_delimiter": "\n\n",
509
+ "num_fewshot": 0,
510
+ "metric_list": [
511
+ {
512
+ "metric": "f1",
513
+ "aggregation": "mean",
514
+ "higher_is_better": true,
515
+ "hf_evaluate": true,
516
+ "average": "micro"
517
+ },
518
+ {
519
+ "metric": "accuracy",
520
+ "aggregation": "mean",
521
+ "higher_is_better": true
522
+ }
523
+ ],
524
+ "output_type": "generate_until",
525
+ "generation_kwargs": {
526
+ "until": [
527
+ ".",
528
+ ","
529
+ ],
530
+ "do_sample": false,
531
+ "temperature": 0.0,
532
+ "max_gen_toks": 50
533
+ },
534
+ "repeats": 1,
535
+ "filter_list": [
536
+ {
537
+ "name": "score-first",
538
+ "filter": [
539
+ {
540
+ "function": "regex",
541
+ "regex_pattern": "(\\b[ABCD]\\b)"
542
+ },
543
+ {
544
+ "function": "take_first"
545
+ },
546
+ {
547
+ "function": "map",
548
+ "mapping_dict": {
549
+ "A": 0,
550
+ "B": 1,
551
+ "C": 2,
552
+ "D": 3
553
+ },
554
+ "default_value": -1
555
+ },
556
+ {
557
+ "function": "take_first"
558
+ }
559
+ ]
560
+ }
561
+ ],
562
+ "should_decontaminate": true,
563
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
564
+ },
565
+ "polish_dyk_multiple_choice": {
566
+ "task": "polish_dyk_multiple_choice",
567
+ "dataset_path": "allegro/klej-dyk",
568
+ "training_split": "train",
569
+ "test_split": "test",
570
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
571
+ "doc_to_target": "{{target|int}}",
572
+ "doc_to_choice": [
573
+ "Nie",
574
+ "Tak"
575
+ ],
576
+ "description": "",
577
+ "target_delimiter": " ",
578
+ "fewshot_delimiter": "\n\n",
579
+ "num_fewshot": 0,
580
+ "metric_list": [
581
+ {
582
+ "metric": "acc",
583
+ "aggregation": "mean",
584
+ "higher_is_better": true
585
+ },
586
+ {
587
+ "metric": "acc_norm",
588
+ "aggregation": "mean",
589
+ "higher_is_better": true
590
+ }
591
+ ],
592
+ "output_type": "multiple_choice",
593
+ "repeats": 1,
594
+ "should_decontaminate": true,
595
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
596
+ },
597
+ "polish_dyk_regex": {
598
+ "task": "polish_dyk_regex",
599
+ "dataset_path": "allegro/klej-dyk",
600
+ "training_split": "train",
601
+ "test_split": "test",
602
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
603
+ "doc_to_target": "{{target|int+1}}",
604
+ "description": "",
605
+ "target_delimiter": " ",
606
+ "fewshot_delimiter": "\n\n",
607
+ "num_fewshot": 0,
608
+ "metric_list": [
609
+ {
610
+ "metric": "f1",
611
+ "aggregation": "mean",
612
+ "higher_is_better": true,
613
+ "hf_evaluate": true,
614
+ "average": "micro"
615
+ },
616
+ {
617
+ "metric": "accuracy",
618
+ "aggregation": "mean",
619
+ "higher_is_better": true
620
+ }
621
+ ],
622
+ "output_type": "generate_until",
623
+ "generation_kwargs": {
624
+ "until": [
625
+ ".",
626
+ ","
627
+ ],
628
+ "do_sample": false,
629
+ "temperature": 0.0,
630
+ "max_gen_toks": 50
631
+ },
632
+ "repeats": 1,
633
+ "filter_list": [
634
+ {
635
+ "name": "score-first",
636
+ "filter": [
637
+ {
638
+ "function": "regex",
639
+ "regex_pattern": "(\\b[ABCD]\\b)"
640
+ },
641
+ {
642
+ "function": "take_first"
643
+ },
644
+ {
645
+ "function": "map",
646
+ "mapping_dict": {
647
+ "A": 0,
648
+ "B": 1,
649
+ "C": 2,
650
+ "D": 3
651
+ },
652
+ "default_value": -1
653
+ },
654
+ {
655
+ "function": "take_first"
656
+ }
657
+ ]
658
+ }
659
+ ],
660
+ "should_decontaminate": true,
661
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
662
+ },
663
+ "polish_ppc_multiple_choice": {
664
+ "task": "polish_ppc_multiple_choice",
665
+ "dataset_path": "djstrong/ppc",
666
+ "training_split": "train",
667
+ "validation_split": "validation",
668
+ "test_split": "test",
669
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
670
+ "doc_to_target": "{{label|int - 1}}",
671
+ "doc_to_choice": [
672
+ "A",
673
+ "B",
674
+ "C"
675
+ ],
676
+ "description": "",
677
+ "target_delimiter": " ",
678
+ "fewshot_delimiter": "\n\n",
679
+ "num_fewshot": 0,
680
+ "metric_list": [
681
+ {
682
+ "metric": "acc",
683
+ "aggregation": "mean",
684
+ "higher_is_better": true
685
+ },
686
+ {
687
+ "metric": "acc_norm",
688
+ "aggregation": "mean",
689
+ "higher_is_better": true
690
+ }
691
+ ],
692
+ "output_type": "multiple_choice",
693
+ "repeats": 1,
694
+ "should_decontaminate": true,
695
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
696
+ },
697
+ "polish_ppc_regex": {
698
+ "task": "polish_ppc_regex",
699
+ "dataset_path": "djstrong/ppc",
700
+ "training_split": "train",
701
+ "validation_split": "validation",
702
+ "test_split": "test",
703
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
704
+ "doc_to_target": "{{label|int}}",
705
+ "description": "",
706
+ "target_delimiter": " ",
707
+ "fewshot_delimiter": "\n\n",
708
+ "num_fewshot": 0,
709
+ "metric_list": [
710
+ {
711
+ "metric": "f1",
712
+ "aggregation": "mean",
713
+ "higher_is_better": true,
714
+ "hf_evaluate": true,
715
+ "average": "micro"
716
+ },
717
+ {
718
+ "metric": "accuracy",
719
+ "aggregation": "mean",
720
+ "higher_is_better": true
721
+ }
722
+ ],
723
+ "output_type": "generate_until",
724
+ "generation_kwargs": {
725
+ "until": [
726
+ ".",
727
+ ","
728
+ ],
729
+ "do_sample": false,
730
+ "temperature": 0.0,
731
+ "max_gen_toks": 50
732
+ },
733
+ "repeats": 1,
734
+ "filter_list": [
735
+ {
736
+ "name": "score-first",
737
+ "filter": [
738
+ {
739
+ "function": "regex",
740
+ "regex_pattern": "(\\b[ABCD]\\b)"
741
+ },
742
+ {
743
+ "function": "take_first"
744
+ },
745
+ {
746
+ "function": "map",
747
+ "mapping_dict": {
748
+ "A": 0,
749
+ "B": 1,
750
+ "C": 2,
751
+ "D": 3
752
+ },
753
+ "default_value": -1
754
+ },
755
+ {
756
+ "function": "take_first"
757
+ }
758
+ ]
759
+ }
760
+ ],
761
+ "should_decontaminate": true,
762
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
763
+ },
764
+ "polish_psc_multiple_choice": {
765
+ "task": "polish_psc_multiple_choice",
766
+ "dataset_path": "allegro/klej-psc",
767
+ "training_split": "train",
768
+ "test_split": "test",
769
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
770
+ "doc_to_target": "{{label|int}}",
771
+ "doc_to_choice": [
772
+ "Nie",
773
+ "Tak"
774
+ ],
775
+ "description": "",
776
+ "target_delimiter": " ",
777
+ "fewshot_delimiter": "\n\n",
778
+ "num_fewshot": 0,
779
+ "metric_list": [
780
+ {
781
+ "metric": "acc",
782
+ "aggregation": "mean",
783
+ "higher_is_better": true
784
+ },
785
+ {
786
+ "metric": "acc_norm",
787
+ "aggregation": "mean",
788
+ "higher_is_better": true
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
795
+ },
796
+ "polish_psc_regex": {
797
+ "task": "polish_psc_regex",
798
+ "dataset_path": "allegro/klej-psc",
799
+ "training_split": "train",
800
+ "test_split": "test",
801
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
802
+ "doc_to_target": "{{label|int + 1}}",
803
+ "description": "",
804
+ "target_delimiter": " ",
805
+ "fewshot_delimiter": "\n\n",
806
+ "num_fewshot": 0,
807
+ "metric_list": [
808
+ {
809
+ "metric": "f1",
810
+ "aggregation": "mean",
811
+ "higher_is_better": true,
812
+ "hf_evaluate": true,
813
+ "average": "micro"
814
+ },
815
+ {
816
+ "metric": "accuracy",
817
+ "aggregation": "mean",
818
+ "higher_is_better": true
819
+ }
820
+ ],
821
+ "output_type": "generate_until",
822
+ "generation_kwargs": {
823
+ "until": [
824
+ ".",
825
+ ","
826
+ ],
827
+ "do_sample": false,
828
+ "temperature": 0.0,
829
+ "max_gen_toks": 50
830
+ },
831
+ "repeats": 1,
832
+ "filter_list": [
833
+ {
834
+ "name": "score-first",
835
+ "filter": [
836
+ {
837
+ "function": "regex",
838
+ "regex_pattern": "(\\b[ABCD]\\b)"
839
+ },
840
+ {
841
+ "function": "take_first"
842
+ },
843
+ {
844
+ "function": "map",
845
+ "mapping_dict": {
846
+ "A": 0,
847
+ "B": 1,
848
+ "C": 2,
849
+ "D": 3
850
+ },
851
+ "default_value": -1
852
+ },
853
+ {
854
+ "function": "take_first"
855
+ }
856
+ ]
857
+ }
858
+ ],
859
+ "should_decontaminate": true,
860
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
861
+ }
862
+ },
863
+ "versions": {
864
+ "belebele_pol_Latn": 0.0,
865
+ "polemo2_in": 1.0,
866
+ "polemo2_in_multiple_choice": "Yaml",
867
+ "polemo2_out": 1.0,
868
+ "polemo2_out_multiple_choice": "Yaml",
869
+ "polish": "N/A",
870
+ "polish_8tags_multiple_choice": "Yaml",
871
+ "polish_8tags_regex": "Yaml",
872
+ "polish_belebele_regex": "Yaml",
873
+ "polish_dyk_multiple_choice": "Yaml",
874
+ "polish_dyk_regex": "Yaml",
875
+ "polish_ppc_multiple_choice": "Yaml",
876
+ "polish_ppc_regex": "Yaml",
877
+ "polish_psc_multiple_choice": "Yaml",
878
+ "polish_psc_regex": "Yaml"
879
+ },
880
+ "n-shot": {
881
+ "belebele_pol_Latn": 0,
882
+ "polemo2_in": 0,
883
+ "polemo2_in_multiple_choice": 0,
884
+ "polemo2_out": 0,
885
+ "polemo2_out_multiple_choice": 0,
886
+ "polish": 0,
887
+ "polish_8tags_multiple_choice": 0,
888
+ "polish_8tags_regex": 0,
889
+ "polish_belebele_regex": 0,
890
+ "polish_dyk_multiple_choice": 0,
891
+ "polish_dyk_regex": 0,
892
+ "polish_ppc_multiple_choice": 0,
893
+ "polish_ppc_regex": 0,
894
+ "polish_psc_multiple_choice": 0,
895
+ "polish_psc_regex": 0
896
+ },
897
+ "config": {
898
+ "model": "hf",
899
+ "model_args": "pretrained=Azurro/APT3-1B-Base",
900
+ "batch_size": "auto",
901
+ "batch_sizes": [
902
+ 64
903
+ ],
904
+ "device": "cuda:0",
905
+ "use_cache": null,
906
+ "limit": null,
907
+ "bootstrap_iters": 100000,
908
+ "gen_kwargs": null
909
+ },
910
+ "git_hash": null
911
+ }
polish_benchmarks-out2/results_APT3-1B-Base-5_polish/results.json ADDED
@@ -0,0 +1,911 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.3072715967681741,
5
+ "acc_stderr,none": 0.01969090193138441,
6
+ "acc_norm,none": 0.2568383472880424,
7
+ "acc_norm_stderr,none": 0.025431041932479657,
8
+ "f1,score-first": 0.005671732362664017,
9
+ "f1_stderr,score-first": 0.0060835998620534645,
10
+ "accuracy,score-first": 0.005671732362664017,
11
+ "accuracy_stderr,score-first": 0.0060835998620534645,
12
+ "alias": "polish"
13
+ },
14
+ "belebele_pol_Latn": {
15
+ "acc,none": 0.25666666666666665,
16
+ "acc_stderr,none": 0.014567891342380032,
17
+ "acc_norm,none": 0.25666666666666665,
18
+ "acc_norm_stderr,none": 0.014567891342380032,
19
+ "alias": " - belebele_pol_Latn"
20
+ },
21
+ "polemo2_in": {
22
+ "f1,score-first": 0.0,
23
+ "f1_stderr,score-first": 0.0,
24
+ "accuracy,score-first": 0.0,
25
+ "accuracy_stderr,score-first": 0.0,
26
+ "alias": " - polemo2_in"
27
+ },
28
+ "polemo2_in_multiple_choice": {
29
+ "acc,none": 0.389196675900277,
30
+ "acc_stderr,none": 0.018157983819187574,
31
+ "acc_norm,none": 0.2673130193905817,
32
+ "acc_norm_stderr,none": 0.016481686935413936,
33
+ "alias": " - polemo2_in_multiple_choice"
34
+ },
35
+ "polemo2_out": {
36
+ "f1,score-first": 0.0,
37
+ "f1_stderr,score-first": 0.0,
38
+ "accuracy,score-first": 0.0,
39
+ "accuracy_stderr,score-first": 0.0,
40
+ "alias": " - polemo2_out"
41
+ },
42
+ "polemo2_out_multiple_choice": {
43
+ "acc,none": 0.37449392712550605,
44
+ "acc_stderr,none": 0.021797901981018206,
45
+ "acc_norm,none": 0.36639676113360325,
46
+ "acc_norm_stderr,none": 0.021700065645408245,
47
+ "alias": " - polemo2_out_multiple_choice"
48
+ },
49
+ "polish_8tags_multiple_choice": {
50
+ "acc,none": 0.20288197621225984,
51
+ "acc_stderr,none": 0.00608264743751534,
52
+ "acc_norm,none": 0.11939615736505033,
53
+ "acc_norm_stderr,none": 0.004904503623515018,
54
+ "alias": " - polish_8tags_multiple_choice"
55
+ },
56
+ "polish_8tags_regex": {
57
+ "f1,score-first": 0.01989935956084172,
58
+ "f1_stderr,score-first": 0.0021123432094103728,
59
+ "accuracy,score-first": 0.01989935956084172,
60
+ "accuracy_stderr,score-first": 0.0021123432094103728,
61
+ "alias": " - polish_8tags_regex"
62
+ },
63
+ "polish_belebele_regex": {
64
+ "f1,score-first": 0.0011111111111111111,
65
+ "f1_stderr,score-first": 0.0011111111111111124,
66
+ "accuracy,score-first": 0.0011111111111111111,
67
+ "accuracy_stderr,score-first": 0.0011111111111111124,
68
+ "alias": " - polish_belebele_regex"
69
+ },
70
+ "polish_dyk_multiple_choice": {
71
+ "acc,none": 0.7677356656948494,
72
+ "acc_stderr,none": 0.013170448486721168,
73
+ "acc_norm,none": 0.7677356656948494,
74
+ "acc_norm_stderr,none": 0.013170448486721168,
75
+ "alias": " - polish_dyk_multiple_choice"
76
+ },
77
+ "polish_dyk_regex": {
78
+ "f1,score-first": 0.0009718172983479105,
79
+ "f1_stderr,score-first": 0.0009718172983479058,
80
+ "accuracy,score-first": 0.0009718172983479105,
81
+ "accuracy_stderr,score-first": 0.0009718172983479058,
82
+ "alias": " - polish_dyk_regex"
83
+ },
84
+ "polish_ppc_multiple_choice": {
85
+ "acc,none": 0.329,
86
+ "acc_stderr,none": 0.014865395385928367,
87
+ "acc_norm,none": 0.329,
88
+ "acc_norm_stderr,none": 0.014865395385928367,
89
+ "alias": " - polish_ppc_multiple_choice"
90
+ },
91
+ "polish_ppc_regex": {
92
+ "f1,score-first": 0.0,
93
+ "f1_stderr,score-first": 0.0,
94
+ "accuracy,score-first": 0.0,
95
+ "accuracy_stderr,score-first": 0.0,
96
+ "alias": " - polish_ppc_regex"
97
+ },
98
+ "polish_psc_multiple_choice": {
99
+ "acc,none": 0.588126159554731,
100
+ "acc_stderr,none": 0.014997176459791973,
101
+ "acc_norm,none": 0.588126159554731,
102
+ "acc_norm_stderr,none": 0.014997176459791973,
103
+ "alias": " - polish_psc_multiple_choice"
104
+ },
105
+ "polish_psc_regex": {
106
+ "f1,score-first": 0.0,
107
+ "f1_stderr,score-first": 0.0,
108
+ "accuracy,score-first": 0.0,
109
+ "accuracy_stderr,score-first": 0.0,
110
+ "alias": " - polish_psc_regex"
111
+ }
112
+ },
113
+ "groups": {
114
+ "polish": {
115
+ "acc,none": 0.3072715967681741,
116
+ "acc_stderr,none": 0.01969090193138441,
117
+ "acc_norm,none": 0.2568383472880424,
118
+ "acc_norm_stderr,none": 0.025431041932479657,
119
+ "f1,score-first": 0.005671732362664017,
120
+ "f1_stderr,score-first": 0.0060835998620534645,
121
+ "accuracy,score-first": 0.005671732362664017,
122
+ "accuracy_stderr,score-first": 0.0060835998620534645,
123
+ "alias": "polish"
124
+ }
125
+ },
126
+ "configs": {
127
+ "belebele_pol_Latn": {
128
+ "task": "belebele_pol_Latn",
129
+ "group": "belebele",
130
+ "dataset_path": "facebook/belebele",
131
+ "test_split": "pol_Latn",
132
+ "fewshot_split": "pol_Latn",
133
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
134
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
135
+ "doc_to_choice": [
136
+ "A",
137
+ "B",
138
+ "C",
139
+ "D"
140
+ ],
141
+ "description": "",
142
+ "target_delimiter": " ",
143
+ "fewshot_delimiter": "\n\n",
144
+ "fewshot_config": {
145
+ "sampler": "first_n"
146
+ },
147
+ "num_fewshot": 5,
148
+ "metric_list": [
149
+ {
150
+ "metric": "acc",
151
+ "aggregation": "mean",
152
+ "higher_is_better": true
153
+ },
154
+ {
155
+ "metric": "acc_norm",
156
+ "aggregation": "mean",
157
+ "higher_is_better": true
158
+ }
159
+ ],
160
+ "output_type": "multiple_choice",
161
+ "repeats": 1,
162
+ "should_decontaminate": true,
163
+ "doc_to_decontamination_query": "{{question}}",
164
+ "metadata": {
165
+ "version": 0.0
166
+ }
167
+ },
168
+ "polemo2_in": {
169
+ "task": "polemo2_in",
170
+ "group": [
171
+ "polemo2"
172
+ ],
173
+ "dataset_path": "allegro/klej-polemo2-in",
174
+ "training_split": "train",
175
+ "validation_split": "validation",
176
+ "test_split": "test",
177
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
178
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
179
+ "description": "",
180
+ "target_delimiter": " ",
181
+ "fewshot_delimiter": "\n\n",
182
+ "num_fewshot": 5,
183
+ "metric_list": [
184
+ {
185
+ "metric": "f1",
186
+ "aggregation": "mean",
187
+ "higher_is_better": true,
188
+ "hf_evaluate": true,
189
+ "average": "micro"
190
+ },
191
+ {
192
+ "metric": "accuracy",
193
+ "aggregation": "mean",
194
+ "higher_is_better": true
195
+ }
196
+ ],
197
+ "output_type": "generate_until",
198
+ "generation_kwargs": {
199
+ "until": [
200
+ ".",
201
+ ","
202
+ ],
203
+ "do_sample": false,
204
+ "temperature": 0.0,
205
+ "max_gen_toks": 50
206
+ },
207
+ "repeats": 1,
208
+ "filter_list": [
209
+ {
210
+ "name": "score-first",
211
+ "filter": [
212
+ {
213
+ "function": "regex",
214
+ "regex_pattern": "(\\b[ABCD]\\b)"
215
+ },
216
+ {
217
+ "function": "take_first"
218
+ },
219
+ {
220
+ "function": "map",
221
+ "mapping_dict": {
222
+ "A": 0,
223
+ "B": 1,
224
+ "C": 2,
225
+ "D": 3
226
+ },
227
+ "default_value": -1
228
+ },
229
+ {
230
+ "function": "take_first"
231
+ }
232
+ ]
233
+ }
234
+ ],
235
+ "should_decontaminate": true,
236
+ "doc_to_decontamination_query": "{{sentence}}",
237
+ "metadata": {
238
+ "version": 1.0
239
+ }
240
+ },
241
+ "polemo2_in_multiple_choice": {
242
+ "task": "polemo2_in_multiple_choice",
243
+ "group": [
244
+ "polemo2_mc"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-in",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
251
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
252
+ "doc_to_choice": [
253
+ "Neutralny",
254
+ "Negatywny",
255
+ "Pozytywny",
256
+ "Niejednoznaczny"
257
+ ],
258
+ "description": "",
259
+ "target_delimiter": " ",
260
+ "fewshot_delimiter": "\n\n",
261
+ "num_fewshot": 5,
262
+ "metric_list": [
263
+ {
264
+ "metric": "acc",
265
+ "aggregation": "mean",
266
+ "higher_is_better": true
267
+ },
268
+ {
269
+ "metric": "acc_norm",
270
+ "aggregation": "mean",
271
+ "higher_is_better": true
272
+ }
273
+ ],
274
+ "output_type": "multiple_choice",
275
+ "repeats": 1,
276
+ "should_decontaminate": true,
277
+ "doc_to_decontamination_query": "{{sentence}}"
278
+ },
279
+ "polemo2_out": {
280
+ "task": "polemo2_out",
281
+ "group": [
282
+ "polemo2"
283
+ ],
284
+ "dataset_path": "allegro/klej-polemo2-out",
285
+ "training_split": "train",
286
+ "validation_split": "validation",
287
+ "test_split": "test",
288
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
289
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
290
+ "description": "",
291
+ "target_delimiter": " ",
292
+ "fewshot_delimiter": "\n\n",
293
+ "num_fewshot": 5,
294
+ "metric_list": [
295
+ {
296
+ "metric": "f1",
297
+ "aggregation": "mean",
298
+ "higher_is_better": true,
299
+ "hf_evaluate": true,
300
+ "average": "micro"
301
+ },
302
+ {
303
+ "metric": "accuracy",
304
+ "aggregation": "mean",
305
+ "higher_is_better": true
306
+ }
307
+ ],
308
+ "output_type": "generate_until",
309
+ "generation_kwargs": {
310
+ "until": [
311
+ ".",
312
+ ","
313
+ ],
314
+ "do_sample": false,
315
+ "temperature": 0.0,
316
+ "max_gen_toks": 50
317
+ },
318
+ "repeats": 1,
319
+ "filter_list": [
320
+ {
321
+ "name": "score-first",
322
+ "filter": [
323
+ {
324
+ "function": "regex",
325
+ "regex_pattern": "(\\b[ABCD]\\b)"
326
+ },
327
+ {
328
+ "function": "take_first"
329
+ },
330
+ {
331
+ "function": "map",
332
+ "mapping_dict": {
333
+ "A": 0,
334
+ "B": 1,
335
+ "C": 2,
336
+ "D": 3
337
+ },
338
+ "default_value": -1
339
+ },
340
+ {
341
+ "function": "take_first"
342
+ }
343
+ ]
344
+ }
345
+ ],
346
+ "should_decontaminate": true,
347
+ "doc_to_decontamination_query": "{{sentence}}",
348
+ "metadata": {
349
+ "version": 1.0
350
+ }
351
+ },
352
+ "polemo2_out_multiple_choice": {
353
+ "task": "polemo2_out_multiple_choice",
354
+ "group": [
355
+ "polemo2_mc"
356
+ ],
357
+ "dataset_path": "allegro/klej-polemo2-out",
358
+ "training_split": "train",
359
+ "validation_split": "validation",
360
+ "test_split": "test",
361
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
362
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
363
+ "doc_to_choice": [
364
+ "Neutralny",
365
+ "Negatywny",
366
+ "Pozytywny",
367
+ "Niejednoznaczny"
368
+ ],
369
+ "description": "",
370
+ "target_delimiter": " ",
371
+ "fewshot_delimiter": "\n\n",
372
+ "num_fewshot": 5,
373
+ "metric_list": [
374
+ {
375
+ "metric": "acc",
376
+ "aggregation": "mean",
377
+ "higher_is_better": true
378
+ },
379
+ {
380
+ "metric": "acc_norm",
381
+ "aggregation": "mean",
382
+ "higher_is_better": true
383
+ }
384
+ ],
385
+ "output_type": "multiple_choice",
386
+ "repeats": 1,
387
+ "should_decontaminate": true,
388
+ "doc_to_decontamination_query": "{{sentence}}"
389
+ },
390
+ "polish_8tags_multiple_choice": {
391
+ "task": "polish_8tags_multiple_choice",
392
+ "dataset_path": "djstrong/8tags",
393
+ "training_split": "train",
394
+ "test_split": "test",
395
+ "fewshot_split": "train",
396
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
397
+ "doc_to_target": "{{label|int}}",
398
+ "doc_to_choice": [
399
+ "Film",
400
+ "Historia",
401
+ "Jedzenie",
402
+ "Medycyna",
403
+ "Motoryzacja",
404
+ "Praca",
405
+ "Sport",
406
+ "Technologie"
407
+ ],
408
+ "description": "",
409
+ "target_delimiter": " ",
410
+ "fewshot_delimiter": "\n\n",
411
+ "num_fewshot": 5,
412
+ "metric_list": [
413
+ {
414
+ "metric": "acc",
415
+ "aggregation": "mean",
416
+ "higher_is_better": true
417
+ },
418
+ {
419
+ "metric": "acc_norm",
420
+ "aggregation": "mean",
421
+ "higher_is_better": true
422
+ }
423
+ ],
424
+ "output_type": "multiple_choice",
425
+ "repeats": 1,
426
+ "should_decontaminate": true,
427
+ "doc_to_decontamination_query": "{{sentence}}"
428
+ },
429
+ "polish_8tags_regex": {
430
+ "task": "polish_8tags_regex",
431
+ "dataset_path": "djstrong/8tags",
432
+ "training_split": "train",
433
+ "validation_split": "validation",
434
+ "test_split": "test",
435
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
436
+ "doc_to_target": "{{label|int}}",
437
+ "description": "",
438
+ "target_delimiter": " ",
439
+ "fewshot_delimiter": "\n\n",
440
+ "num_fewshot": 5,
441
+ "metric_list": [
442
+ {
443
+ "metric": "f1",
444
+ "aggregation": "mean",
445
+ "higher_is_better": true,
446
+ "hf_evaluate": true,
447
+ "average": "micro"
448
+ },
449
+ {
450
+ "metric": "accuracy",
451
+ "aggregation": "mean",
452
+ "higher_is_better": true
453
+ }
454
+ ],
455
+ "output_type": "generate_until",
456
+ "generation_kwargs": {
457
+ "until": [
458
+ ".",
459
+ ","
460
+ ],
461
+ "do_sample": false,
462
+ "temperature": 0.0,
463
+ "max_gen_toks": 50
464
+ },
465
+ "repeats": 1,
466
+ "filter_list": [
467
+ {
468
+ "name": "score-first",
469
+ "filter": [
470
+ {
471
+ "function": "regex",
472
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
473
+ },
474
+ {
475
+ "function": "take_first"
476
+ },
477
+ {
478
+ "function": "map",
479
+ "mapping_dict": {
480
+ "A": 0,
481
+ "B": 1,
482
+ "C": 2,
483
+ "D": 3,
484
+ "E": 4,
485
+ "F": 5,
486
+ "G": 6,
487
+ "H": 7
488
+ },
489
+ "default_value": -1
490
+ },
491
+ {
492
+ "function": "take_first"
493
+ }
494
+ ]
495
+ }
496
+ ],
497
+ "should_decontaminate": true,
498
+ "doc_to_decontamination_query": "{{sentence}}"
499
+ },
500
+ "polish_belebele_regex": {
501
+ "task": "polish_belebele_regex",
502
+ "dataset_path": "facebook/belebele",
503
+ "test_split": "pol_Latn",
504
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
505
+ "doc_to_target": "{{correct_answer_num|int - 1}}",
506
+ "description": "",
507
+ "target_delimiter": " ",
508
+ "fewshot_delimiter": "\n\n",
509
+ "num_fewshot": 5,
510
+ "metric_list": [
511
+ {
512
+ "metric": "f1",
513
+ "aggregation": "mean",
514
+ "higher_is_better": true,
515
+ "hf_evaluate": true,
516
+ "average": "micro"
517
+ },
518
+ {
519
+ "metric": "accuracy",
520
+ "aggregation": "mean",
521
+ "higher_is_better": true
522
+ }
523
+ ],
524
+ "output_type": "generate_until",
525
+ "generation_kwargs": {
526
+ "until": [
527
+ ".",
528
+ ","
529
+ ],
530
+ "do_sample": false,
531
+ "temperature": 0.0,
532
+ "max_gen_toks": 50
533
+ },
534
+ "repeats": 1,
535
+ "filter_list": [
536
+ {
537
+ "name": "score-first",
538
+ "filter": [
539
+ {
540
+ "function": "regex",
541
+ "regex_pattern": "(\\b[ABCD]\\b)"
542
+ },
543
+ {
544
+ "function": "take_first"
545
+ },
546
+ {
547
+ "function": "map",
548
+ "mapping_dict": {
549
+ "A": 0,
550
+ "B": 1,
551
+ "C": 2,
552
+ "D": 3
553
+ },
554
+ "default_value": -1
555
+ },
556
+ {
557
+ "function": "take_first"
558
+ }
559
+ ]
560
+ }
561
+ ],
562
+ "should_decontaminate": true,
563
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
564
+ },
565
+ "polish_dyk_multiple_choice": {
566
+ "task": "polish_dyk_multiple_choice",
567
+ "dataset_path": "allegro/klej-dyk",
568
+ "training_split": "train",
569
+ "test_split": "test",
570
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
571
+ "doc_to_target": "{{target|int}}",
572
+ "doc_to_choice": [
573
+ "Nie",
574
+ "Tak"
575
+ ],
576
+ "description": "",
577
+ "target_delimiter": " ",
578
+ "fewshot_delimiter": "\n\n",
579
+ "num_fewshot": 5,
580
+ "metric_list": [
581
+ {
582
+ "metric": "acc",
583
+ "aggregation": "mean",
584
+ "higher_is_better": true
585
+ },
586
+ {
587
+ "metric": "acc_norm",
588
+ "aggregation": "mean",
589
+ "higher_is_better": true
590
+ }
591
+ ],
592
+ "output_type": "multiple_choice",
593
+ "repeats": 1,
594
+ "should_decontaminate": true,
595
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
596
+ },
597
+ "polish_dyk_regex": {
598
+ "task": "polish_dyk_regex",
599
+ "dataset_path": "allegro/klej-dyk",
600
+ "training_split": "train",
601
+ "test_split": "test",
602
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
603
+ "doc_to_target": "{{target|int+1}}",
604
+ "description": "",
605
+ "target_delimiter": " ",
606
+ "fewshot_delimiter": "\n\n",
607
+ "num_fewshot": 5,
608
+ "metric_list": [
609
+ {
610
+ "metric": "f1",
611
+ "aggregation": "mean",
612
+ "higher_is_better": true,
613
+ "hf_evaluate": true,
614
+ "average": "micro"
615
+ },
616
+ {
617
+ "metric": "accuracy",
618
+ "aggregation": "mean",
619
+ "higher_is_better": true
620
+ }
621
+ ],
622
+ "output_type": "generate_until",
623
+ "generation_kwargs": {
624
+ "until": [
625
+ ".",
626
+ ","
627
+ ],
628
+ "do_sample": false,
629
+ "temperature": 0.0,
630
+ "max_gen_toks": 50
631
+ },
632
+ "repeats": 1,
633
+ "filter_list": [
634
+ {
635
+ "name": "score-first",
636
+ "filter": [
637
+ {
638
+ "function": "regex",
639
+ "regex_pattern": "(\\b[ABCD]\\b)"
640
+ },
641
+ {
642
+ "function": "take_first"
643
+ },
644
+ {
645
+ "function": "map",
646
+ "mapping_dict": {
647
+ "A": 0,
648
+ "B": 1,
649
+ "C": 2,
650
+ "D": 3
651
+ },
652
+ "default_value": -1
653
+ },
654
+ {
655
+ "function": "take_first"
656
+ }
657
+ ]
658
+ }
659
+ ],
660
+ "should_decontaminate": true,
661
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
662
+ },
663
+ "polish_ppc_multiple_choice": {
664
+ "task": "polish_ppc_multiple_choice",
665
+ "dataset_path": "djstrong/ppc",
666
+ "training_split": "train",
667
+ "validation_split": "validation",
668
+ "test_split": "test",
669
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
670
+ "doc_to_target": "{{label|int - 1}}",
671
+ "doc_to_choice": [
672
+ "A",
673
+ "B",
674
+ "C"
675
+ ],
676
+ "description": "",
677
+ "target_delimiter": " ",
678
+ "fewshot_delimiter": "\n\n",
679
+ "num_fewshot": 5,
680
+ "metric_list": [
681
+ {
682
+ "metric": "acc",
683
+ "aggregation": "mean",
684
+ "higher_is_better": true
685
+ },
686
+ {
687
+ "metric": "acc_norm",
688
+ "aggregation": "mean",
689
+ "higher_is_better": true
690
+ }
691
+ ],
692
+ "output_type": "multiple_choice",
693
+ "repeats": 1,
694
+ "should_decontaminate": true,
695
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
696
+ },
697
+ "polish_ppc_regex": {
698
+ "task": "polish_ppc_regex",
699
+ "dataset_path": "djstrong/ppc",
700
+ "training_split": "train",
701
+ "validation_split": "validation",
702
+ "test_split": "test",
703
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
704
+ "doc_to_target": "{{label|int}}",
705
+ "description": "",
706
+ "target_delimiter": " ",
707
+ "fewshot_delimiter": "\n\n",
708
+ "num_fewshot": 5,
709
+ "metric_list": [
710
+ {
711
+ "metric": "f1",
712
+ "aggregation": "mean",
713
+ "higher_is_better": true,
714
+ "hf_evaluate": true,
715
+ "average": "micro"
716
+ },
717
+ {
718
+ "metric": "accuracy",
719
+ "aggregation": "mean",
720
+ "higher_is_better": true
721
+ }
722
+ ],
723
+ "output_type": "generate_until",
724
+ "generation_kwargs": {
725
+ "until": [
726
+ ".",
727
+ ","
728
+ ],
729
+ "do_sample": false,
730
+ "temperature": 0.0,
731
+ "max_gen_toks": 50
732
+ },
733
+ "repeats": 1,
734
+ "filter_list": [
735
+ {
736
+ "name": "score-first",
737
+ "filter": [
738
+ {
739
+ "function": "regex",
740
+ "regex_pattern": "(\\b[ABCD]\\b)"
741
+ },
742
+ {
743
+ "function": "take_first"
744
+ },
745
+ {
746
+ "function": "map",
747
+ "mapping_dict": {
748
+ "A": 0,
749
+ "B": 1,
750
+ "C": 2,
751
+ "D": 3
752
+ },
753
+ "default_value": -1
754
+ },
755
+ {
756
+ "function": "take_first"
757
+ }
758
+ ]
759
+ }
760
+ ],
761
+ "should_decontaminate": true,
762
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
763
+ },
764
+ "polish_psc_multiple_choice": {
765
+ "task": "polish_psc_multiple_choice",
766
+ "dataset_path": "allegro/klej-psc",
767
+ "training_split": "train",
768
+ "test_split": "test",
769
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
770
+ "doc_to_target": "{{label|int}}",
771
+ "doc_to_choice": [
772
+ "Nie",
773
+ "Tak"
774
+ ],
775
+ "description": "",
776
+ "target_delimiter": " ",
777
+ "fewshot_delimiter": "\n\n",
778
+ "num_fewshot": 5,
779
+ "metric_list": [
780
+ {
781
+ "metric": "acc",
782
+ "aggregation": "mean",
783
+ "higher_is_better": true
784
+ },
785
+ {
786
+ "metric": "acc_norm",
787
+ "aggregation": "mean",
788
+ "higher_is_better": true
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
795
+ },
796
+ "polish_psc_regex": {
797
+ "task": "polish_psc_regex",
798
+ "dataset_path": "allegro/klej-psc",
799
+ "training_split": "train",
800
+ "test_split": "test",
801
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
802
+ "doc_to_target": "{{label|int + 1}}",
803
+ "description": "",
804
+ "target_delimiter": " ",
805
+ "fewshot_delimiter": "\n\n",
806
+ "num_fewshot": 5,
807
+ "metric_list": [
808
+ {
809
+ "metric": "f1",
810
+ "aggregation": "mean",
811
+ "higher_is_better": true,
812
+ "hf_evaluate": true,
813
+ "average": "micro"
814
+ },
815
+ {
816
+ "metric": "accuracy",
817
+ "aggregation": "mean",
818
+ "higher_is_better": true
819
+ }
820
+ ],
821
+ "output_type": "generate_until",
822
+ "generation_kwargs": {
823
+ "until": [
824
+ ".",
825
+ ","
826
+ ],
827
+ "do_sample": false,
828
+ "temperature": 0.0,
829
+ "max_gen_toks": 50
830
+ },
831
+ "repeats": 1,
832
+ "filter_list": [
833
+ {
834
+ "name": "score-first",
835
+ "filter": [
836
+ {
837
+ "function": "regex",
838
+ "regex_pattern": "(\\b[ABCD]\\b)"
839
+ },
840
+ {
841
+ "function": "take_first"
842
+ },
843
+ {
844
+ "function": "map",
845
+ "mapping_dict": {
846
+ "A": 0,
847
+ "B": 1,
848
+ "C": 2,
849
+ "D": 3
850
+ },
851
+ "default_value": -1
852
+ },
853
+ {
854
+ "function": "take_first"
855
+ }
856
+ ]
857
+ }
858
+ ],
859
+ "should_decontaminate": true,
860
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
861
+ }
862
+ },
863
+ "versions": {
864
+ "belebele_pol_Latn": 0.0,
865
+ "polemo2_in": 1.0,
866
+ "polemo2_in_multiple_choice": "Yaml",
867
+ "polemo2_out": 1.0,
868
+ "polemo2_out_multiple_choice": "Yaml",
869
+ "polish": "N/A",
870
+ "polish_8tags_multiple_choice": "Yaml",
871
+ "polish_8tags_regex": "Yaml",
872
+ "polish_belebele_regex": "Yaml",
873
+ "polish_dyk_multiple_choice": "Yaml",
874
+ "polish_dyk_regex": "Yaml",
875
+ "polish_ppc_multiple_choice": "Yaml",
876
+ "polish_ppc_regex": "Yaml",
877
+ "polish_psc_multiple_choice": "Yaml",
878
+ "polish_psc_regex": "Yaml"
879
+ },
880
+ "n-shot": {
881
+ "belebele_pol_Latn": 5,
882
+ "polemo2_in": 5,
883
+ "polemo2_in_multiple_choice": 5,
884
+ "polemo2_out": 5,
885
+ "polemo2_out_multiple_choice": 5,
886
+ "polish": 5,
887
+ "polish_8tags_multiple_choice": 5,
888
+ "polish_8tags_regex": 5,
889
+ "polish_belebele_regex": 5,
890
+ "polish_dyk_multiple_choice": 5,
891
+ "polish_dyk_regex": 5,
892
+ "polish_ppc_multiple_choice": 5,
893
+ "polish_ppc_regex": 5,
894
+ "polish_psc_multiple_choice": 5,
895
+ "polish_psc_regex": 5
896
+ },
897
+ "config": {
898
+ "model": "hf",
899
+ "model_args": "pretrained=Azurro/APT3-1B-Base",
900
+ "batch_size": "auto",
901
+ "batch_sizes": [
902
+ 16
903
+ ],
904
+ "device": "cuda:0",
905
+ "use_cache": null,
906
+ "limit": null,
907
+ "bootstrap_iters": 100000,
908
+ "gen_kwargs": null
909
+ },
910
+ "git_hash": null
911
+ }
polish_benchmarks-out2/results_trurl-2-7b-0_polish/results.json ADDED
@@ -0,0 +1,911 @@
1
+ {
2
+ "results": {
3
+ "polish": {
4
+ "acc,none": 0.38655822884426444,
5
+ "acc_stderr,none": 0.009810522827733922,
6
+ "acc_norm,none": 0.3877248155817996,
7
+ "acc_norm_stderr,none": 0.009903004691484696,
8
+ "f1,score-first": 0.5390263032489272,
9
+ "f1_stderr,score-first": 0.09713328442043558,
10
+ "accuracy,score-first": 0.5390263032489272,
11
+ "accuracy_stderr,score-first": 0.09713328442043558,
12
+ "alias": "polish"
13
+ },
14
+ "belebele_pol_Latn": {
15
+ "acc,none": 0.39555555555555555,
16
+ "acc_stderr,none": 0.016308052223501762,
17
+ "acc_norm,none": 0.39555555555555555,
18
+ "acc_norm_stderr,none": 0.016308052223501762,
19
+ "alias": " - belebele_pol_Latn"
20
+ },
21
+ "polemo2_in": {
22
+ "f1,score-first": 0.4695290858725762,
23
+ "f1_stderr,score-first": 0.018586362627404755,
24
+ "accuracy,score-first": 0.4695290858725762,
25
+ "accuracy_stderr,score-first": 0.018586362627404755,
26
+ "alias": " - polemo2_in"
27
+ },
28
+ "polemo2_in_multiple_choice": {
29
+ "acc,none": 0.16204986149584488,
30
+ "acc_stderr,none": 0.013723528490778158,
31
+ "acc_norm,none": 0.16204986149584488,
32
+ "acc_norm_stderr,none": 0.013723528490778158,
33
+ "alias": " - polemo2_in_multiple_choice"
34
+ },
35
+ "polemo2_out": {
36
+ "f1,score-first": 0.4473684210526316,
37
+ "f1_stderr,score-first": 0.02239376166833581,
38
+ "accuracy,score-first": 0.4473684210526316,
39
+ "accuracy_stderr,score-first": 0.02239376166833581,
40
+ "alias": " - polemo2_out"
41
+ },
42
+ "polemo2_out_multiple_choice": {
43
+ "acc,none": 0.0020242914979757085,
44
+ "acc_stderr,none": 0.0020242914979757094,
45
+ "acc_norm,none": 0.0020242914979757085,
46
+ "acc_norm_stderr,none": 0.0020242914979757094,
47
+ "alias": " - polemo2_out_multiple_choice"
48
+ },
49
+ "polish_8tags_multiple_choice": {
50
+ "acc,none": 0.5121225983531564,
51
+ "acc_stderr,none": 0.0075605243767434,
52
+ "acc_norm,none": 0.5146386093321135,
53
+ "acc_norm_stderr,none": 0.007559505586488673,
54
+ "alias": " - polish_8tags_multiple_choice"
55
+ },
56
+ "polish_8tags_regex": {
57
+ "f1,score-first": 0.6697163769441903,
58
+ "f1_stderr,score-first": 0.007113749634252443,
59
+ "accuracy,score-first": 0.6697163769441903,
60
+ "accuracy_stderr,score-first": 0.007113749634252443,
61
+ "alias": " - polish_8tags_regex"
62
+ },
63
+ "polish_belebele_regex": {
64
+ "f1,score-first": 0.44333333333333336,
65
+ "f1_stderr,score-first": 0.0165684909720556,
66
+ "accuracy,score-first": 0.44333333333333336,
67
+ "accuracy_stderr,score-first": 0.0165684909720556,
68
+ "alias": " - polish_belebele_regex"
69
+ },
70
+ "polish_dyk_multiple_choice": {
71
+ "acc,none": 0.2118561710398445,
72
+ "acc_stderr,none": 0.012744615626342343,
73
+ "acc_norm,none": 0.2118561710398445,
74
+ "acc_norm_stderr,none": 0.012744615626342343,
75
+ "alias": " - polish_dyk_multiple_choice"
76
+ },
77
+ "polish_dyk_regex": {
78
+ "f1,score-first": 0.8289601554907677,
79
+ "f1_stderr,score-first": 0.011744077740056324,
80
+ "accuracy,score-first": 0.8289601554907677,
81
+ "accuracy_stderr,score-first": 0.011744077740056324,
82
+ "alias": " - polish_dyk_regex"
83
+ },
84
+ "polish_ppc_multiple_choice": {
85
+ "acc,none": 0.334,
86
+ "acc_stderr,none": 0.014922019523732963,
87
+ "acc_norm,none": 0.334,
88
+ "acc_norm_stderr,none": 0.014922019523732963,
89
+ "alias": " - polish_ppc_multiple_choice"
90
+ },
91
+ "polish_ppc_regex": {
92
+ "f1,score-first": 0.421,
93
+ "f1_stderr,score-first": 0.015620595475301318,
94
+ "accuracy,score-first": 0.421,
95
+ "accuracy_stderr,score-first": 0.015620595475301318,
96
+ "alias": " - polish_ppc_regex"
97
+ },
98
+ "polish_psc_multiple_choice": {
99
+ "acc,none": 0.3070500927643785,
100
+ "acc_stderr,none": 0.014055544850266423,
101
+ "acc_norm,none": 0.3070500927643785,
102
+ "acc_norm_stderr,none": 0.014055544850266423,
103
+ "alias": " - polish_psc_multiple_choice"
104
+ },
105
+ "polish_psc_regex": {
106
+ "f1,score-first": 0.4322820037105751,
107
+ "f1_stderr,score-first": 0.0150953107201147,
108
+ "accuracy,score-first": 0.4322820037105751,
109
+ "accuracy_stderr,score-first": 0.0150953107201147,
110
+ "alias": " - polish_psc_regex"
111
+ }
112
+ },
113
+ "groups": {
114
+ "polish": {
115
+ "acc,none": 0.38655822884426444,
116
+ "acc_stderr,none": 0.009810522827733922,
117
+ "acc_norm,none": 0.3877248155817996,
118
+ "acc_norm_stderr,none": 0.009903004691484696,
119
+ "f1,score-first": 0.5390263032489272,
120
+ "f1_stderr,score-first": 0.09713328442043558,
121
+ "accuracy,score-first": 0.5390263032489272,
122
+ "accuracy_stderr,score-first": 0.09713328442043558,
123
+ "alias": "polish"
124
+ }
125
+ },
126
+ "configs": {
127
+ "belebele_pol_Latn": {
128
+ "task": "belebele_pol_Latn",
129
+ "group": "belebele",
130
+ "dataset_path": "facebook/belebele",
131
+ "test_split": "pol_Latn",
132
+ "fewshot_split": "pol_Latn",
133
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
134
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
135
+ "doc_to_choice": [
136
+ "A",
137
+ "B",
138
+ "C",
139
+ "D"
140
+ ],
141
+ "description": "",
142
+ "target_delimiter": " ",
143
+ "fewshot_delimiter": "\n\n",
144
+ "fewshot_config": {
145
+ "sampler": "first_n"
146
+ },
147
+ "num_fewshot": 0,
148
+ "metric_list": [
149
+ {
150
+ "metric": "acc",
151
+ "aggregation": "mean",
152
+ "higher_is_better": true
153
+ },
154
+ {
155
+ "metric": "acc_norm",
156
+ "aggregation": "mean",
157
+ "higher_is_better": true
158
+ }
159
+ ],
160
+ "output_type": "multiple_choice",
161
+ "repeats": 1,
162
+ "should_decontaminate": true,
163
+ "doc_to_decontamination_query": "{{question}}",
164
+ "metadata": {
165
+ "version": 0.0
166
+ }
167
+ },
168
+ "polemo2_in": {
169
+ "task": "polemo2_in",
170
+ "group": [
171
+ "polemo2"
172
+ ],
173
+ "dataset_path": "allegro/klej-polemo2-in",
174
+ "training_split": "train",
175
+ "validation_split": "validation",
176
+ "test_split": "test",
177
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
178
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
179
+ "description": "",
180
+ "target_delimiter": " ",
181
+ "fewshot_delimiter": "\n\n",
182
+ "num_fewshot": 0,
183
+ "metric_list": [
184
+ {
185
+ "metric": "f1",
186
+ "aggregation": "mean",
187
+ "higher_is_better": true,
188
+ "hf_evaluate": true,
189
+ "average": "micro"
190
+ },
191
+ {
192
+ "metric": "accuracy",
193
+ "aggregation": "mean",
194
+ "higher_is_better": true
195
+ }
196
+ ],
197
+ "output_type": "generate_until",
198
+ "generation_kwargs": {
199
+ "until": [
200
+ ".",
201
+ ","
202
+ ],
203
+ "do_sample": false,
204
+ "temperature": 0.0,
205
+ "max_gen_toks": 50
206
+ },
207
+ "repeats": 1,
208
+ "filter_list": [
209
+ {
210
+ "name": "score-first",
211
+ "filter": [
212
+ {
213
+ "function": "regex",
214
+ "regex_pattern": "(\\b[ABCD]\\b)"
215
+ },
216
+ {
217
+ "function": "take_first"
218
+ },
219
+ {
220
+ "function": "map",
221
+ "mapping_dict": {
222
+ "A": 0,
223
+ "B": 1,
224
+ "C": 2,
225
+ "D": 3
226
+ },
227
+ "default_value": -1
228
+ },
229
+ {
230
+ "function": "take_first"
231
+ }
232
+ ]
233
+ }
234
+ ],
235
+ "should_decontaminate": true,
236
+ "doc_to_decontamination_query": "{{sentence}}",
237
+ "metadata": {
238
+ "version": 1.0
239
+ }
240
+ },
241
+ "polemo2_in_multiple_choice": {
242
+ "task": "polemo2_in_multiple_choice",
243
+ "group": [
244
+ "polemo2_mc"
245
+ ],
246
+ "dataset_path": "allegro/klej-polemo2-in",
247
+ "training_split": "train",
248
+ "validation_split": "validation",
249
+ "test_split": "test",
250
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
251
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
252
+ "doc_to_choice": [
253
+ "Neutralny",
254
+ "Negatywny",
255
+ "Pozytywny",
256
+ "Niejednoznaczny"
257
+ ],
258
+ "description": "",
259
+ "target_delimiter": " ",
260
+ "fewshot_delimiter": "\n\n",
261
+ "num_fewshot": 0,
262
+ "metric_list": [
263
+ {
264
+ "metric": "acc",
265
+ "aggregation": "mean",
266
+ "higher_is_better": true
267
+ },
268
+ {
269
+ "metric": "acc_norm",
270
+ "aggregation": "mean",
271
+ "higher_is_better": true
272
+ }
273
+ ],
274
+ "output_type": "multiple_choice",
275
+ "repeats": 1,
276
+ "should_decontaminate": true,
277
+ "doc_to_decontamination_query": "{{sentence}}"
278
+ },
279
+ "polemo2_out": {
280
+ "task": "polemo2_out",
281
+ "group": [
282
+ "polemo2"
283
+ ],
284
+ "dataset_path": "allegro/klej-polemo2-out",
285
+ "training_split": "train",
286
+ "validation_split": "validation",
287
+ "test_split": "test",
288
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
289
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
290
+ "description": "",
291
+ "target_delimiter": " ",
292
+ "fewshot_delimiter": "\n\n",
293
+ "num_fewshot": 0,
294
+ "metric_list": [
295
+ {
296
+ "metric": "f1",
297
+ "aggregation": "mean",
298
+ "higher_is_better": true,
299
+ "hf_evaluate": true,
300
+ "average": "micro"
301
+ },
302
+ {
303
+ "metric": "accuracy",
304
+ "aggregation": "mean",
305
+ "higher_is_better": true
306
+ }
307
+ ],
308
+ "output_type": "generate_until",
309
+ "generation_kwargs": {
310
+ "until": [
311
+ ".",
312
+ ","
313
+ ],
314
+ "do_sample": false,
315
+ "temperature": 0.0,
316
+ "max_gen_toks": 50
317
+ },
318
+ "repeats": 1,
319
+ "filter_list": [
320
+ {
321
+ "name": "score-first",
322
+ "filter": [
323
+ {
324
+ "function": "regex",
325
+ "regex_pattern": "(\\b[ABCD]\\b)"
326
+ },
327
+ {
328
+ "function": "take_first"
329
+ },
330
+ {
331
+ "function": "map",
332
+ "mapping_dict": {
333
+ "A": 0,
334
+ "B": 1,
335
+ "C": 2,
336
+ "D": 3
337
+ },
338
+ "default_value": -1
339
+ },
340
+ {
341
+ "function": "take_first"
342
+ }
343
+ ]
344
+ }
345
+ ],
346
+ "should_decontaminate": true,
347
+ "doc_to_decontamination_query": "{{sentence}}",
348
+ "metadata": {
349
+ "version": 1.0
350
+ }
351
+ },
352
+ "polemo2_out_multiple_choice": {
353
+ "task": "polemo2_out_multiple_choice",
354
+ "group": [
355
+ "polemo2_mc"
356
+ ],
357
+ "dataset_path": "allegro/klej-polemo2-out",
358
+ "training_split": "train",
359
+ "validation_split": "validation",
360
+ "test_split": "test",
361
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
362
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
363
+ "doc_to_choice": [
364
+ "Neutralny",
365
+ "Negatywny",
366
+ "Pozytywny",
367
+ "Niejednoznaczny"
368
+ ],
369
+ "description": "",
370
+ "target_delimiter": " ",
371
+ "fewshot_delimiter": "\n\n",
372
+ "num_fewshot": 0,
373
+ "metric_list": [
374
+ {
375
+ "metric": "acc",
376
+ "aggregation": "mean",
377
+ "higher_is_better": true
378
+ },
379
+ {
380
+ "metric": "acc_norm",
381
+ "aggregation": "mean",
382
+ "higher_is_better": true
383
+ }
384
+ ],
385
+ "output_type": "multiple_choice",
386
+ "repeats": 1,
387
+ "should_decontaminate": true,
388
+ "doc_to_decontamination_query": "{{sentence}}"
389
+ },
390
+ "polish_8tags_multiple_choice": {
391
+ "task": "polish_8tags_multiple_choice",
392
+ "dataset_path": "djstrong/8tags",
393
+ "training_split": "train",
394
+ "test_split": "test",
395
+ "fewshot_split": "train",
396
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
397
+ "doc_to_target": "{{label|int}}",
398
+ "doc_to_choice": [
399
+ "Film",
400
+ "Historia",
401
+ "Jedzenie",
402
+ "Medycyna",
403
+ "Motoryzacja",
404
+ "Praca",
405
+ "Sport",
406
+ "Technologie"
407
+ ],
408
+ "description": "",
409
+ "target_delimiter": " ",
410
+ "fewshot_delimiter": "\n\n",
411
+ "num_fewshot": 0,
412
+ "metric_list": [
413
+ {
414
+ "metric": "acc",
415
+ "aggregation": "mean",
416
+ "higher_is_better": true
417
+ },
418
+ {
419
+ "metric": "acc_norm",
420
+ "aggregation": "mean",
421
+ "higher_is_better": true
422
+ }
423
+ ],
424
+ "output_type": "multiple_choice",
425
+ "repeats": 1,
426
+ "should_decontaminate": true,
427
+ "doc_to_decontamination_query": "{{sentence}}"
428
+ },
429
+ "polish_8tags_regex": {
430
+ "task": "polish_8tags_regex",
431
+ "dataset_path": "djstrong/8tags",
432
+ "training_split": "train",
433
+ "validation_split": "validation",
434
+ "test_split": "test",
435
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
436
+ "doc_to_target": "{{label|int}}",
437
+ "description": "",
438
+ "target_delimiter": " ",
439
+ "fewshot_delimiter": "\n\n",
440
+ "num_fewshot": 0,
441
+ "metric_list": [
442
+ {
443
+ "metric": "f1",
444
+ "aggregation": "mean",
445
+ "higher_is_better": true,
446
+ "hf_evaluate": true,
447
+ "average": "micro"
448
+ },
449
+ {
450
+ "metric": "accuracy",
451
+ "aggregation": "mean",
452
+ "higher_is_better": true
453
+ }
454
+ ],
455
+ "output_type": "generate_until",
456
+ "generation_kwargs": {
457
+ "until": [
458
+ ".",
459
+ ","
460
+ ],
461
+ "do_sample": false,
462
+ "temperature": 0.0,
463
+ "max_gen_toks": 50
464
+ },
465
+ "repeats": 1,
466
+ "filter_list": [
467
+ {
468
+ "name": "score-first",
469
+ "filter": [
470
+ {
471
+ "function": "regex",
472
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
473
+ },
474
+ {
475
+ "function": "take_first"
476
+ },
477
+ {
478
+ "function": "map",
479
+ "mapping_dict": {
480
+ "A": 0,
481
+ "B": 1,
482
+ "C": 2,
483
+ "D": 3,
484
+ "E": 4,
485
+ "F": 5,
486
+ "G": 6,
487
+ "H": 7
488
+ },
489
+ "default_value": -1
490
+ },
491
+ {
492
+ "function": "take_first"
493
+ }
494
+ ]
495
+ }
496
+ ],
497
+ "should_decontaminate": true,
498
+ "doc_to_decontamination_query": "{{sentence}}"
499
+ },
500
+ "polish_belebele_regex": {
501
+ "task": "polish_belebele_regex",
502
+ "dataset_path": "facebook/belebele",
503
+ "test_split": "pol_Latn",
504
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
505
+ "doc_to_target": "{{correct_answer_num|int - 1}}",
506
+ "description": "",
507
+ "target_delimiter": " ",
508
+ "fewshot_delimiter": "\n\n",
509
+ "num_fewshot": 0,
510
+ "metric_list": [
511
+ {
512
+ "metric": "f1",
513
+ "aggregation": "mean",
514
+ "higher_is_better": true,
515
+ "hf_evaluate": true,
516
+ "average": "micro"
517
+ },
518
+ {
519
+ "metric": "accuracy",
520
+ "aggregation": "mean",
521
+ "higher_is_better": true
522
+ }
523
+ ],
524
+ "output_type": "generate_until",
525
+ "generation_kwargs": {
526
+ "until": [
527
+ ".",
528
+ ","
529
+ ],
530
+ "do_sample": false,
531
+ "temperature": 0.0,
532
+ "max_gen_toks": 50
533
+ },
534
+ "repeats": 1,
535
+ "filter_list": [
536
+ {
537
+ "name": "score-first",
538
+ "filter": [
539
+ {
540
+ "function": "regex",
541
+ "regex_pattern": "(\\b[ABCD]\\b)"
542
+ },
543
+ {
544
+ "function": "take_first"
545
+ },
546
+ {
547
+ "function": "map",
548
+ "mapping_dict": {
549
+ "A": 0,
550
+ "B": 1,
551
+ "C": 2,
552
+ "D": 3
553
+ },
554
+ "default_value": -1
555
+ },
556
+ {
557
+ "function": "take_first"
558
+ }
559
+ ]
560
+ }
561
+ ],
562
+ "should_decontaminate": true,
563
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
564
+ },
565
+ "polish_dyk_multiple_choice": {
566
+ "task": "polish_dyk_multiple_choice",
567
+ "dataset_path": "allegro/klej-dyk",
568
+ "training_split": "train",
569
+ "test_split": "test",
570
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
571
+ "doc_to_target": "{{target|int}}",
572
+ "doc_to_choice": [
573
+ "Nie",
574
+ "Tak"
575
+ ],
576
+ "description": "",
577
+ "target_delimiter": " ",
578
+ "fewshot_delimiter": "\n\n",
579
+ "num_fewshot": 0,
580
+ "metric_list": [
581
+ {
582
+ "metric": "acc",
583
+ "aggregation": "mean",
584
+ "higher_is_better": true
585
+ },
586
+ {
587
+ "metric": "acc_norm",
588
+ "aggregation": "mean",
589
+ "higher_is_better": true
590
+ }
591
+ ],
592
+ "output_type": "multiple_choice",
593
+ "repeats": 1,
594
+ "should_decontaminate": true,
595
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
596
+ },
597
+ "polish_dyk_regex": {
598
+ "task": "polish_dyk_regex",
599
+ "dataset_path": "allegro/klej-dyk",
600
+ "training_split": "train",
601
+ "test_split": "test",
602
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
603
+ "doc_to_target": "{{target|int+1}}",
604
+ "description": "",
605
+ "target_delimiter": " ",
606
+ "fewshot_delimiter": "\n\n",
607
+ "num_fewshot": 0,
608
+ "metric_list": [
609
+ {
610
+ "metric": "f1",
611
+ "aggregation": "mean",
612
+ "higher_is_better": true,
613
+ "hf_evaluate": true,
614
+ "average": "micro"
615
+ },
616
+ {
617
+ "metric": "accuracy",
618
+ "aggregation": "mean",
619
+ "higher_is_better": true
620
+ }
621
+ ],
622
+ "output_type": "generate_until",
623
+ "generation_kwargs": {
624
+ "until": [
625
+ ".",
626
+ ","
627
+ ],
628
+ "do_sample": false,
629
+ "temperature": 0.0,
630
+ "max_gen_toks": 50
631
+ },
632
+ "repeats": 1,
633
+ "filter_list": [
634
+ {
635
+ "name": "score-first",
636
+ "filter": [
637
+ {
638
+ "function": "regex",
639
+ "regex_pattern": "(\\b[ABCD]\\b)"
640
+ },
641
+ {
642
+ "function": "take_first"
643
+ },
644
+ {
645
+ "function": "map",
646
+ "mapping_dict": {
647
+ "A": 0,
648
+ "B": 1,
649
+ "C": 2,
650
+ "D": 3
651
+ },
652
+ "default_value": -1
653
+ },
654
+ {
655
+ "function": "take_first"
656
+ }
657
+ ]
658
+ }
659
+ ],
660
+ "should_decontaminate": true,
661
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
662
+ },
663
+ "polish_ppc_multiple_choice": {
664
+ "task": "polish_ppc_multiple_choice",
665
+ "dataset_path": "djstrong/ppc",
666
+ "training_split": "train",
667
+ "validation_split": "validation",
668
+ "test_split": "test",
669
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
670
+ "doc_to_target": "{{label|int - 1}}",
671
+ "doc_to_choice": [
672
+ "A",
673
+ "B",
674
+ "C"
675
+ ],
676
+ "description": "",
677
+ "target_delimiter": " ",
678
+ "fewshot_delimiter": "\n\n",
679
+ "num_fewshot": 0,
680
+ "metric_list": [
681
+ {
682
+ "metric": "acc",
683
+ "aggregation": "mean",
684
+ "higher_is_better": true
685
+ },
686
+ {
687
+ "metric": "acc_norm",
688
+ "aggregation": "mean",
689
+ "higher_is_better": true
690
+ }
691
+ ],
692
+ "output_type": "multiple_choice",
693
+ "repeats": 1,
694
+ "should_decontaminate": true,
695
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
696
+ },
697
+ "polish_ppc_regex": {
698
+ "task": "polish_ppc_regex",
699
+ "dataset_path": "djstrong/ppc",
700
+ "training_split": "train",
701
+ "validation_split": "validation",
702
+ "test_split": "test",
703
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
704
+ "doc_to_target": "{{label|int}}",
705
+ "description": "",
706
+ "target_delimiter": " ",
707
+ "fewshot_delimiter": "\n\n",
708
+ "num_fewshot": 0,
709
+ "metric_list": [
710
+ {
711
+ "metric": "f1",
712
+ "aggregation": "mean",
713
+ "higher_is_better": true,
714
+ "hf_evaluate": true,
715
+ "average": "micro"
716
+ },
717
+ {
718
+ "metric": "accuracy",
719
+ "aggregation": "mean",
720
+ "higher_is_better": true
721
+ }
722
+ ],
723
+ "output_type": "generate_until",
724
+ "generation_kwargs": {
725
+ "until": [
726
+ ".",
727
+ ","
728
+ ],
729
+ "do_sample": false,
730
+ "temperature": 0.0,
731
+ "max_gen_toks": 50
732
+ },
733
+ "repeats": 1,
734
+ "filter_list": [
735
+ {
736
+ "name": "score-first",
737
+ "filter": [
738
+ {
739
+ "function": "regex",
740
+ "regex_pattern": "(\\b[ABCD]\\b)"
741
+ },
742
+ {
743
+ "function": "take_first"
744
+ },
745
+ {
746
+ "function": "map",
747
+ "mapping_dict": {
748
+ "A": 0,
749
+ "B": 1,
750
+ "C": 2,
751
+ "D": 3
752
+ },
753
+ "default_value": -1
754
+ },
755
+ {
756
+ "function": "take_first"
757
+ }
758
+ ]
759
+ }
760
+ ],
761
+ "should_decontaminate": true,
762
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
763
+ },
764
+ "polish_psc_multiple_choice": {
765
+ "task": "polish_psc_multiple_choice",
766
+ "dataset_path": "allegro/klej-psc",
767
+ "training_split": "train",
768
+ "test_split": "test",
769
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
770
+ "doc_to_target": "{{label|int}}",
771
+ "doc_to_choice": [
772
+ "Nie",
773
+ "Tak"
774
+ ],
775
+ "description": "",
776
+ "target_delimiter": " ",
777
+ "fewshot_delimiter": "\n\n",
778
+ "num_fewshot": 0,
779
+ "metric_list": [
780
+ {
781
+ "metric": "acc",
782
+ "aggregation": "mean",
783
+ "higher_is_better": true
784
+ },
785
+ {
786
+ "metric": "acc_norm",
787
+ "aggregation": "mean",
788
+ "higher_is_better": true
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
795
+ },
796
+ "polish_psc_regex": {
797
+ "task": "polish_psc_regex",
798
+ "dataset_path": "allegro/klej-psc",
799
+ "training_split": "train",
800
+ "test_split": "test",
801
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
802
+ "doc_to_target": "{{label|int + 1}}",
803
+ "description": "",
804
+ "target_delimiter": " ",
805
+ "fewshot_delimiter": "\n\n",
806
+ "num_fewshot": 0,
807
+ "metric_list": [
808
+ {
809
+ "metric": "f1",
810
+ "aggregation": "mean",
811
+ "higher_is_better": true,
812
+ "hf_evaluate": true,
813
+ "average": "micro"
814
+ },
815
+ {
816
+ "metric": "accuracy",
817
+ "aggregation": "mean",
818
+ "higher_is_better": true
819
+ }
820
+ ],
821
+ "output_type": "generate_until",
822
+ "generation_kwargs": {
823
+ "until": [
824
+ ".",
825
+ ","
826
+ ],
827
+ "do_sample": false,
828
+ "temperature": 0.0,
829
+ "max_gen_toks": 50
830
+ },
831
+ "repeats": 1,
832
+ "filter_list": [
833
+ {
834
+ "name": "score-first",
835
+ "filter": [
836
+ {
837
+ "function": "regex",
838
+ "regex_pattern": "(\\b[ABCD]\\b)"
839
+ },
840
+ {
841
+ "function": "take_first"
842
+ },
843
+ {
844
+ "function": "map",
845
+ "mapping_dict": {
846
+ "A": 0,
847
+ "B": 1,
848
+ "C": 2,
849
+ "D": 3
850
+ },
851
+ "default_value": -1
852
+ },
853
+ {
854
+ "function": "take_first"
855
+ }
856
+ ]
857
+ }
858
+ ],
859
+ "should_decontaminate": true,
860
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
861
+ }
862
+ },
863
+ "versions": {
864
+ "belebele_pol_Latn": 0.0,
865
+ "polemo2_in": 1.0,
866
+ "polemo2_in_multiple_choice": "Yaml",
867
+ "polemo2_out": 1.0,
868
+ "polemo2_out_multiple_choice": "Yaml",
869
+ "polish": "N/A",
870
+ "polish_8tags_multiple_choice": "Yaml",
871
+ "polish_8tags_regex": "Yaml",
872
+ "polish_belebele_regex": "Yaml",
873
+ "polish_dyk_multiple_choice": "Yaml",
874
+ "polish_dyk_regex": "Yaml",
875
+ "polish_ppc_multiple_choice": "Yaml",
876
+ "polish_ppc_regex": "Yaml",
877
+ "polish_psc_multiple_choice": "Yaml",
878
+ "polish_psc_regex": "Yaml"
879
+ },
880
+ "n-shot": {
881
+ "belebele_pol_Latn": 0,
882
+ "polemo2_in": 0,
883
+ "polemo2_in_multiple_choice": 0,
884
+ "polemo2_out": 0,
885
+ "polemo2_out_multiple_choice": 0,
886
+ "polish": 0,
887
+ "polish_8tags_multiple_choice": 0,
888
+ "polish_8tags_regex": 0,
889
+ "polish_belebele_regex": 0,
890
+ "polish_dyk_multiple_choice": 0,
891
+ "polish_dyk_regex": 0,
892
+ "polish_ppc_multiple_choice": 0,
893
+ "polish_ppc_regex": 0,
894
+ "polish_psc_multiple_choice": 0,
895
+ "polish_psc_regex": 0
896
+ },
897
+ "config": {
898
+ "model": "hf",
899
+ "model_args": "pretrained=Voicelab/trurl-2-7b",
900
+ "batch_size": "auto",
901
+ "batch_sizes": [
902
+ 32
903
+ ],
904
+ "device": "cuda:0",
905
+ "use_cache": null,
906
+ "limit": null,
907
+ "bootstrap_iters": 100000,
908
+ "gen_kwargs": null
909
+ },
910
+ "git_hash": null
911
+ }
polish_benchmarks-out21/results_mt5-xl-0_polish_generate/results.json ADDED
@@ -0,0 +1,422 @@
1
+ {
2
+ "results": {
3
+ "polish_generate": {
4
+ "exact_match,score-first": 0.030536737884314748,
5
+ "exact_match_stderr,score-first": 0.01076945600934832,
6
+ "alias": "polish_generate"
7
+ },
8
+ "polemo2_in": {
9
+ "exact_match,score-first": 0.036011080332409975,
10
+ "exact_match_stderr,score-first": 0.006938833457029125,
11
+ "alias": " - polemo2_in"
12
+ },
13
+ "polemo2_out": {
14
+ "exact_match,score-first": 0.008097165991902834,
15
+ "exact_match_stderr,score-first": 0.004036245995213747,
16
+ "alias": " - polemo2_out"
17
+ },
18
+ "polish_8tags_regex": {
19
+ "exact_match,score-first": 0.042543458371454713,
20
+ "exact_match_stderr,score-first": 0.0030527098470848964,
21
+ "alias": " - polish_8tags_regex"
22
+ },
23
+ "polish_belebele_regex": {
24
+ "exact_match,score-first": 0.023333333333333334,
25
+ "exact_match_stderr,score-first": 0.005034794273880348,
26
+ "alias": " - polish_belebele_regex"
27
+ },
28
+ "polish_dyk_regex": {
29
+ "exact_match,score-first": 0.014577259475218658,
30
+ "exact_match_stderr,score-first": 0.0037381151461304968,
31
+ "alias": " - polish_dyk_regex"
32
+ },
33
+ "polish_ppc_regex": {
34
+ "exact_match,score-first": 0.03,
35
+ "exact_match_stderr,score-first": 0.0053971408290992015,
36
+ "alias": " - polish_ppc_regex"
37
+ },
38
+ "polish_psc_regex": {
39
+ "exact_match,score-first": 0.01020408163265306,
40
+ "exact_match_stderr,score-first": 0.0030623296535250712,
41
+ "alias": " - polish_psc_regex"
42
+ }
43
+ },
44
+ "groups": {
45
+ "polish_generate": {
46
+ "exact_match,score-first": 0.030536737884314748,
47
+ "exact_match_stderr,score-first": 0.01076945600934832,
48
+ "alias": "polish_generate"
49
+ }
50
+ },
51
+ "configs": {
52
+ "polemo2_in": {
53
+ "task": "polemo2_in",
54
+ "group": [
55
+ "polemo2"
56
+ ],
57
+ "dataset_path": "allegro/klej-polemo2-in",
58
+ "training_split": "train",
59
+ "validation_split": "validation",
60
+ "test_split": "test",
61
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
62
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
63
+ "description": "",
64
+ "target_delimiter": " ",
65
+ "fewshot_delimiter": "\n\n",
66
+ "num_fewshot": 0,
67
+ "metric_list": [
68
+ {
69
+ "metric": "exact_match",
70
+ "aggregation": "mean",
71
+ "higher_is_better": true
72
+ }
73
+ ],
74
+ "output_type": "generate_until",
75
+ "generation_kwargs": {
76
+ "until": [
77
+ ".",
78
+ ","
79
+ ],
80
+ "do_sample": false,
81
+ "temperature": 0.0,
82
+ "max_gen_toks": 50
83
+ },
84
+ "repeats": 1,
85
+ "filter_list": [
86
+ {
87
+ "name": "score-first",
88
+ "filter": [
89
+ {
90
+ "function": "regex",
91
+ "regex_pattern": "(\\b[ABCD]\\b)"
92
+ },
93
+ {
94
+ "function": "take_first"
95
+ }
96
+ ]
97
+ }
98
+ ],
99
+ "should_decontaminate": true,
100
+ "doc_to_decontamination_query": "{{sentence}}",
101
+ "metadata": {
102
+ "version": 1.0
103
+ }
104
+ },
105
+ "polemo2_out": {
106
+ "task": "polemo2_out",
107
+ "group": [
108
+ "polemo2"
109
+ ],
110
+ "dataset_path": "allegro/klej-polemo2-out",
111
+ "training_split": "train",
112
+ "validation_split": "validation",
113
+ "test_split": "test",
114
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
115
+ "doc_to_target": "{{{'__label__meta_zero': 'A', '__label__meta_minus_m': 'B', '__label__meta_plus_m': 'C', '__label__meta_amb': 'D'}.get(target)}}",
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "num_fewshot": 0,
120
+ "metric_list": [
121
+ {
122
+ "metric": "exact_match",
123
+ "aggregation": "mean",
124
+ "higher_is_better": true
125
+ }
126
+ ],
127
+ "output_type": "generate_until",
128
+ "generation_kwargs": {
129
+ "until": [
130
+ ".",
131
+ ","
132
+ ],
133
+ "do_sample": false,
134
+ "temperature": 0.0,
135
+ "max_gen_toks": 50
136
+ },
137
+ "repeats": 1,
138
+ "filter_list": [
139
+ {
140
+ "name": "score-first",
141
+ "filter": [
142
+ {
143
+ "function": "regex",
144
+ "regex_pattern": "(\\b[ABCD]\\b)"
145
+ },
146
+ {
147
+ "function": "take_first"
148
+ }
149
+ ]
150
+ }
151
+ ],
152
+ "should_decontaminate": true,
153
+ "doc_to_decontamination_query": "{{sentence}}",
154
+ "metadata": {
155
+ "version": 1.0
156
+ }
157
+ },
158
+ "polish_8tags_regex": {
159
+ "task": "polish_8tags_regex",
160
+ "dataset_path": "sdadas/8tags",
161
+ "training_split": "train",
162
+ "validation_split": "validation",
163
+ "test_split": "test",
164
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:",
165
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H'}.get(label)}}",
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 0,
170
+ "metric_list": [
171
+ {
172
+ "metric": "exact_match",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ }
176
+ ],
177
+ "output_type": "generate_until",
178
+ "generation_kwargs": {
179
+ "until": [
180
+ ".",
181
+ ","
182
+ ],
183
+ "do_sample": false,
184
+ "temperature": 0.0,
185
+ "max_gen_toks": 50
186
+ },
187
+ "repeats": 1,
188
+ "filter_list": [
189
+ {
190
+ "name": "score-first",
191
+ "filter": [
192
+ {
193
+ "function": "regex",
194
+ "regex_pattern": "(\\b[ABCDEFGH]\\b)"
195
+ },
196
+ {
197
+ "function": "take_first"
198
+ }
199
+ ]
200
+ }
201
+ ],
202
+ "should_decontaminate": true,
203
+ "doc_to_decontamination_query": "{{sentence}}"
204
+ },
205
+ "polish_belebele_regex": {
206
+ "task": "polish_belebele_regex",
207
+ "dataset_path": "facebook/belebele",
208
+ "test_split": "pol_Latn",
209
+ "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:",
210
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(correct_answer_num|int - 1)}}",
211
+ "description": "",
212
+ "target_delimiter": " ",
213
+ "fewshot_delimiter": "\n\n",
214
+ "num_fewshot": 0,
215
+ "metric_list": [
216
+ {
217
+ "metric": "exact_match",
218
+ "aggregation": "mean",
219
+ "higher_is_better": true
220
+ }
221
+ ],
222
+ "output_type": "generate_until",
223
+ "generation_kwargs": {
224
+ "until": [
225
+ ".",
226
+ ","
227
+ ],
228
+ "do_sample": false,
229
+ "temperature": 0.0,
230
+ "max_gen_toks": 50
231
+ },
232
+ "repeats": 1,
233
+ "filter_list": [
234
+ {
235
+ "name": "score-first",
236
+ "filter": [
237
+ {
238
+ "function": "regex",
239
+ "regex_pattern": "(\\b[ABCD]\\b)"
240
+ },
241
+ {
242
+ "function": "take_first"
243
+ }
244
+ ]
245
+ }
246
+ ],
247
+ "should_decontaminate": true,
248
+ "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}"
249
+ },
250
+ "polish_dyk_regex": {
251
+ "task": "polish_dyk_regex",
252
+ "dataset_path": "allegro/klej-dyk",
253
+ "training_split": "train",
254
+ "test_split": "test",
255
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:",
256
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(target|int + 1)}}",
257
+ "description": "",
258
+ "target_delimiter": " ",
259
+ "fewshot_delimiter": "\n\n",
260
+ "num_fewshot": 0,
261
+ "metric_list": [
262
+ {
263
+ "metric": "exact_match",
264
+ "aggregation": "mean",
265
+ "higher_is_better": true
266
+ }
267
+ ],
268
+ "output_type": "generate_until",
269
+ "generation_kwargs": {
270
+ "until": [
271
+ ".",
272
+ ","
273
+ ],
274
+ "do_sample": false,
275
+ "temperature": 0.0,
276
+ "max_gen_toks": 50
277
+ },
278
+ "repeats": 1,
279
+ "filter_list": [
280
+ {
281
+ "name": "score-first",
282
+ "filter": [
283
+ {
284
+ "function": "regex",
285
+ "regex_pattern": "(\\b[ABCD]\\b)"
286
+ },
287
+ {
288
+ "function": "take_first"
289
+ }
290
+ ]
291
+ }
292
+ ],
293
+ "should_decontaminate": true,
294
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
295
+ },
296
+ "polish_ppc_regex": {
297
+ "task": "polish_ppc_regex",
298
+ "dataset_path": "sdadas/ppc",
299
+ "training_split": "train",
300
+ "validation_split": "validation",
301
+ "test_split": "test",
302
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:",
303
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int)}}",
304
+ "description": "",
305
+ "target_delimiter": " ",
306
+ "fewshot_delimiter": "\n\n",
307
+ "num_fewshot": 0,
308
+ "metric_list": [
309
+ {
310
+ "metric": "exact_match",
311
+ "aggregation": "mean",
312
+ "higher_is_better": true
313
+ }
314
+ ],
315
+ "output_type": "generate_until",
316
+ "generation_kwargs": {
317
+ "until": [
318
+ ".",
319
+ ","
320
+ ],
321
+ "do_sample": false,
322
+ "temperature": 0.0,
323
+ "max_gen_toks": 50
324
+ },
325
+ "repeats": 1,
326
+ "filter_list": [
327
+ {
328
+ "name": "score-first",
329
+ "filter": [
330
+ {
331
+ "function": "regex",
332
+ "regex_pattern": "(\\b[ABCD]\\b)"
333
+ },
334
+ {
335
+ "function": "take_first"
336
+ }
337
+ ]
338
+ }
339
+ ],
340
+ "should_decontaminate": true,
341
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
342
+ },
343
+ "polish_psc_regex": {
344
+ "task": "polish_psc_regex",
345
+ "dataset_path": "allegro/klej-psc",
346
+ "training_split": "train",
347
+ "test_split": "test",
348
+ "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:",
349
+ "doc_to_target": "{{{0: 'A', 1: 'B', 2: 'C', 3: 'D'}.get(label|int + 1)}}",
350
+ "description": "",
351
+ "target_delimiter": " ",
352
+ "fewshot_delimiter": "\n\n",
353
+ "num_fewshot": 0,
354
+ "metric_list": [
355
+ {
356
+ "metric": "exact_match",
357
+ "aggregation": "mean",
358
+ "higher_is_better": true
359
+ }
360
+ ],
361
+ "output_type": "generate_until",
362
+ "generation_kwargs": {
363
+ "until": [
364
+ ".",
365
+ ","
366
+ ],
367
+ "do_sample": false,
368
+ "temperature": 0.0,
369
+ "max_gen_toks": 50
370
+ },
371
+ "repeats": 1,
372
+ "filter_list": [
373
+ {
374
+ "name": "score-first",
375
+ "filter": [
376
+ {
377
+ "function": "regex",
378
+ "regex_pattern": "(\\b[ABCD]\\b)"
379
+ },
380
+ {
381
+ "function": "take_first"
382
+ }
383
+ ]
384
+ }
385
+ ],
386
+ "should_decontaminate": true,
387
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
388
+ }
389
+ },
390
+ "versions": {
391
+ "polemo2_in": 1.0,
392
+ "polemo2_out": 1.0,
393
+ "polish_8tags_regex": "Yaml",
394
+ "polish_belebele_regex": "Yaml",
395
+ "polish_dyk_regex": "Yaml",
396
+ "polish_generate": "N/A",
397
+ "polish_ppc_regex": "Yaml",
398
+ "polish_psc_regex": "Yaml"
399
+ },
400
+ "n-shot": {
401
+ "polemo2_in": 0,
402
+ "polemo2_out": 0,
403
+ "polish_8tags_regex": 0,
404
+ "polish_belebele_regex": 0,
405
+ "polish_dyk_regex": 0,
406
+ "polish_generate": 0,
407
+ "polish_ppc_regex": 0,
408
+ "polish_psc_regex": 0
409
+ },
410
+ "config": {
411
+ "model": "hf",
412
+ "model_args": "pretrained=google/mt5-xl",
413
+ "batch_size": "4",
414
+ "batch_sizes": [],
415
+ "device": "cuda:0",
416
+ "use_cache": null,
417
+ "limit": null,
418
+ "bootstrap_iters": 100000,
419
+ "gen_kwargs": null
420
+ },
421
+ "git_hash": null
422
+ }
polish_benchmarks-out21/results_mt5-xl-0_polish_mc/results.json ADDED
@@ -0,0 +1,357 @@
1
+ {
2
+ "results": {
3
+ "polish_mc": {
4
+ "acc,none": 0.2997394476289734,
5
+ "acc_stderr,none": 0.2267205746448775,
6
+ "acc_norm,none": 0.30484627410109433,
7
+ "acc_norm_stderr,none": 0.22457641813702411,
8
+ "alias": "polish_mc"
9
+ },
10
+ "belebele_pol_Latn": {
11
+ "acc,none": 0.21888888888888888,
12
+ "acc_stderr,none": 0.013790766978256945,
13
+ "acc_norm,none": 0.21888888888888888,
14
+ "acc_norm_stderr,none": 0.013790766978256945,
15
+ "alias": " - belebele_pol_Latn"
16
+ },
17
+ "polemo2_in_multiple_choice": {
18
+ "acc,none": 0.14265927977839335,
19
+ "acc_stderr,none": 0.013024441425272966,
20
+ "acc_norm,none": 0.15096952908587258,
21
+ "acc_norm_stderr,none": 0.0133333308677827,
22
+ "alias": " - polemo2_in_multiple_choice"
23
+ },
24
+ "polemo2_out_multiple_choice": {
25
+ "acc,none": 0.06275303643724696,
26
+ "acc_stderr,none": 0.010922471844180805,
27
+ "acc_norm,none": 0.32793522267206476,
28
+ "acc_norm_stderr,none": 0.021143463996780142,
29
+ "alias": " - polemo2_out_multiple_choice"
30
+ },
31
+ "polish_8tags_multiple_choice": {
32
+ "acc,none": 0.1253430924062214,
33
+ "acc_stderr,none": 0.005008165412043464,
34
+ "acc_norm,none": 0.10521500457456541,
35
+ "acc_norm_stderr,none": 0.004640959541395227,
36
+ "alias": " - polish_8tags_multiple_choice"
37
+ },
38
+ "polish_dyk_multiple_choice": {
39
+ "acc,none": 0.8309037900874635,
40
+ "acc_stderr,none": 0.011690840877120627,
41
+ "acc_norm,none": 0.8309037900874635,
42
+ "acc_norm_stderr,none": 0.011690840877120627,
43
+ "alias": " - polish_dyk_multiple_choice"
44
+ },
45
+ "polish_ppc_multiple_choice": {
46
+ "acc,none": 0.392,
47
+ "acc_stderr,none": 0.015445859463771302,
48
+ "acc_norm,none": 0.392,
49
+ "acc_norm_stderr,none": 0.015445859463771302,
50
+ "alias": " - polish_ppc_multiple_choice"
51
+ },
52
+ "polish_psc_multiple_choice": {
53
+ "acc,none": 0.6957328385899815,
54
+ "acc_stderr,none": 0.014019771683156095,
55
+ "acc_norm,none": 0.6957328385899815,
56
+ "acc_norm_stderr,none": 0.014019771683156095,
57
+ "alias": " - polish_psc_multiple_choice"
58
+ }
59
+ },
60
+ "groups": {
61
+ "polish_mc": {
62
+ "acc,none": 0.2997394476289734,
63
+ "acc_stderr,none": 0.2267205746448775,
64
+ "acc_norm,none": 0.30484627410109433,
65
+ "acc_norm_stderr,none": 0.22457641813702411,
66
+ "alias": "polish_mc"
67
+ }
68
+ },
69
+ "configs": {
70
+ "belebele_pol_Latn": {
71
+ "task": "belebele_pol_Latn",
72
+ "group": "belebele",
73
+ "dataset_path": "facebook/belebele",
74
+ "test_split": "pol_Latn",
75
+ "fewshot_split": "pol_Latn",
76
+ "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
77
+ "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
78
+ "doc_to_choice": [
79
+ "A",
80
+ "B",
81
+ "C",
82
+ "D"
83
+ ],
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "fewshot_config": {
88
+ "sampler": "first_n"
89
+ },
90
+ "num_fewshot": 0,
91
+ "metric_list": [
92
+ {
93
+ "metric": "acc",
94
+ "aggregation": "mean",
95
+ "higher_is_better": true
96
+ },
97
+ {
98
+ "metric": "acc_norm",
99
+ "aggregation": "mean",
100
+ "higher_is_better": true
101
+ }
102
+ ],
103
+ "output_type": "multiple_choice",
104
+ "repeats": 1,
105
+ "should_decontaminate": true,
106
+ "doc_to_decontamination_query": "{{question}}",
107
+ "metadata": {
108
+ "version": 0.0
109
+ }
110
+ },
111
+ "polemo2_in_multiple_choice": {
112
+ "task": "polemo2_in_multiple_choice",
113
+ "group": [
114
+ "polemo2_mc"
115
+ ],
116
+ "dataset_path": "allegro/klej-polemo2-in",
117
+ "training_split": "train",
118
+ "validation_split": "validation",
119
+ "test_split": "test",
120
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
121
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
122
+ "doc_to_choice": [
123
+ "Neutralny",
124
+ "Negatywny",
125
+ "Pozytywny",
126
+ "Niejednoznaczny"
127
+ ],
128
+ "description": "",
129
+ "target_delimiter": " ",
130
+ "fewshot_delimiter": "\n\n",
131
+ "num_fewshot": 0,
132
+ "metric_list": [
133
+ {
134
+ "metric": "acc",
135
+ "aggregation": "mean",
136
+ "higher_is_better": true
137
+ },
138
+ {
139
+ "metric": "acc_norm",
140
+ "aggregation": "mean",
141
+ "higher_is_better": true
142
+ }
143
+ ],
144
+ "output_type": "multiple_choice",
145
+ "repeats": 1,
146
+ "should_decontaminate": true,
147
+ "doc_to_decontamination_query": "{{sentence}}"
148
+ },
149
+ "polemo2_out_multiple_choice": {
150
+ "task": "polemo2_out_multiple_choice",
151
+ "group": [
152
+ "polemo2_mc"
153
+ ],
154
+ "dataset_path": "allegro/klej-polemo2-out",
155
+ "training_split": "train",
156
+ "validation_split": "validation",
157
+ "test_split": "test",
158
+ "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
159
+ "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
160
+ "doc_to_choice": [
161
+ "Neutralny",
162
+ "Negatywny",
163
+ "Pozytywny",
164
+ "Niejednoznaczny"
165
+ ],
166
+ "description": "",
167
+ "target_delimiter": " ",
168
+ "fewshot_delimiter": "\n\n",
169
+ "num_fewshot": 0,
170
+ "metric_list": [
171
+ {
172
+ "metric": "acc",
173
+ "aggregation": "mean",
174
+ "higher_is_better": true
175
+ },
176
+ {
177
+ "metric": "acc_norm",
178
+ "aggregation": "mean",
179
+ "higher_is_better": true
180
+ }
181
+ ],
182
+ "output_type": "multiple_choice",
183
+ "repeats": 1,
184
+ "should_decontaminate": true,
185
+ "doc_to_decontamination_query": "{{sentence}}"
186
+ },
187
+ "polish_8tags_multiple_choice": {
188
+ "task": "polish_8tags_multiple_choice",
189
+ "dataset_path": "sdadas/8tags",
190
+ "training_split": "train",
191
+ "test_split": "test",
192
+ "fewshot_split": "train",
193
+ "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
194
+ "doc_to_target": "{{label|int}}",
195
+ "doc_to_choice": [
196
+ "Film",
197
+ "Historia",
198
+ "Jedzenie",
199
+ "Medycyna",
200
+ "Motoryzacja",
201
+ "Praca",
202
+ "Sport",
203
+ "Technologie"
204
+ ],
205
+ "description": "",
206
+ "target_delimiter": " ",
207
+ "fewshot_delimiter": "\n\n",
208
+ "num_fewshot": 0,
209
+ "metric_list": [
210
+ {
211
+ "metric": "acc",
212
+ "aggregation": "mean",
213
+ "higher_is_better": true
214
+ },
215
+ {
216
+ "metric": "acc_norm",
217
+ "aggregation": "mean",
218
+ "higher_is_better": true
219
+ }
220
+ ],
221
+ "output_type": "multiple_choice",
222
+ "repeats": 1,
223
+ "should_decontaminate": true,
224
+ "doc_to_decontamination_query": "{{sentence}}"
225
+ },
226
+ "polish_dyk_multiple_choice": {
227
+ "task": "polish_dyk_multiple_choice",
228
+ "dataset_path": "allegro/klej-dyk",
229
+ "training_split": "train",
230
+ "test_split": "test",
231
+ "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
232
+ "doc_to_target": "{{target|int}}",
233
+ "doc_to_choice": [
234
+ "Nie",
235
+ "Tak"
236
+ ],
237
+ "description": "",
238
+ "target_delimiter": " ",
239
+ "fewshot_delimiter": "\n\n",
240
+ "num_fewshot": 0,
241
+ "metric_list": [
242
+ {
243
+ "metric": "acc",
244
+ "aggregation": "mean",
245
+ "higher_is_better": true
246
+ },
247
+ {
248
+ "metric": "acc_norm",
249
+ "aggregation": "mean",
250
+ "higher_is_better": true
251
+ }
252
+ ],
253
+ "output_type": "multiple_choice",
254
+ "repeats": 1,
255
+ "should_decontaminate": true,
256
+ "doc_to_decontamination_query": "{{question}} {{answer}}"
257
+ },
258
+ "polish_ppc_multiple_choice": {
259
+ "task": "polish_ppc_multiple_choice",
260
+ "dataset_path": "sdadas/ppc",
261
+ "training_split": "train",
262
+ "validation_split": "validation",
263
+ "test_split": "test",
264
+ "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
265
+ "doc_to_target": "{{label|int - 1}}",
266
+ "doc_to_choice": [
267
+ "A",
268
+ "B",
269
+ "C"
270
+ ],
271
+ "description": "",
272
+ "target_delimiter": " ",
273
+ "fewshot_delimiter": "\n\n",
274
+ "num_fewshot": 0,
275
+ "metric_list": [
276
+ {
277
+ "metric": "acc",
278
+ "aggregation": "mean",
279
+ "higher_is_better": true
280
+ },
281
+ {
282
+ "metric": "acc_norm",
283
+ "aggregation": "mean",
284
+ "higher_is_better": true
285
+ }
286
+ ],
287
+ "output_type": "multiple_choice",
288
+ "repeats": 1,
289
+ "should_decontaminate": true,
290
+ "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
291
+ },
292
+ "polish_psc_multiple_choice": {
293
+ "task": "polish_psc_multiple_choice",
294
+ "dataset_path": "allegro/klej-psc",
295
+ "training_split": "train",
296
+ "test_split": "test",
297
+ "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
298
+ "doc_to_target": "{{label|int}}",
299
+ "doc_to_choice": [
300
+ "Nie",
301
+ "Tak"
302
+ ],
303
+ "description": "",
304
+ "target_delimiter": " ",
305
+ "fewshot_delimiter": "\n\n",
306
+ "num_fewshot": 0,
307
+ "metric_list": [
308
+ {
309
+ "metric": "acc",
310
+ "aggregation": "mean",
311
+ "higher_is_better": true
312
+ },
313
+ {
314
+ "metric": "acc_norm",
315
+ "aggregation": "mean",
316
+ "higher_is_better": true
317
+ }
318
+ ],
319
+ "output_type": "multiple_choice",
320
+ "repeats": 1,
321
+ "should_decontaminate": true,
322
+ "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
323
+ }
324
+ },
325
+ "versions": {
326
+ "belebele_pol_Latn": 0.0,
327
+ "polemo2_in_multiple_choice": "Yaml",
328
+ "polemo2_out_multiple_choice": "Yaml",
329
+ "polish_8tags_multiple_choice": "Yaml",
330
+ "polish_dyk_multiple_choice": "Yaml",
331
+ "polish_mc": "N/A",
332
+ "polish_ppc_multiple_choice": "Yaml",
333
+ "polish_psc_multiple_choice": "Yaml"
334
+ },
335
+ "n-shot": {
336
+ "belebele_pol_Latn": 0,
337
+ "polemo2_in_multiple_choice": 0,
338
+ "polemo2_out_multiple_choice": 0,
339
+ "polish_8tags_multiple_choice": 0,
340
+ "polish_dyk_multiple_choice": 0,
341
+ "polish_mc": 0,
342
+ "polish_ppc_multiple_choice": 0,
343
+ "polish_psc_multiple_choice": 0
344
+ },
345
+ "config": {
346
+ "model": "hf",
347
+ "model_args": "pretrained=google/mt5-xl",
348
+ "batch_size": "4",
349
+ "batch_sizes": [],
350
+ "device": "cuda:0",
351
+ "use_cache": null,
352
+ "limit": null,
353
+ "bootstrap_iters": 100000,
354
+ "gen_kwargs": null
355
+ },
356
+ "git_hash": null
357
+ }
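
The tail of the file above follows the usual evaluation-harness layout: per-task scores under "results", aggregated group scores under "groups" (here "polish_mc"), task definitions under "configs", and the run parameters under "config" (model "hf", pretrained=google/mt5-xl, batch size 4, cuda:0). The snippet below is a minimal Python sketch of how such a results.json can be summarized after a run; the file path is an assumption, the metric keys ("acc,none", "acc_norm,none") and the "polish_mc" group name are taken from the fragment above, and nothing in it is part of the harness itself.

```python
import json
from pathlib import Path

# Assumed path: point this at any results.json produced by a run
# configured like the "config" block above.
results_path = Path("results.json")
data = json.loads(results_path.read_text(encoding="utf-8"))

# Run parameters and the aggregated "polish_mc" group accuracy.
print("model_args:", data["config"]["model_args"])
print("polish_mc acc: {:.4f}".format(data["groups"]["polish_mc"]["acc,none"]))

# Per-task accuracy and normalized accuracy, as stored under "results".
for task, metrics in sorted(data.get("results", {}).items()):
    acc = metrics.get("acc,none")
    if acc is None:
        continue
    acc_norm = metrics.get("acc_norm,none", float("nan"))
    print(f"{task:35s} acc={acc:.4f} acc_norm={acc_norm:.4f}")
```

Because every results.json committed here shares this structure, the same script can be pointed at any of the per-model output directories to compare runs.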